You are viewing a plain text version of this content; the canonical (HTML) version, including the original hyperlink, is available in the mailing-list archive.
Posted to commits@spark.apache.org by jo...@apache.org on 2014/10/06 23:19:29 UTC

git commit: [SPARK-3479] [Build] Report failed test category

Repository: spark
Updated Branches:
  refs/heads/master 2300eb58a -> 69c3f441a


[SPARK-3479] [Build] Report failed test category

This PR allows SparkQA (i.e. Jenkins) to report in its posts to GitHub what category of test failed, if one can be determined.

The failure categories are:
* general failure
* RAT checks failed
* Scala style checks failed
* Python style checks failed
* Build failed
* Spark unit tests failed
* PySpark unit tests failed
* MiMa checks failed

This PR also fixes the diffing logic used to determine if a patch introduces new classes.

Author: Nicholas Chammas <ni...@gmail.com>

Closes #2606 from nchammas/report-failed-test-category and squashes the following commits:

d67df03 [Nicholas Chammas] report what test category failed


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/69c3f441
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/69c3f441
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/69c3f441

Branch: refs/heads/master
Commit: 69c3f441a9b6e942d6c08afecd59a0349d61cc7b
Parents: 2300eb5
Author: Nicholas Chammas <ni...@gmail.com>
Authored: Mon Oct 6 14:19:06 2014 -0700
Committer: Josh Rosen <jo...@apache.org>
Committed: Mon Oct 6 14:19:06 2014 -0700

----------------------------------------------------------------------
 dev/run-tests          |  32 +++++++++++++-
 dev/run-tests-codes.sh |  27 ++++++++++++
 dev/run-tests-jenkins  | 102 ++++++++++++++++++++++++++++++--------------
 3 files changed, 126 insertions(+), 35 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/69c3f441/dev/run-tests
----------------------------------------------------------------------
diff --git a/dev/run-tests b/dev/run-tests
index c3d8f49..4be2baa 100755
--- a/dev/run-tests
+++ b/dev/run-tests
@@ -24,6 +24,16 @@ cd "$FWDIR"
 # Remove work directory
 rm -rf ./work
 
+source "$FWDIR/dev/run-tests-codes.sh"
+
+CURRENT_BLOCK=$BLOCK_GENERAL
+
+function handle_error () {
+  echo "[error] Got a return code of $? on line $1 of the run-tests script."
+  exit $CURRENT_BLOCK
+}
+
+
 # Build against the right verison of Hadoop.
 {
   if [ -n "$AMPLAB_JENKINS_BUILD_PROFILE" ]; then
@@ -91,26 +101,34 @@ if [ -n "$AMPLAB_JENKINS" ]; then
   fi
 fi
 
-# Fail fast
-set -e
 set -o pipefail
+trap 'handle_error $LINENO' ERR
 
 echo ""
 echo "========================================================================="
 echo "Running Apache RAT checks"
 echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_RAT
+
 ./dev/check-license
 
 echo ""
 echo "========================================================================="
 echo "Running Scala style checks"
 echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_SCALA_STYLE
+
 ./dev/lint-scala
 
 echo ""
 echo "========================================================================="
 echo "Running Python style checks"
 echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_PYTHON_STYLE
+
 ./dev/lint-python
 
 echo ""
@@ -118,6 +136,8 @@ echo "========================================================================="
 echo "Building Spark"
 echo "========================================================================="
 
+CURRENT_BLOCK=$BLOCK_BUILD
+
 {
   # We always build with Hive because the PySpark Spark SQL tests need it.
   BUILD_MVN_PROFILE_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive"
@@ -141,6 +161,8 @@ echo "========================================================================="
 echo "Running Spark unit tests"
 echo "========================================================================="
 
+CURRENT_BLOCK=$BLOCK_SPARK_UNIT_TESTS
+
 {
   # If the Spark SQL tests are enabled, run the tests with the Hive profiles enabled.
   # This must be a single argument, as it is.
@@ -175,10 +197,16 @@ echo ""
 echo "========================================================================="
 echo "Running PySpark tests"
 echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_PYSPARK_UNIT_TESTS
+
 ./python/run-tests
 
 echo ""
 echo "========================================================================="
 echo "Detecting binary incompatibilites with MiMa"
 echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_MIMA
+
 ./dev/mima

http://git-wip-us.apache.org/repos/asf/spark/blob/69c3f441/dev/run-tests-codes.sh
----------------------------------------------------------------------
diff --git a/dev/run-tests-codes.sh b/dev/run-tests-codes.sh
new file mode 100644
index 0000000..1348e06
--- /dev/null
+++ b/dev/run-tests-codes.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+readonly BLOCK_GENERAL=10
+readonly BLOCK_RAT=11
+readonly BLOCK_SCALA_STYLE=12
+readonly BLOCK_PYTHON_STYLE=13
+readonly BLOCK_BUILD=14
+readonly BLOCK_SPARK_UNIT_TESTS=15
+readonly BLOCK_PYSPARK_UNIT_TESTS=16
+readonly BLOCK_MIMA=17

http://git-wip-us.apache.org/repos/asf/spark/blob/69c3f441/dev/run-tests-jenkins
----------------------------------------------------------------------
diff --git a/dev/run-tests-jenkins b/dev/run-tests-jenkins
index 0b1e31b..451f3b7 100755
--- a/dev/run-tests-jenkins
+++ b/dev/run-tests-jenkins
@@ -26,9 +26,23 @@
 FWDIR="$(cd `dirname $0`/..; pwd)"
 cd "$FWDIR"
 
+source "$FWDIR/dev/run-tests-codes.sh"
+
 COMMENTS_URL="https://api.github.com/repos/apache/spark/issues/$ghprbPullId/comments"
 PULL_REQUEST_URL="https://github.com/apache/spark/pull/$ghprbPullId"
 
+# Important Environment Variables
+# ---
+# $ghprbActualCommit
+#+  This is the hash of the most recent commit in the PR.
+#+  The merge-base of this and master is the commit from which the PR was branched.
+# $sha1
+#+  If the patch merges cleanly, this is a reference to the merge commit hash
+#+    (e.g. "origin/pr/2606/merge").
+#+  If the patch does not merge cleanly, it is equal to $ghprbActualCommit.
+#+  The merge-base of this and master in the case of a clean merge is the most recent commit
+#+    against master.
+
 COMMIT_URL="https://github.com/apache/spark/commit/${ghprbActualCommit}"
 # GitHub doesn't auto-link short hashes when submitted via the API, unfortunately. :(
 SHORT_COMMIT_HASH="${ghprbActualCommit:0:7}"
@@ -84,42 +98,46 @@ function post_message () {
   fi
 }
 
+
+# We diff master...$ghprbActualCommit because that gets us changes introduced in the PR
+#+ and not anything else added to master since the PR was branched.
+
 # check PR merge-ability and check for new public classes
 {
   if [ "$sha1" == "$ghprbActualCommit" ]; then
-    merge_note=" * This patch **does not** merge cleanly!"
+    merge_note=" * This patch **does not merge cleanly**."
   else
     merge_note=" * This patch merges cleanly."
+  fi
+  
+  source_files=$(
+      git diff master...$ghprbActualCommit --name-only  `# diff patch against master from branch point` \
+    | grep -v -e "\/test"                               `# ignore files in test directories` \
+    | grep -e "\.py$" -e "\.java$" -e "\.scala$"        `# include only code files` \
+    | tr "\n" " "
+  )
+  new_public_classes=$(
+      git diff master...$ghprbActualCommit ${source_files}      `# diff patch against master from branch point` \
+    | grep "^\+"                              `# filter in only added lines` \
+    | sed -r -e "s/^\+//g"                    `# remove the leading +` \
+    | grep -e "trait " -e "class "            `# filter in lines with these key words` \
+    | grep -e "{" -e "("                      `# filter in lines with these key words, too` \
+    | grep -v -e "\@\@" -e "private"          `# exclude lines with these words` \
+    | grep -v -e "^// " -e "^/\*" -e "^ \* "  `# exclude comment lines` \
+    | sed -r -e "s/\{.*//g"                   `# remove from the { onwards` \
+    | sed -r -e "s/\}//g"                     `# just in case, remove }; they mess the JSON` \
+    | sed -r -e "s/\"/\\\\\"/g"               `# escape double quotes; they mess the JSON` \
+    | sed -r -e "s/^(.*)$/\`\1\`/g"           `# surround with backticks for style` \
+    | sed -r -e "s/^/  \* /g"                 `# prepend '  *' to start of line` \
+    | sed -r -e "s/$/\\\n/g"                  `# append newline to end of line` \
+    | tr -d "\n"                              `# remove actual LF characters`
+  )
 
-    source_files=$(
-        git diff master... --name-only              `# diff patch against master from branch point` \
-      | grep -v -e "\/test"                         `# ignore files in test directories` \
-      | grep -e "\.py$" -e "\.java$" -e "\.scala$"  `# include only code files` \
-      | tr "\n" " "
-    )
-    new_public_classes=$(
-        git diff master... ${source_files}      `# diff patch against master from branch point` \
-      | grep "^\+"                              `# filter in only added lines` \
-      | sed -r -e "s/^\+//g"                    `# remove the leading +` \
-      | grep -e "trait " -e "class "            `# filter in lines with these key words` \
-      | grep -e "{" -e "("                      `# filter in lines with these key words, too` \
-      | grep -v -e "\@\@" -e "private"          `# exclude lines with these words` \
-      | grep -v -e "^// " -e "^/\*" -e "^ \* "  `# exclude comment lines` \
-      | sed -r -e "s/\{.*//g"                   `# remove from the { onwards` \
-      | sed -r -e "s/\}//g"                     `# just in case, remove }; they mess the JSON` \
-      | sed -r -e "s/\"/\\\\\"/g"               `# escape double quotes; they mess the JSON` \
-      | sed -r -e "s/^(.*)$/\`\1\`/g"           `# surround with backticks for style` \
-      | sed -r -e "s/^/  \* /g"                 `# prepend '  *' to start of line` \
-      | sed -r -e "s/$/\\\n/g"                  `# append newline to end of line` \
-      | tr -d "\n"                              `# remove actual LF characters`
-    )
-
-    if [ "$new_public_classes" == "" ]; then
-      public_classes_note=" * This patch adds no public classes."
-    else
-      public_classes_note=" * This patch adds the following public classes _(experimental)_:"
-      public_classes_note="${public_classes_note}\n${new_public_classes}"
-    fi
+  if [ -z "$new_public_classes" ]; then
+    public_classes_note=" * This patch adds no public classes."
+  else
+    public_classes_note=" * This patch adds the following public classes _(experimental)_:"
+    public_classes_note="${public_classes_note}\n${new_public_classes}"
   fi
 }
 
@@ -147,12 +165,30 @@ function post_message () {
 
     post_message "$fail_message"
     exit $test_result
+  elif [ "$test_result" -eq "0" ]; then
+    test_result_note=" * This patch **passes all tests**."
   else
-    if [ "$test_result" -eq "0" ]; then
-      test_result_note=" * This patch **passes** unit tests."
+    if [ "$test_result" -eq "$BLOCK_GENERAL" ]; then
+      failing_test="some tests"
+    elif [ "$test_result" -eq "$BLOCK_RAT" ]; then
+      failing_test="RAT tests"
+    elif [ "$test_result" -eq "$BLOCK_SCALA_STYLE" ]; then
+      failing_test="Scala style tests"
+    elif [ "$test_result" -eq "$BLOCK_PYTHON_STYLE" ]; then
+      failing_test="Python style tests"
+    elif [ "$test_result" -eq "$BLOCK_BUILD" ]; then
+      failing_test="to build"
+    elif [ "$test_result" -eq "$BLOCK_SPARK_UNIT_TESTS" ]; then
+      failing_test="Spark unit tests"
+    elif [ "$test_result" -eq "$BLOCK_PYSPARK_UNIT_TESTS" ]; then
+      failing_test="PySpark unit tests"
+    elif [ "$test_result" -eq "$BLOCK_MIMA" ]; then
+      failing_test="MiMa tests"
     else
-      test_result_note=" * This patch **fails** unit tests."
+      failing_test="some tests"
     fi
+    
+    test_result_note=" * This patch **fails $failing_test**."
   fi
 }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org