Posted to commits@spark.apache.org by jo...@apache.org on 2015/06/29 17:55:46 UTC

spark git commit: [SPARK-8693] [PROJECT INFRA] profiles and goals are not printed in a nice way

Repository: spark
Updated Branches:
  refs/heads/master 630bd5fd8 -> 5c796d576


[SPARK-8693] [PROJECT INFRA] profiles and goals are not printed in a nice way

Hotfix to correct formatting errors in print statements within the dev and Jenkins builds. The error looks like:

```
-Phadoop-1[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments:  -Dhadoop.version=1.0.4[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments:  -Pkinesis-asl[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments:  -Phive-thriftserver[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments:  -Phive[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments:  package[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments:  assembly/assembly[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments:  streaming-kafka-assembly/assembly
```
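For context, the root cause is plain Python string handling: without a trailing comma, the two adjacent string literals in each print call are concatenated into one string, and `.join()` is then invoked on that whole message, so the message itself becomes the separator between every profile and goal. A minimal sketch (with a shortened message and a made-up argument list, not taken from the patch) illustrating the before/after behavior:

```python
# Hypothetical argument list for illustration only.
profiles_and_goals = ["-Phadoop-1", "-Dhadoop.version=1.0.4", "package"]

# Before the fix: no comma, so the two adjacent literals concatenate and
# .join() runs with the whole message as the separator.
print("[info] Building Spark using SBT with these arguments: "
      " ".join(profiles_and_goals))
# -> -Phadoop-1[info] Building Spark using SBT with these arguments:  -Dhadoop.version=1.0.4[info] ...

# After the fix: the comma makes print() receive two arguments, so the
# message is printed once, followed by the space-joined profiles and goals.
print("[info] Building Spark using SBT with these arguments: ",
      " ".join(profiles_and_goals))
# -> [info] Building Spark using SBT with these arguments:   -Phadoop-1 -Dhadoop.version=1.0.4 package
```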

Author: Brennon York <br...@capitalone.com>

Closes #7085 from brennonyork/SPARK-8693 and squashes the following commits:

c5575f1 [Brennon York] added commas to end of print statements for proper printing


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5c796d57
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5c796d57
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5c796d57

Branch: refs/heads/master
Commit: 5c796d576ec2de96bf72dbf6ccd0e85480a6e3b1
Parents: 630bd5f
Author: Brennon York <br...@capitalone.com>
Authored: Mon Jun 29 08:55:06 2015 -0700
Committer: Josh Rosen <jo...@databricks.com>
Committed: Mon Jun 29 08:55:06 2015 -0700

----------------------------------------------------------------------
 dev/run-tests.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/5c796d57/dev/run-tests.py
----------------------------------------------------------------------
diff --git a/dev/run-tests.py b/dev/run-tests.py
index eb79a2a..e5c897b 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -210,7 +210,7 @@ def build_spark_documentation():
     jekyll_bin = which("jekyll")
 
     if not jekyll_bin:
-        print("[error] Cannot find a version of `jekyll` on the system; please"
+        print("[error] Cannot find a version of `jekyll` on the system; please",
               " install one and retry to build documentation.")
         sys.exit(int(os.environ.get("CURRENT_BLOCK", 255)))
     else:
@@ -270,7 +270,7 @@ def get_hadoop_profiles(hadoop_version):
     if hadoop_version in sbt_maven_hadoop_profiles:
         return sbt_maven_hadoop_profiles[hadoop_version]
     else:
-        print("[error] Could not find", hadoop_version, "in the list. Valid options"
+        print("[error] Could not find", hadoop_version, "in the list. Valid options",
               " are", sbt_maven_hadoop_profiles.keys())
         sys.exit(int(os.environ.get("CURRENT_BLOCK", 255)))
 
@@ -281,7 +281,7 @@ def build_spark_maven(hadoop_version):
     mvn_goals = ["clean", "package", "-DskipTests"]
     profiles_and_goals = build_profiles + mvn_goals
 
-    print("[info] Building Spark (w/Hive 0.13.1) using Maven with these arguments: "
+    print("[info] Building Spark (w/Hive 0.13.1) using Maven with these arguments: ",
           " ".join(profiles_and_goals))
 
     exec_maven(profiles_and_goals)
@@ -295,7 +295,7 @@ def build_spark_sbt(hadoop_version):
                  "streaming-kafka-assembly/assembly"]
     profiles_and_goals = build_profiles + sbt_goals
 
-    print("[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: "
+    print("[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: ",
           " ".join(profiles_and_goals))
 
     exec_sbt(profiles_and_goals)
@@ -324,7 +324,7 @@ def run_scala_tests_maven(test_profiles):
     mvn_test_goals = ["test", "--fail-at-end"]
     profiles_and_goals = test_profiles + mvn_test_goals
 
-    print("[info] Running Spark tests using Maven with these arguments: "
+    print("[info] Running Spark tests using Maven with these arguments: ",
           " ".join(profiles_and_goals))
 
     exec_maven(profiles_and_goals)
@@ -339,7 +339,7 @@ def run_scala_tests_sbt(test_modules, test_profiles):
 
     profiles_and_goals = test_profiles + list(sbt_test_goals)
 
-    print("[info] Running Spark tests using SBT with these arguments: "
+    print("[info] Running Spark tests using SBT with these arguments: ",
           " ".join(profiles_and_goals))
 
     exec_sbt(profiles_and_goals)
@@ -382,7 +382,7 @@ def run_sparkr_tests():
 def main():
     # Ensure the user home directory (HOME) is valid and is an absolute directory
     if not USER_HOME or not os.path.isabs(USER_HOME):
-        print("[error] Cannot determine your home directory as an absolute path;"
+        print("[error] Cannot determine your home directory as an absolute path;",
               " ensure the $HOME environment variable is set properly.")
         sys.exit(1)
 
@@ -397,7 +397,7 @@ def main():
     java_exe = determine_java_executable()
 
     if not java_exe:
-        print("[error] Cannot find a version of `java` on the system; please"
+        print("[error] Cannot find a version of `java` on the system; please",
               " install one and retry.")
         sys.exit(2)
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org