You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kudu.apache.org by da...@apache.org on 2017/04/05 17:51:51 UTC

[4/4] kudu git commit: kudu-spark: switch default profile from spark1/2.10 to spark2/2.11

kudu-spark: switch default profile from spark1/2.10 to spark2/2.11

Now that we have kudu-spark-tools, which only works with Spark2, it's
probably best if developers default to running tests with Spark2.
This shouldn't affect CI or published artifacts.

Change-Id: Idf57e99890bd413a678e275af55b9c6315edf917
Reviewed-on: http://gerrit.cloudera.org:8080/6420
Tested-by: Kudu Jenkins
Reviewed-by: Jean-Daniel Cryans <jd...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kudu/repo
Commit: http://git-wip-us.apache.org/repos/asf/kudu/commit/34310a8e
Tree: http://git-wip-us.apache.org/repos/asf/kudu/tree/34310a8e
Diff: http://git-wip-us.apache.org/repos/asf/kudu/diff/34310a8e

Branch: refs/heads/master
Commit: 34310a8e9f050165ce9c93ae36f575ac408b6312
Parents: 94899aa
Author: Dan Burkert <da...@apache.org>
Authored: Fri Mar 17 14:07:39 2017 -0700
Committer: Dan Burkert <da...@apache.org>
Committed: Wed Apr 5 17:51:29 2017 +0000

----------------------------------------------------------------------
 build-support/jenkins/build-and-test.sh |  6 +++---
 java/pom.xml                            | 22 +++++++++++-----------
 2 files changed, 14 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kudu/blob/34310a8e/build-support/jenkins/build-and-test.sh
----------------------------------------------------------------------
diff --git a/build-support/jenkins/build-and-test.sh b/build-support/jenkins/build-and-test.sh
index 0c58969..c24a104 100755
--- a/build-support/jenkins/build-and-test.sh
+++ b/build-support/jenkins/build-and-test.sh
@@ -360,12 +360,12 @@ if [ "$BUILD_JAVA" == "1" ]; then
     EXIT_STATUS=1
     FAILURES="$FAILURES"$'Java build/test failed\n'
   fi
-  # Test kudu-spark and kudu-spark-tools with Spark 2.x + Scala 2.11 profile
+  # Test kudu-spark with Spark 1.x + Scala 2.10 profile
   # This won't work if there are ever Spark integration tests!
   rm -rf kudu-spark/target/
-  if ! mvn $MVN_FLAGS -Pspark2_2.11 -Dtest="org.apache.kudu.spark.*.*" test; then
+  if ! mvn $MVN_FLAGS -Pspark_2.10 -Dtest="org.apache.kudu.spark.*.*" test; then
     EXIT_STATUS=1
-    FAILURES="$FAILURES"$'spark2 build/test failed\n'
+    FAILURES="$FAILURES"$'spark build/test failed\n'
   fi
   set +x
   popd

http://git-wip-us.apache.org/repos/asf/kudu/blob/34310a8e/java/pom.xml
----------------------------------------------------------------------
diff --git a/java/pom.xml b/java/pom.xml
index 1a82ef2..403f185 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -321,21 +321,11 @@
              Kudu supports integration with Spark2/Scala2.11 *and* Spark1/Scala2.10
              with the same module, so we use profiles to control which mode to compile in. -->
         <profile>
-            <id>spark_2.10</id>
+            <id>spark2_2.11</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <properties>
-                <spark.version>${spark1.version}</spark.version>
-                <spark.version.label>spark</spark.version.label>
-                <scala.version>${scala-2.10.version}</scala.version>
-                <scala.binary.version>2.10</scala.binary.version>
-                <kudu-spark.compat.src>src/main/spark1</kudu-spark.compat.src>
-            </properties>
-        </profile>
-        <profile>
-            <id>spark2_2.11</id>
-            <properties>
                 <spark.version>${spark2.version}</spark.version>
                 <spark.version.label>spark2</spark.version.label>
                 <scala.version>${scala-2.11.version}</scala.version>
@@ -347,5 +337,15 @@
                 <module>kudu-spark-tools</module>
             </modules>
         </profile>
+        <profile>
+            <id>spark_2.10</id>
+            <properties>
+                <spark.version>${spark1.version}</spark.version>
+                <spark.version.label>spark</spark.version.label>
+                <scala.version>${scala-2.10.version}</scala.version>
+                <scala.binary.version>2.10</scala.binary.version>
+                <kudu-spark.compat.src>src/main/spark1</kudu-spark.compat.src>
+            </properties>
+        </profile>
     </profiles>
 </project>