You are viewing a plain text version of this content; the canonical (hyperlinked) version is available in the mailing-list archive.
Posted to commits@hive.apache.org by xu...@apache.org on 2015/01/27 22:27:27 UTC
svn commit: r1655164 - in /hive/branches/spark: pom.xml
spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
Author: xuefu
Date: Tue Jan 27 21:27:27 2015
New Revision: 1655164
URL: http://svn.apache.org/r1655164
Log:
HIVE-9379: Fix tests with some versions of Spark + Snappy [Spark Branch] (Brock via Xuefu)
Modified:
hive/branches/spark/pom.xml
hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
Modified: hive/branches/spark/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/pom.xml?rev=1655164&r1=1655163&r2=1655164&view=diff
==============================================================================
--- hive/branches/spark/pom.xml (original)
+++ hive/branches/spark/pom.xml Tue Jan 27 21:27:27 2015
@@ -824,6 +824,7 @@
<HADOOP_CLASSPATH>${test.tmp.dir}/conf:${basedir}/${hive.path.to.root}/conf</HADOOP_CLASSPATH>
<HIVE_HADOOP_TEST_CLASSPATH>${test.hive.hadoop.classpath}</HIVE_HADOOP_TEST_CLASSPATH>
<SPARK_SUBMIT_CLASSPATH>${spark.home}/lib/spark-assembly-${spark.version}-hadoop2.4.0.jar:${test.hive.hadoop.classpath}</SPARK_SUBMIT_CLASSPATH>
+ <SPARK_OSX_TEST_OPTS>-Dorg.xerial.snappy.tempdir=/tmp -Dorg.xerial.snappy.lib.name=libsnappyjava.jnilib</SPARK_OSX_TEST_OPTS>
<PATH>${env.PATH}${test.extra.path}</PATH>
</environmentVariables>
<systemPropertyVariables>
Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java?rev=1655164&r1=1655163&r2=1655164&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java Tue Jan 27 21:27:27 2015
@@ -64,6 +64,7 @@ class SparkClientImpl implements SparkCl
private static final long DEFAULT_SHUTDOWN_TIMEOUT = 10000; // In milliseconds
+ private static final String OSX_TEST_OPTS = "SPARK_OSX_TEST_OPTS";
private static final String DRIVER_OPTS_KEY = "spark.driver.extraJavaOptions";
private static final String EXECUTOR_OPTS_KEY = "spark.executor.extraJavaOptions";
private static final String DRIVER_EXTRA_CLASSPATH = "spark.driver.extraClassPath";
@@ -219,10 +220,16 @@ class SparkClientImpl implements SparkCl
sparkLogDir = sparkHome + "/logs/";
}
}
+
+ String osxTestOpts = "";
+ if (Strings.nullToEmpty(System.getProperty("os.name")).toLowerCase().contains("mac")) {
+ osxTestOpts = Strings.nullToEmpty(System.getenv(OSX_TEST_OPTS));
+ }
+
String driverJavaOpts = Joiner.on(" ").skipNulls().join(
- "-Dhive.spark.log.dir=" + sparkLogDir, conf.get(DRIVER_OPTS_KEY));
+ "-Dhive.spark.log.dir=" + sparkLogDir, osxTestOpts, conf.get(DRIVER_OPTS_KEY));
String executorJavaOpts = Joiner.on(" ").skipNulls().join(
- "-Dhive.spark.log.dir=" + sparkLogDir, conf.get(EXECUTOR_OPTS_KEY));
+ "-Dhive.spark.log.dir=" + sparkLogDir, osxTestOpts, conf.get(EXECUTOR_OPTS_KEY));
// Create a file with all the job properties to be read by spark-submit. Change the
// file's permissions so that only the owner can read it. This avoid having the