You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sr...@apache.org on 2016/10/08 11:12:41 UTC
spark git commit: [MINOR][SQL] Use resource path for test_script.sh
Repository: spark
Updated Branches:
refs/heads/master 4201ddcc0 -> 8a6bbe095
[MINOR][SQL] Use resource path for test_script.sh
## What changes were proposed in this pull request?
This PR modifies the test case `test("script")` to use the resource path for `test_script.sh`, making the test case portable (it now also passes when run from IntelliJ).
## How was this patch tested?
Passed the test case.
Before:
Running `test("script")` in IntelliJ fails with:
```
Caused by: org.apache.spark.SparkException: Subprocess exited with status 127. Error: bash: src/test/resources/test_script.sh: No such file or directory
```
After:
Test passed.
Author: Weiqing Yang <ya...@gmail.com>
Closes #15246 from weiqingy/hivetest.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8a6bbe09
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8a6bbe09
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8a6bbe09
Branch: refs/heads/master
Commit: 8a6bbe095b6a9aa33989c0deaa5ed0128d70320f
Parents: 4201ddc
Author: Weiqing Yang <ya...@gmail.com>
Authored: Sat Oct 8 12:12:35 2016 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Sat Oct 8 12:12:35 2016 +0100
----------------------------------------------------------------------
.../test/scala/org/apache/spark/SparkFunSuite.scala | 11 +++++++++++
.../spark/deploy/history/HistoryServerSuite.scala | 6 +++---
.../src/test/scala/org/apache/spark/ui/UISuite.scala | 3 ++-
.../spark/sql/catalyst/LogicalPlanToSQLSuite.scala | 2 +-
.../apache/spark/sql/hive/HiveSparkSubmitSuite.scala | 3 ++-
.../spark/sql/hive/execution/SQLQuerySuite.scala | 15 +++++++++------
6 files changed, 28 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/8a6bbe09/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
index cd87680..18077c0 100644
--- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -18,6 +18,8 @@
package org.apache.spark
// scalastyle:off
+import java.io.File
+
import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}
import org.apache.spark.internal.Logging
@@ -41,6 +43,15 @@ abstract class SparkFunSuite
}
}
+ // helper function
+ protected final def getTestResourceFile(file: String): File = {
+ new File(getClass.getClassLoader.getResource(file).getFile)
+ }
+
+ protected final def getTestResourcePath(file: String): String = {
+ getTestResourceFile(file).getCanonicalPath
+ }
+
/**
* Log the suite name and the test name before and after each test.
*
http://git-wip-us.apache.org/repos/asf/spark/blob/8a6bbe09/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index 5b316b2..a595bc1 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -59,8 +59,8 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
with JsonTestUtils with Eventually with WebBrowser with LocalSparkContext
with ResetSystemProperties {
- private val logDir = new File("src/test/resources/spark-events")
- private val expRoot = new File("src/test/resources/HistoryServerExpectations/")
+ private val logDir = getTestResourcePath("spark-events")
+ private val expRoot = getTestResourceFile("HistoryServerExpectations")
private var provider: FsHistoryProvider = null
private var server: HistoryServer = null
@@ -68,7 +68,7 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
def init(): Unit = {
val conf = new SparkConf()
- .set("spark.history.fs.logDirectory", logDir.getAbsolutePath)
+ .set("spark.history.fs.logDirectory", logDir)
.set("spark.history.fs.update.interval", "0")
.set("spark.testing", "true")
provider = new FsHistoryProvider(conf)
http://git-wip-us.apache.org/repos/asf/spark/blob/8a6bbe09/core/src/test/scala/org/apache/spark/ui/UISuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index dbb8dca..4abcfb7 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -53,9 +53,10 @@ class UISuite extends SparkFunSuite {
}
private def sslEnabledConf(): (SparkConf, SSLOptions) = {
+ val keyStoreFilePath = getTestResourcePath("spark.keystore")
val conf = new SparkConf()
.set("spark.ssl.ui.enabled", "true")
- .set("spark.ssl.ui.keyStore", "./src/test/resources/spark.keystore")
+ .set("spark.ssl.ui.keyStore", keyStoreFilePath)
.set("spark.ssl.ui.keyStorePassword", "123456")
.set("spark.ssl.ui.keyPassword", "123456")
(conf, new SecurityManager(conf).getSSLOptions("ui"))
http://git-wip-us.apache.org/repos/asf/spark/blob/8a6bbe09/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
index 9ac1e86..c7f10e5 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
@@ -45,7 +45,7 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
// Used for generating new query answer files by saving
private val regenerateGoldenFiles: Boolean = System.getenv("SPARK_GENERATE_GOLDEN_FILES") == "1"
- private val goldenSQLPath = "src/test/resources/sqlgen/"
+ private val goldenSQLPath = getTestResourcePath("sqlgen")
protected override def beforeAll(): Unit = {
super.beforeAll()
http://git-wip-us.apache.org/repos/asf/spark/blob/8a6bbe09/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index 29317e2..d3873cf 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -152,7 +152,8 @@ class HiveSparkSubmitSuite
case v if v.startsWith("2.10") || v.startsWith("2.11") => v.substring(0, 4)
case x => throw new Exception(s"Unsupported Scala Version: $x")
}
- val testJar = s"sql/hive/src/test/resources/regression-test-SPARK-8489/test-$version.jar"
+ val jarDir = getTestResourcePath("regression-test-SPARK-8489")
+ val testJar = s"$jarDir/test-$version.jar"
val args = Seq(
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
http://git-wip-us.apache.org/repos/asf/spark/blob/8a6bbe09/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 6c77a0d..6f2a166 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -66,13 +66,14 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
import spark.implicits._
test("script") {
+ val scriptFilePath = getTestResourcePath("test_script.sh")
if (testCommandAvailable("bash") && testCommandAvailable("echo | sed")) {
val df = Seq(("x1", "y1", "z1"), ("x2", "y2", "z2")).toDF("c1", "c2", "c3")
df.createOrReplaceTempView("script_table")
val query1 = sql(
- """
+ s"""
|SELECT col1 FROM (from(SELECT c1, c2, c3 FROM script_table) tempt_table
- |REDUCE c1, c2, c3 USING 'bash src/test/resources/test_script.sh' AS
+ |REDUCE c1, c2, c3 USING 'bash $scriptFilePath' AS
|(col1 STRING, col2 STRING)) script_test_table""".stripMargin)
checkAnswer(query1, Row("x1_y1") :: Row("x2_y2") :: Nil)
}
@@ -1290,11 +1291,12 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
.selectExpr("id AS a", "id AS b")
.createOrReplaceTempView("test")
+ val scriptFilePath = getTestResourcePath("data")
checkAnswer(
sql(
- """FROM(
+ s"""FROM(
| FROM test SELECT TRANSFORM(a, b)
- | USING 'python src/test/resources/data/scripts/test_transform.py "\t"'
+ | USING 'python $scriptFilePath/scripts/test_transform.py "\t"'
| AS (c STRING, d STRING)
|) t
|SELECT c
@@ -1308,12 +1310,13 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
.selectExpr("id AS a", "id AS b")
.createOrReplaceTempView("test")
+ val scriptFilePath = getTestResourcePath("data")
val df = sql(
- """FROM test
+ s"""FROM test
|SELECT TRANSFORM(a, b)
|ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|WITH SERDEPROPERTIES('field.delim' = '|')
- |USING 'python src/test/resources/data/scripts/test_transform.py "|"'
+ |USING 'python $scriptFilePath/scripts/test_transform.py "|"'
|AS (c STRING, d STRING)
|ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|WITH SERDEPROPERTIES('field.delim' = '|')
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org