You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ya...@apache.org on 2018/12/04 13:08:39 UTC
spark git commit: [MINOR][SQL] Combine the same codes in test cases
Repository: spark
Updated Branches:
refs/heads/master 261284842 -> 93f5592aa
[MINOR][SQL] Combine the same codes in test cases
## What changes were proposed in this pull request?
In DDLSuite, four test cases contain the same setup/teardown code; extracting a shared helper function removes the duplication.
## How was this patch tested?
Existing tests.
Closes #23194 from CarolinePeng/Update_temp.
Authored-by: 彭灿00244106 <00...@zte.intra>
Signed-off-by: Takeshi Yamamuro <ya...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/93f5592a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/93f5592a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/93f5592a
Branch: refs/heads/master
Commit: 93f5592aa8c1254a93524fda81cf0e418c22cb2f
Parents: 2612848
Author: 彭灿00244106 <00...@zte.intra>
Authored: Tue Dec 4 22:08:16 2018 +0900
Committer: Takeshi Yamamuro <ya...@apache.org>
Committed: Tue Dec 4 22:08:16 2018 +0900
----------------------------------------------------------------------
.../spark/sql/execution/command/DDLSuite.scala | 40 ++++++++------------
1 file changed, 16 insertions(+), 24 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/93f5592a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 9d32fb6..052a5e7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -377,41 +377,41 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
}
}
- test("CTAS a managed table with the existing empty directory") {
- val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
+ private def withEmptyDirInTablePath(dirName: String)(f : File => Unit): Unit = {
+ val tableLoc =
+ new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier(dirName)))
try {
tableLoc.mkdir()
+ f(tableLoc)
+ } finally {
+ waitForTasksToFinish()
+ Utils.deleteRecursively(tableLoc)
+ }
+ }
+
+
+ test("CTAS a managed table with the existing empty directory") {
+ withEmptyDirInTablePath("tab1") { tableLoc =>
withTable("tab1") {
sql(s"CREATE TABLE tab1 USING ${dataSource} AS SELECT 1, 'a'")
checkAnswer(spark.table("tab1"), Row(1, "a"))
}
- } finally {
- waitForTasksToFinish()
- Utils.deleteRecursively(tableLoc)
}
}
test("create a managed table with the existing empty directory") {
- val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
- try {
- tableLoc.mkdir()
+ withEmptyDirInTablePath("tab1") { tableLoc =>
withTable("tab1") {
sql(s"CREATE TABLE tab1 (col1 int, col2 string) USING ${dataSource}")
sql("INSERT INTO tab1 VALUES (1, 'a')")
checkAnswer(spark.table("tab1"), Row(1, "a"))
}
- } finally {
- waitForTasksToFinish()
- Utils.deleteRecursively(tableLoc)
}
}
test("create a managed table with the existing non-empty directory") {
withTable("tab1") {
- val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
- try {
- // create an empty hidden file
- tableLoc.mkdir()
+ withEmptyDirInTablePath("tab1") { tableLoc =>
val hiddenGarbageFile = new File(tableLoc.getCanonicalPath, ".garbage")
hiddenGarbageFile.createNewFile()
val exMsg = "Can not create the managed table('`tab1`'). The associated location"
@@ -439,28 +439,20 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
}.getMessage
assert(ex.contains(exMsgWithDefaultDB))
}
- } finally {
- waitForTasksToFinish()
- Utils.deleteRecursively(tableLoc)
}
}
}
test("rename a managed table with existing empty directory") {
- val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab2")))
- try {
+ withEmptyDirInTablePath("tab2") { tableLoc =>
withTable("tab1") {
sql(s"CREATE TABLE tab1 USING $dataSource AS SELECT 1, 'a'")
- tableLoc.mkdir()
val ex = intercept[AnalysisException] {
sql("ALTER TABLE tab1 RENAME TO tab2")
}.getMessage
val expectedMsg = "Can not rename the managed table('`tab1`'). The associated location"
assert(ex.contains(expectedMsg))
}
- } finally {
- waitForTasksToFinish()
- Utils.deleteRecursively(tableLoc)
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org