Posted to commits@spark.apache.org by we...@apache.org on 2017/01/17 11:42:06 UTC

spark git commit: [SPARK-19240][SQL][TEST] add test for setting location for managed table

Repository: spark
Updated Branches:
  refs/heads/master 84f0b645b -> a774bca05


[SPARK-19240][SQL][TEST] add test for setting location for managed table

## What changes were proposed in this pull request?

SET LOCATION also works on a managed table (i.e. a table created without a custom path). The behavior is a little surprising, but since it is already supported, we should add a test that shows the behavior explicitly.
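
For illustration, a minimal sketch of the behavior exercised by the new test, written against the SparkSession API (the `spark` handle and the `/tmp/new_location` path are assumptions for the example, not part of the patch):

```scala
// Managed table: Spark chooses its location under the warehouse directory.
spark.sql("CREATE TABLE tbl(i INT) USING parquet")
spark.sql("INSERT INTO tbl SELECT 1")

// Point the table at a new, empty directory. Existing data is NOT moved,
// so the table looks empty afterwards.
spark.sql("ALTER TABLE tbl SET LOCATION '/tmp/new_location'")
assert(spark.table("tbl").count() == 0)

// New writes go to the new location.
spark.sql("INSERT INTO tbl SELECT 2")

// The table is still managed, so DROP TABLE removes the data at the new location.
spark.sql("DROP TABLE tbl")
```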

## How was this patch tested?

N/A

Author: Wenchen Fan <we...@databricks.com>

Closes #16597 from cloud-fan/set-location.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a774bca0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a774bca0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a774bca0

Branch: refs/heads/master
Commit: a774bca05ec6dd0deec638048dc8672a84427f49
Parents: 84f0b64
Author: Wenchen Fan <we...@databricks.com>
Authored: Tue Jan 17 19:42:02 2017 +0800
Committer: Wenchen Fan <we...@databricks.com>
Committed: Tue Jan 17 19:42:02 2017 +0800

----------------------------------------------------------------------
 .../spark/sql/execution/command/DDLSuite.scala  | 28 ++++++++++++++++++++
 1 file changed, 28 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/a774bca0/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index ac3878e..97990a6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.execution.command
 
 import java.io.File
+import java.net.URI
 
 import org.apache.hadoop.fs.Path
 import org.scalatest.BeforeAndAfterEach
@@ -1787,4 +1788,31 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     val rows: Seq[Row] = df.toLocalIterator().asScala.toSeq
     assert(rows.length > 0)
   }
+
+  test("SET LOCATION for managed table") {
+    withTable("src") {
+      withTempDir { dir =>
+        sql("CREATE TABLE tbl(i INT) USING parquet")
+        sql("INSERT INTO tbl SELECT 1")
+        checkAnswer(spark.table("tbl"), Row(1))
+        val defaultTablePath = spark.sessionState.catalog
+          .getTableMetadata(TableIdentifier("tbl")).storage.locationUri.get
+
+        sql(s"ALTER TABLE tbl SET LOCATION '${dir.getCanonicalPath}'")
+        // SET LOCATION won't move data from the previous table path to the new one.
+        assert(spark.table("tbl").count() == 0)
+        // the previous table path should still be there.
+        assert(new File(new URI(defaultTablePath)).exists())
+
+        sql("INSERT INTO tbl SELECT 2")
+        checkAnswer(spark.table("tbl"), Row(2))
+        // newly inserted data will go to the new table path.
+        assert(dir.listFiles().nonEmpty)
+
+        sql("DROP TABLE tbl")
+        // the new table path will be removed after DROP TABLE.
+        assert(!dir.exists())
+      }
+    }
+  }
 }
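
For context, the metadata side of this can be inspected the same way the test captures the default table path, via the session catalog. A minimal sketch (it mirrors the suite above and, like it, relies on spark.sessionState, which may not be accessible outside Spark's own sql packages in this version; illustrative only, not part of the patch):

    import org.apache.spark.sql.catalyst.TableIdentifier

    // After ALTER TABLE ... SET LOCATION, locationUri points at the new directory,
    // while the table type stays MANAGED, which is why DROP TABLE still deletes
    // the data under the new path, as the test asserts.
    val meta = spark.sessionState.catalog.getTableMetadata(TableIdentifier("tbl"))
    println(meta.storage.locationUri) // Some(<new location>)
    println(meta.tableType)           // MANAGED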

