Posted to commits@spark.apache.org by yh...@apache.org on 2016/04/06 06:23:23 UTC

spark git commit: [SPARK-14128][SQL] Alter table DDL followup

Repository: spark
Updated Branches:
  refs/heads/master f6456fa80 -> adbfdb878


[SPARK-14128][SQL] Alter table DDL followup

## What changes were proposed in this pull request?

This is just a followup to #12121, which implemented the alter table DDLs using the `SessionCatalog`. Specifically, this corrects the behavior of setting the location of a datasource table. For datasource tables, we need to set the `locationUri` in addition to the `path` entry in the serde properties. Additionally, changing the location of an individual partition of a datasource table is no longer allowed and now raises an `AnalysisException`.
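
To make the new behavior concrete, here is a minimal sketch (not part of the patch) for a partitioned datasource table `dbx.tab1` with partition column `a`, mirroring the statements exercised in `DDLSuite` below; `spark` stands for whichever SQL entry point is in scope:

```scala
import org.apache.spark.sql.AnalysisException

// Setting the table location now records it in both places for a datasource table:
// storage.locationUri and the "path" entry in storage.serdeProperties.
spark.sql("ALTER TABLE dbx.tab1 SET LOCATION '/path/to/your/lovely/heart'")

// Setting the location of a single partition is rejected for datasource tables.
try {
  spark.sql("ALTER TABLE dbx.tab1 PARTITION (a='1') SET LOCATION '/path/to/part/ways'")
} catch {
  case e: AnalysisException =>
    // "alter table set location for partition is not allowed for tables defined
    // using the datasource API"
    println(e.getMessage)
}
```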

## How was this patch tested?

`DDLSuite`
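
The added assertions in `DDLSuite` (diff below) verify the catalog state after each `SET LOCATION`; for a datasource table the whole-table check is roughly the following paraphrase of the suite's `verifyLocation` helper (simplified, not the exact test code):

```scala
// After ALTER TABLE ... SET LOCATION on a datasource table, both fields are updated.
val storageFormat = catalog.getTable(tableIdent).storage
assert(storageFormat.locationUri === Some(expected))
assert(storageFormat.serdeProperties.get("path") === Some(expected))
```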

Author: Andrew Or <an...@databricks.com>

Closes #12186 from andrewor14/alter-table-ddl-followup.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/adbfdb87
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/adbfdb87
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/adbfdb87

Branch: refs/heads/master
Commit: adbfdb878dd1029738db3d1955d08b33de1aa8a9
Parents: f6456fa
Author: Andrew Or <an...@databricks.com>
Authored: Tue Apr 5 21:23:20 2016 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Tue Apr 5 21:23:20 2016 -0700

----------------------------------------------------------------------
 .../spark/sql/execution/command/ddl.scala       |  6 ++++--
 .../spark/sql/execution/command/DDLSuite.scala  | 20 +++++++++++++++++---
 2 files changed, 21 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/adbfdb87/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index 0d38c41..6d56a6f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -383,8 +383,9 @@ case class AlterTableSetLocation(
         val part = catalog.getPartition(tableName, spec)
         val newPart =
           if (DDLUtils.isDatasourceTable(table)) {
-            part.copy(storage = part.storage.copy(
-              serdeProperties = part.storage.serdeProperties ++ Map("path" -> location)))
+            throw new AnalysisException(
+              "alter table set location for partition is not allowed for tables defined " +
+              "using the datasource API")
           } else {
             part.copy(storage = part.storage.copy(locationUri = Some(location)))
           }
@@ -394,6 +395,7 @@ case class AlterTableSetLocation(
         val newTable =
           if (DDLUtils.isDatasourceTable(table)) {
             table.withNewStorage(
+              locationUri = Some(location),
               serdeProperties = table.storage.serdeProperties ++ Map("path" -> location))
           } else {
             table.withNewStorage(locationUri = Some(location))

http://git-wip-us.apache.org/repos/asf/spark/blob/adbfdb87/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index d8e2c94..a8db4e9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -417,23 +417,37 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
         .map { s => catalog.getPartition(tableIdent, s).storage }
         .getOrElse { catalog.getTable(tableIdent).storage }
       if (isDatasourceTable) {
-        assert(storageFormat.serdeProperties.get("path") === Some(expected))
+        if (spec.isDefined) {
+          assert(storageFormat.serdeProperties.isEmpty)
+          assert(storageFormat.locationUri.isEmpty)
+        } else {
+          assert(storageFormat.serdeProperties.get("path") === Some(expected))
+          assert(storageFormat.locationUri === Some(expected))
+        }
       } else {
         assert(storageFormat.locationUri === Some(expected))
       }
     }
+    // Optionally expect AnalysisException
+    def maybeWrapException[T](expectException: Boolean)(body: => T): Unit = {
+      if (expectException) intercept[AnalysisException] { body } else body
+    }
     // set table location
     sql("ALTER TABLE dbx.tab1 SET LOCATION '/path/to/your/lovely/heart'")
     verifyLocation("/path/to/your/lovely/heart")
     // set table partition location
-    sql("ALTER TABLE dbx.tab1 PARTITION (a='1') SET LOCATION '/path/to/part/ways'")
+    maybeWrapException(isDatasourceTable) {
+      sql("ALTER TABLE dbx.tab1 PARTITION (a='1') SET LOCATION '/path/to/part/ways'")
+    }
     verifyLocation("/path/to/part/ways", Some(partSpec))
     // set table location without explicitly specifying database
     catalog.setCurrentDatabase("dbx")
     sql("ALTER TABLE tab1 SET LOCATION '/swanky/steak/place'")
     verifyLocation("/swanky/steak/place")
     // set table partition location without explicitly specifying database
-    sql("ALTER TABLE tab1 PARTITION (a='1') SET LOCATION 'vienna'")
+    maybeWrapException(isDatasourceTable) {
+      sql("ALTER TABLE tab1 PARTITION (a='1') SET LOCATION 'vienna'")
+    }
     verifyLocation("vienna", Some(partSpec))
     // table to alter does not exist
     intercept[AnalysisException] {

