Posted to commits@spark.apache.org by rx...@apache.org on 2016/02/04 21:20:20 UTC

spark git commit: [SPARK-13079][SQL] InMemoryCatalog follow-ups

Repository: spark
Updated Branches:
  refs/heads/master c756bda47 -> bd38dd6f7


[SPARK-13079][SQL] InMemoryCatalog follow-ups

This patch incorporates review feedback from #11069, which is already merged.

Author: Andrew Or <an...@databricks.com>

Closes #11080 from andrewor14/catalog-follow-ups.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bd38dd6f
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bd38dd6f
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bd38dd6f

Branch: refs/heads/master
Commit: bd38dd6f75c4af0f8f32bb21a82da53fffa5e825
Parents: c756bda
Author: Andrew Or <an...@databricks.com>
Authored: Thu Feb 4 12:20:18 2016 -0800
Committer: Reynold Xin <rx...@databricks.com>
Committed: Thu Feb 4 12:20:18 2016 -0800

----------------------------------------------------------------------
 .../spark/sql/catalyst/catalog/interface.scala       | 15 +++++++++++++++
 .../sql/catalyst/catalog/CatalogTestCases.scala      | 12 +++++++-----
 2 files changed, 22 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/bd38dd6f/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
index b4d7dd2..56aaa6b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
@@ -39,6 +39,9 @@ abstract class Catalog {
 
   def dropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit
 
+  /**
+   * Alter an existing database. This operation does not support renaming.
+   */
   def alterDatabase(db: String, dbDefinition: Database): Unit
 
   def getDatabase(db: String): Database
@@ -57,6 +60,9 @@ abstract class Catalog {
 
   def renameTable(db: String, oldName: String, newName: String): Unit
 
+  /**
+   * Alter an existing table. This operation does not support renaming.
+   */
   def alterTable(db: String, table: String, tableDefinition: Table): Unit
 
   def getTable(db: String, table: String): Table
@@ -81,6 +87,9 @@ abstract class Catalog {
       parts: Seq[PartitionSpec],
       ignoreIfNotExists: Boolean): Unit
 
+  /**
+   * Alter an existing table partition and optionally override its spec.
+   */
   def alterPartition(
       db: String,
       table: String,
@@ -100,6 +109,9 @@ abstract class Catalog {
 
   def dropFunction(db: String, funcName: String): Unit
 
+  /**
+   * Alter an existing function and optionally override its name.
+   */
   def alterFunction(db: String, funcName: String, funcDefinition: Function): Unit
 
   def getFunction(db: String, funcName: String): Function
@@ -194,5 +206,8 @@ case class Database(
 
 
 object Catalog {
+  /**
+   * Specifications of a table partition indexed by column name.
+   */
   type PartitionSpec = Map[String, String]
 }
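
For illustration, here is a minimal sketch of the contract these new comments pin down, written against the interface above. It assumes the Database case class (whose fields are elided from this diff) carries a `properties` map; the "owner" property and the `demo` wrapper are purely illustrative:

  import org.apache.spark.sql.catalyst.catalog.{Catalog, Database}
  import org.apache.spark.sql.catalyst.catalog.Catalog.PartitionSpec

  object AlterContractSketch {
    def demo(catalog: Catalog): Unit = {
      // alterDatabase may change any field of the existing definition,
      // but the name must stay the same: renaming is unsupported here.
      val db: Database = catalog.getDatabase("db1")
      catalog.alterDatabase("db1",
        db.copy(properties = db.properties + ("owner" -> "alice")))

      // A PartitionSpec is just a map from partition column name to value,
      // matching the fixtures used in CatalogTestCases below.
      val spec: PartitionSpec = Map("a" -> "1")
    }
  }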

http://git-wip-us.apache.org/repos/asf/spark/blob/bd38dd6f/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
index 0d84343..45c5cee 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
@@ -27,10 +27,10 @@ import org.apache.spark.sql.AnalysisException
  * Implementations of the [[Catalog]] interface can create test suites by extending this.
  */
 abstract class CatalogTestCases extends SparkFunSuite {
-  private val storageFormat = StorageFormat("usa", "$", "zzz", "serde", Map.empty[String, String])
-  private val part1 = TablePartition(Map[String, String]("a" -> "1"), storageFormat)
-  private val part2 = TablePartition(Map[String, String]("b" -> "2"), storageFormat)
-  private val part3 = TablePartition(Map[String, String]("c" -> "3"), storageFormat)
+  private val storageFormat = StorageFormat("usa", "$", "zzz", "serde", Map())
+  private val part1 = TablePartition(Map("a" -> "1"), storageFormat)
+  private val part2 = TablePartition(Map("b" -> "2"), storageFormat)
+  private val part3 = TablePartition(Map("c" -> "3"), storageFormat)
   private val funcClass = "org.apache.spark.myFunc"
 
   protected def newEmptyCatalog(): Catalog
@@ -42,6 +42,8 @@ abstract class CatalogTestCases extends SparkFunSuite {
    * db2
    *   - tbl1
    *   - tbl2
+   *     - part1
+   *     - part2
    *   - func1
    */
   private def newBasicCatalog(): Catalog = {
@@ -50,8 +52,8 @@ abstract class CatalogTestCases extends SparkFunSuite {
     catalog.createDatabase(newDb("db2"), ignoreIfExists = false)
     catalog.createTable("db2", newTable("tbl1"), ignoreIfExists = false)
     catalog.createTable("db2", newTable("tbl2"), ignoreIfExists = false)
-    catalog.createFunction("db2", newFunc("func1"), ignoreIfExists = false)
     catalog.createPartitions("db2", "tbl2", Seq(part1, part2), ignoreIfExists = false)
+    catalog.createFunction("db2", newFunc("func1"), ignoreIfExists = false)
     catalog
   }
 

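For context, a concrete implementation opts into these shared tests by supplying the single factory method the suite declares. A minimal sketch, assuming the InMemoryCatalog from #11069 has a no-argument constructor:

  import org.apache.spark.sql.catalyst.catalog.{Catalog, CatalogTestCases, InMemoryCatalog}

  // Runs every test case defined in CatalogTestCases against the
  // in-memory backend; newEmptyCatalog is the only required hook.
  class InMemoryCatalogSuite extends CatalogTestCases {
    override protected def newEmptyCatalog(): Catalog = new InMemoryCatalog
  }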

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org