Posted to commits@spark.apache.org by do...@apache.org on 2019/05/20 04:31:50 UTC

[spark] branch master updated: [SPARK-27693][SQL] Add default catalog property

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new bc46fea  [SPARK-27693][SQL] Add default catalog property
bc46fea is described below

commit bc46feaced9f6694d4829af3cf5637bb1272ef77
Author: Ryan Blue <bl...@apache.org>
AuthorDate: Sun May 19 21:30:20 2019 -0700

    [SPARK-27693][SQL] Add default catalog property
    
    Add a SQL config property for the default v2 catalog.
    
    Existing tests cover regressions; the change also re-enables two CTAS
    tests that were previously ignored pending a default catalog.
    
    Closes #24594 from rdblue/SPARK-27693-add-default-catalog-config.
    
    Authored-by: Ryan Blue <bl...@apache.org>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../src/main/scala/org/apache/spark/sql/internal/SQLConf.scala     | 7 +++++++
 .../spark/sql/execution/datasources/DataSourceResolution.scala     | 4 +++-
 .../apache/spark/sql/execution/command/PlanResolutionSuite.scala   | 3 +--
 .../org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala     | 3 +--
 4 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index b4c68a7..b7e6135 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1767,6 +1767,11 @@ object SQLConf {
         "with String")
     .booleanConf
     .createWithDefault(false)
+
+  val DEFAULT_V2_CATALOG = buildConf("spark.sql.default.catalog")
+      .doc("Name of the default v2 catalog, used when a catalog is not identified in queries")
+      .stringConf
+      .createOptional
 }
 
 /**
@@ -2223,6 +2228,8 @@ class SQLConf extends Serializable with Logging {
 
   def castDatetimeToString: Boolean = getConf(SQLConf.LEGACY_CAST_DATETIME_TO_STRING)
 
+  def defaultV2Catalog: Option[String] = getConf(DEFAULT_V2_CATALOG)
+
   /** ********************** SQLConf functionality methods ************ */
 
   /** Set Spark SQL configuration properties. */
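
A note on the createOptional builder above: because no default value is
supplied, the conf surfaces as Option[String] (None when unset) rather than a
concrete string. A minimal sketch of setting and reading the new property from
user code; the local-mode session and the "testcat" name are assumptions for
illustration, not part of this commit:

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("default-catalog-sketch")
      .getOrCreate()

    // Unset, the optional conf yields None.
    assert(spark.conf.getOption("spark.sql.default.catalog").isEmpty)

    // Set at runtime; "testcat" is a hypothetical catalog name.
    spark.conf.set("spark.sql.default.catalog", "testcat")
    assert(spark.conf.getOption("spark.sql.default.catalog").contains("testcat"))
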
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala
index 09506f0..72b0503 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala
@@ -43,6 +43,8 @@ case class DataSourceResolution(
 
   override def lookupCatalog: Option[String => CatalogPlugin] = Some(findCatalog)
 
+  def defaultCatalog: Option[CatalogPlugin] = conf.defaultV2Catalog.map(findCatalog)
+
   override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
     case CreateTableStatement(
         AsTableIdentifier(table), schema, partitionCols, bucketSpec, properties,
@@ -67,7 +69,7 @@ case class DataSourceResolution(
     case create: CreateTableAsSelectStatement =>
       // the provider was not a v1 source, convert to a v2 plan
       val CatalogObjectIdentifier(maybeCatalog, identifier) = create.tableName
-      val catalog = maybeCatalog
+      val catalog = maybeCatalog.orElse(defaultCatalog)
           .getOrElse(throw new AnalysisException(
             s"No catalog specified for table ${identifier.quoted} and no default catalog is set"))
           .asTableCatalog
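
The fallback itself is plain Option.orElse: an explicit catalog in the
identifier wins, then the configured default, and only then does resolution
fail. A self-contained sketch of that shape; CatalogPlugin and findCatalog
here are stand-ins for illustration, not the real Spark types:

    object FallbackSketch {
      case class CatalogPlugin(name: String)

      // Stand-in for DataSourceResolution's catalog lookup.
      def findCatalog(name: String): CatalogPlugin = CatalogPlugin(name)

      def resolve(
          maybeCatalog: Option[CatalogPlugin],
          defaultName: Option[String]): CatalogPlugin = {
        val defaultCatalog = defaultName.map(findCatalog)
        // Explicit catalog first, then the configured default, then fail.
        maybeCatalog.orElse(defaultCatalog)
          .getOrElse(throw new IllegalArgumentException(
            "No catalog specified and no default catalog is set"))
      }

      def main(args: Array[String]): Unit = {
        println(resolve(None, Some("testcat")))                        // testcat
        println(resolve(Some(CatalogPlugin("prod")), Some("testcat"))) // prod wins
      }
    }
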
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
index c525b4c..f8119fd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
@@ -329,8 +329,7 @@ class PlanResolutionSuite extends AnalysisTest {
     }
   }
 
-  // TODO(rblue): enable this test after the default catalog is available
-  ignore("Test v2 CTAS with data source v2 provider") {
+  test("Test v2 CTAS with data source v2 provider") {
     val sql =
       s"""
         |CREATE TABLE IF NOT EXISTS mydb.page_view
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala
index a9bc036..0cfdfdd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala
@@ -66,8 +66,7 @@ class DataSourceV2SQLSuite extends QueryTest with SharedSQLContext with BeforeAn
     checkAnswer(spark.internalCreateDataFrame(rdd, table.schema), spark.table("source"))
   }
 
-  // TODO(rblue): enable this test after the default catalog is available
-  ignore("CreateTableAsSelect: use v2 plan because provider is v2") {
+  test("CreateTableAsSelect: use v2 plan because provider is v2") {
     spark.sql(s"CREATE TABLE table_name USING $orc2 AS SELECT id, data FROM source")
 
     val testCatalog = spark.catalog("testcat").asTableCatalog
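
Taken together, the two re-enabled tests exercise the path this commit
unblocks: a CTAS with a v2 provider but no catalog qualifier now falls back to
the configured default instead of failing. A hedged end-to-end sketch;
"testcat", com.example.InMemoryTableCatalog, and com.example.V2Source are
placeholders for illustration, not classes that ship with Spark:

    // Register a v2 catalog under the name "testcat" (the class must
    // implement Spark's catalog plugin API) and make it the default.
    spark.conf.set("spark.sql.catalog.testcat", "com.example.InMemoryTableCatalog")
    spark.conf.set("spark.sql.default.catalog", "testcat")

    // Unqualified v2 CTAS: resolution falls back to testcat instead of
    // throwing "No catalog specified for table ... and no default catalog is set".
    spark.sql(
      """CREATE TABLE mydb.page_view
        |USING com.example.V2Source
        |AS SELECT id, data FROM source""".stripMargin)
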

