Posted to commits@kyuubi.apache.org by ch...@apache.org on 2022/07/04 10:45:21 UTC

[incubator-kyuubi] branch master updated: [KYUUBI #2997] Use spark shim set current namespace

This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 407fc8db4 [KYUUBI #2997] Use spark shim set current namespace
407fc8db4 is described below

commit 407fc8db456c3707d7cb93f4ad73a74b466611c8
Author: ulysses-you <ul...@gmail.com>
AuthorDate: Mon Jul 4 18:45:11 2022 +0800

    [KYUUBI #2997] Use spark shim set current namespace
    
    ### _Why are the changes needed?_
    
    To make the behavior consistent.
    
    The master GA failed due to https://github.com/apache/spark/pull/36969
    <img width="1137" alt="image" src="https://user-images.githubusercontent.com/12025282/177069041-b0a102ca-51bf-4faf-b89d-53bfd950cbc2.png">
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [x] Add screenshots for manual tests if appropriate
    
    - [ ] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #2998 from ulysses-you/setCurrentNamespace.
    
    Closes #2997
    
    2646d4ff [ulysses-you] style
    79a9f332 [ulysses-you] simplify
    246ed71c [ulysses-you] v2 catalog
    
    Authored-by: ulysses-you <ul...@gmail.com>
    Signed-off-by: Cheng Pan <ch...@apache.org>
---
 .../kyuubi/engine/spark/session/SparkSessionImpl.scala      | 13 ++++++++++++-
 .../kyuubi/engine/spark/operation/SparkOperationSuite.scala |  2 +-
 2 files changed, 13 insertions(+), 2 deletions(-)

diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/session/SparkSessionImpl.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/session/SparkSessionImpl.scala
index bf2b4dc39..e0628a807 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/session/SparkSessionImpl.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/session/SparkSessionImpl.scala
@@ -22,6 +22,7 @@ import org.apache.spark.sql.{AnalysisException, SparkSession}
 
 import org.apache.kyuubi.engine.spark.events.SessionEvent
 import org.apache.kyuubi.engine.spark.operation.SparkSQLOperationManager
+import org.apache.kyuubi.engine.spark.shim.SparkCatalogShim
 import org.apache.kyuubi.engine.spark.udf.KDFRegistry
 import org.apache.kyuubi.events.EventBus
 import org.apache.kyuubi.operation.{Operation, OperationHandle}
@@ -52,7 +53,17 @@ class SparkSessionImpl(
 
   override def open(): Unit = {
     normalizedConf.foreach {
-      case ("use:database", database) => spark.catalog.setCurrentDatabase(database)
+      case ("use:database", database) =>
+        try {
+          SparkCatalogShim().setCurrentDatabase(spark, database)
+        } catch {
+          case e
+              if database == "default" && e.getMessage != null &&
+                e.getMessage.contains("not found") =>
+          // `use:database` comes from Hive, where the catalog is always the session catalog and
+          // always has the default namespace `default`. A Spark v2 catalog may not have a default
+          // namespace, so do nothing here to stay compatible with both session and v2 catalogs.
+        }
       case (key, value) => setModifiableConfig(key, value)
     }
     KDFRegistry.registerAll(spark)
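
For readers skimming the hunk above, here is a minimal, self-contained sketch of the tolerant `use:database` handling. It is illustration only: the committed code dispatches through Kyuubi's SparkCatalogShim (whose internals are not shown in this diff), whereas this sketch falls back to the plain session-catalog API, and the names `UseDatabaseSketch` and `applyUseDatabase` are hypothetical.

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical sketch of the tolerant `use:database` handling. The committed code
// goes through Kyuubi's SparkCatalogShim; the direct spark.catalog call below is a
// simplification that covers only the session catalog.
object UseDatabaseSketch {
  def applyUseDatabase(spark: SparkSession, database: String): Unit = {
    try {
      spark.catalog.setCurrentDatabase(database)
    } catch {
      // `use:database` comes from Hive clients, which always send "default", but a
      // v2 catalog is not required to expose a `default` namespace; ignore only
      // that specific case and let any other failure surface.
      case e: Exception
          if database == "default" && e.getMessage != null &&
            e.getMessage.contains("not found") => // do nothing
    }
  }
}
```
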
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
index 3b6c352b8..d1ab4856f 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
@@ -449,7 +449,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
       val tOpenSessionResp = client.OpenSession(req)
       val status = tOpenSessionResp.getStatus
       assert(status.getStatusCode === TStatusCode.ERROR_STATUS)
-      assert(status.getErrorMessage.contains("Database 'default2' does not exist"))
+      assert(status.getErrorMessage.contains("Database 'default2' not found"))
     }
   }
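
The assertion update above tracks the reworded error message on Spark master (see https://github.com/apache/spark/pull/36969). As a hedged sketch that is not part of this commit, a helper like the hypothetical `isDatabaseNotFound` below shows one way a suite running against multiple Spark versions could accept either wording:

```scala
// Hypothetical helper, not part of this commit: accept either wording of Spark's
// missing-database error so the same assertion works across Spark versions.
object ErrorMessageSketch {
  def isDatabaseNotFound(errorMessage: String, db: String): Boolean =
    errorMessage != null && (
      errorMessage.contains(s"Database '$db' not found") ||      // newer Spark wording
        errorMessage.contains(s"Database '$db' does not exist")) // older Spark wording

  // Usage in a test body might look like:
  //   assert(ErrorMessageSketch.isDatabaseNotFound(status.getErrorMessage, "default2"))
}
```
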