You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by we...@apache.org on 2020/02/03 06:33:14 UTC
[spark] branch branch-3.0 updated: [SPARK-30697][SQL] Handle
database and namespace exceptions in catalog.isView
This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new 91f78ae [SPARK-30697][SQL] Handle database and namespace exceptions in catalog.isView
91f78ae is described below
commit 91f78aee718888fad5677445ba21024263d1037a
Author: Burak Yavuz <br...@gmail.com>
AuthorDate: Mon Feb 3 14:08:59 2020 +0800
[SPARK-30697][SQL] Handle database and namespace exceptions in catalog.isView
### What changes were proposed in this pull request?
Adds handling of NoSuchDatabaseException and NoSuchNamespaceException to the `isView` method for SessionCatalog.
### Why are the changes needed?
If the identifier is a specialized identifier, this method can throw these exceptions, which prevents specialized resolutions from kicking in within Analysis when using V2 Catalogs.
### Does this PR introduce any user-facing change?
No
### How was this patch tested?
Added test to DataSourceV2SessionCatalogSuite
Closes #27423 from brkyvz/isViewF.
Authored-by: Burak Yavuz <br...@gmail.com>
Signed-off-by: Wenchen Fan <we...@databricks.com>
(cherry picked from commit 2eccfd8a73c4afa30a6aa97c2afd38661f29e24b)
Signed-off-by: Wenchen Fan <we...@databricks.com>
---
.../sql/catalyst/catalog/SessionCatalog.scala | 2 ++
.../DataSourceV2DataFrameSessionCatalogSuite.scala | 22 ++++++++++++++++++++++
.../DataSourceV2SQLSessionCatalogSuite.scala | 14 ++++++++++++++
3 files changed, 38 insertions(+)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 45f0ef6..12f9a61 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -826,6 +826,8 @@ class SessionCatalog(
getTempViewOrPermanentTableMetadata(ident).tableType == CatalogTableType.VIEW
} catch {
case _: NoSuchTableException => false
+ case _: NoSuchDatabaseException => false
+ case _: NoSuchNamespaceException => false
}
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala
index 4c67888..01caf8e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala
@@ -101,6 +101,13 @@ class InMemoryTableSessionCatalog extends TestV2SessionCatalogBase[InMemoryTable
new InMemoryTable(name, schema, partitions, properties)
}
+ override def loadTable(ident: Identifier): Table = {
+ val identToUse = Option(InMemoryTableSessionCatalog.customIdentifierResolution)
+ .map(_(ident))
+ .getOrElse(ident)
+ super.loadTable(identToUse)
+ }
+
override def alterTable(ident: Identifier, changes: TableChange*): Table = {
val fullIdent = fullIdentifier(ident)
Option(tables.get(fullIdent)) match {
@@ -125,6 +132,21 @@ class InMemoryTableSessionCatalog extends TestV2SessionCatalogBase[InMemoryTable
}
}
+object InMemoryTableSessionCatalog {
+ private var customIdentifierResolution: Identifier => Identifier = _
+
+ def withCustomIdentifierResolver(
+ resolver: Identifier => Identifier)(
+ f: => Unit): Unit = {
+ try {
+ customIdentifierResolution = resolver
+ f
+ } finally {
+ customIdentifierResolution = null
+ }
+ }
+}
+
private [connector] trait SessionCatalogTest[T <: Table, Catalog <: TestV2SessionCatalogBase[T]]
extends QueryTest
with SharedSparkSession
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala
index 27725bc..b699744 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala
@@ -49,4 +49,18 @@ class DataSourceV2SQLSessionCatalogSuite
v2Catalog.asInstanceOf[TableCatalog]
.loadTable(Identifier.of(Array.empty, nameParts.last))
}
+
+ test("SPARK-30697: catalog.isView doesn't throw an error for specialized identifiers") {
+ val t1 = "tbl"
+ withTable(t1) {
+ sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format")
+
+ def idResolver(id: Identifier): Identifier = Identifier.of(Array.empty, id.name())
+
+ InMemoryTableSessionCatalog.withCustomIdentifierResolver(idResolver) {
+ // The following should not throw AnalysisException.
+ sql(s"DESCRIBE TABLE ignored.$t1")
+ }
+ }
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org