You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by we...@apache.org on 2017/01/03 03:55:33 UTC
spark git commit: [SPARK-19029][SQL] Remove databaseName from
SimpleCatalogRelation
Repository: spark
Updated Branches:
refs/heads/master 46b212602 -> a6cd9dbc6
[SPARK-19029][SQL] Remove databaseName from SimpleCatalogRelation
### What changes were proposed in this pull request?
Remove useless `databaseName` from `SimpleCatalogRelation`.
### How was this patch tested?
Existing test cases.
Author: gatorsmile <ga...@gmail.com>
Closes #16438 from gatorsmile/removeDBFromSimpleCatalogRelation.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a6cd9dbc
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a6cd9dbc
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a6cd9dbc
Branch: refs/heads/master
Commit: a6cd9dbc6095570e93dab1d93671abecdce40c25
Parents: 46b2126
Author: gatorsmile <ga...@gmail.com>
Authored: Tue Jan 3 11:55:31 2017 +0800
Committer: Wenchen Fan <we...@databricks.com>
Committed: Tue Jan 3 11:55:31 2017 +0800
----------------------------------------------------------------------
.../apache/spark/sql/catalyst/catalog/SessionCatalog.scala | 2 +-
.../org/apache/spark/sql/catalyst/catalog/interface.scala | 5 -----
.../spark/sql/catalyst/catalog/SessionCatalogSuite.scala | 8 ++++----
3 files changed, 5 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/a6cd9dbc/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 741ed05..2060d53 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -573,7 +573,7 @@ class SessionCatalog(
val view = Option(metadata.tableType).collect {
case CatalogTableType.VIEW => name
}
- SubqueryAlias(relationAlias, SimpleCatalogRelation(db, metadata), view)
+ SubqueryAlias(relationAlias, SimpleCatalogRelation(metadata), view)
} else {
SubqueryAlias(relationAlias, tempTables(table), Option(name))
}
http://git-wip-us.apache.org/repos/asf/spark/blob/a6cd9dbc/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
index 24d75ab..b402bd2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
@@ -318,7 +318,6 @@ trait CatalogRelation {
* Note that in the future we should consolidate this and HiveCatalogRelation.
*/
case class SimpleCatalogRelation(
- databaseName: String,
metadata: CatalogTable)
extends LeafNode with CatalogRelation {
@@ -335,8 +334,4 @@ case class SimpleCatalogRelation(
}
dataCols ++ partCols
}
-
- require(
- metadata.identifier.database == Some(databaseName),
- "provided database does not match the one specified in the table definition")
}
http://git-wip-us.apache.org/repos/asf/spark/blob/a6cd9dbc/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 5cc772d..19b7a46 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -433,24 +433,24 @@ class SessionCatalogSuite extends SparkFunSuite {
sessionCatalog.setCurrentDatabase("db2")
// If we explicitly specify the database, we'll look up the relation in that database
assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1", Some("db2")))
- == SubqueryAlias("tbl1", SimpleCatalogRelation("db2", metastoreTable1), None))
+ == SubqueryAlias("tbl1", SimpleCatalogRelation(metastoreTable1), None))
// Otherwise, we'll first look up a temporary table with the same name
assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1"))
== SubqueryAlias("tbl1", tempTable1, Some(TableIdentifier("tbl1"))))
// Then, if that does not exist, look up the relation in the current database
sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false, purge = false)
assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1"))
- == SubqueryAlias("tbl1", SimpleCatalogRelation("db2", metastoreTable1), None))
+ == SubqueryAlias("tbl1", SimpleCatalogRelation(metastoreTable1), None))
}
test("lookup table relation with alias") {
val catalog = new SessionCatalog(newBasicCatalog())
val alias = "monster"
val tableMetadata = catalog.getTableMetadata(TableIdentifier("tbl1", Some("db2")))
- val relation = SubqueryAlias("tbl1", SimpleCatalogRelation("db2", tableMetadata), None)
+ val relation = SubqueryAlias("tbl1", SimpleCatalogRelation(tableMetadata), None)
val relationWithAlias =
SubqueryAlias(alias,
- SimpleCatalogRelation("db2", tableMetadata), None)
+ SimpleCatalogRelation(tableMetadata), None)
assert(catalog.lookupRelation(
TableIdentifier("tbl1", Some("db2")), alias = None) == relation)
assert(catalog.lookupRelation(
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org