You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2014/12/12 07:52:00 UTC
spark git commit: [SPARK-4825] [SQL] CTAS fails to resolve when created using saveAsTable
Repository: spark
Updated Branches:
refs/heads/master cbb634ae6 -> 0abbff286
[SPARK-4825] [SQL] CTAS fails to resolve when created using saveAsTable
Fix bug when query like:
```
test("save join to table") {
val testData = sparkContext.parallelize(1 to 10).map(i => TestData(i, i.toString))
sql("CREATE TABLE test1 (key INT, value STRING)")
testData.insertInto("test1")
sql("CREATE TABLE test2 (key INT, value STRING)")
testData.insertInto("test2")
testData.insertInto("test2")
sql("SELECT COUNT(a.value) FROM test1 a JOIN test2 b ON a.key = b.key").saveAsTable("test")
checkAnswer(
table("test"),
sql("SELECT COUNT(a.value) FROM test1 a JOIN test2 b ON a.key = b.key").collect().toSeq)
}
```
Author: Cheng Hao <ha...@intel.com>
Closes #3673 from chenghao-intel/spark_4825 and squashes the following commits:
e8cbd56 [Cheng Hao] alternate the pattern matching order for logical plan: CTAS
e004895 [Cheng Hao] fix bug
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0abbff28
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0abbff28
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0abbff28
Branch: refs/heads/master
Commit: 0abbff286220bbcbbf28fbd80b8c5bf59ff37ce2
Parents: cbb634a
Author: Cheng Hao <ha...@intel.com>
Authored: Thu Dec 11 22:51:49 2014 -0800
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Thu Dec 11 22:51:49 2014 -0800
----------------------------------------------------------------------
.../sql/catalyst/plans/logical/basicOperators.scala | 2 +-
.../apache/spark/sql/hive/HiveMetastoreCatalog.scala | 9 +++++++++
.../spark/sql/hive/execution/SQLQuerySuite.scala | 13 +++++++++++++
3 files changed, 23 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/0abbff28/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
index 00bdf10..64b8d45 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
@@ -121,7 +121,7 @@ case class CreateTableAsSelect[T](
allowExisting: Boolean,
desc: Option[T] = None) extends UnaryNode {
override def output = Seq.empty[Attribute]
- override lazy val resolved = (databaseName != None && childrenResolved)
+ override lazy val resolved = databaseName != None && childrenResolved
}
case class WriteToFile(
http://git-wip-us.apache.org/repos/asf/spark/blob/0abbff28/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 6086563..d8b10b7 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -261,6 +261,8 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
// Wait until children are resolved.
case p: LogicalPlan if !p.childrenResolved => p
+ // TODO extra is in type of ASTNode which means the logical plan is not resolved
+ // Need to think about how to implement the CreateTableAsSelect.resolved
case CreateTableAsSelect(db, tableName, child, allowExisting, Some(extra: ASTNode)) =>
val (dbName, tblName) = processDatabaseAndTableName(db, tableName)
val databaseName = dbName.getOrElse(hive.sessionState.getCurrentDatabase)
@@ -285,6 +287,13 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
}
CreateTableAsSelect(Some(databaseName), tblName, child, allowExisting, desc)
+
+ case p: LogicalPlan if p.resolved => p
+
+ case p @ CreateTableAsSelect(db, tableName, child, allowExisting, None) =>
+ val (dbName, tblName) = processDatabaseAndTableName(db, tableName)
+ val databaseName = dbName.getOrElse(hive.sessionState.getCurrentDatabase)
+ CreateTableAsSelect(Some(databaseName), tblName, child, allowExisting, None)
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/0abbff28/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index b341eae..96f3430 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -137,6 +137,19 @@ class SQLQuerySuite extends QueryTest {
sql("SELECT key, value FROM src ORDER BY key").collect().toSeq)
}
+ test("SPARK-4825 save join to table") {
+ val testData = sparkContext.parallelize(1 to 10).map(i => TestData(i, i.toString))
+ sql("CREATE TABLE test1 (key INT, value STRING)")
+ testData.insertInto("test1")
+ sql("CREATE TABLE test2 (key INT, value STRING)")
+ testData.insertInto("test2")
+ testData.insertInto("test2")
+ sql("SELECT COUNT(a.value) FROM test1 a JOIN test2 b ON a.key = b.key").saveAsTable("test")
+ checkAnswer(
+ table("test"),
+ sql("SELECT COUNT(a.value) FROM test1 a JOIN test2 b ON a.key = b.key").collect().toSeq)
+ }
+
test("SPARK-3708 Backticks aren't handled correctly is aliases") {
checkAnswer(
sql("SELECT k FROM (SELECT `key` AS `k` FROM src) a"),
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org