You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by hv...@apache.org on 2016/12/29 20:22:19 UTC
spark git commit: [SPARK-19012][SQL] Fix `createTempViewCommand` to
throw AnalysisException instead of ParseException
Repository: spark
Updated Branches:
refs/heads/master 7d19b6ab7 -> 752d9eeb9
[SPARK-19012][SQL] Fix `createTempViewCommand` to throw AnalysisException instead of ParseException
## What changes were proposed in this pull request?
Currently, `createTempView`, `createOrReplaceTempView`, and `createGlobalTempView` show `ParseExceptions` on invalid table names. We should show a better error message instead. In addition, this PR adds and updates the missing descriptions in the API docs.
**BEFORE**
```
scala> spark.range(10).createOrReplaceTempView("11111")
org.apache.spark.sql.catalyst.parser.ParseException:
mismatched input '11111' expecting {'SELECT', 'FROM', 'ADD', ...}(line 1, pos 0)
== SQL ==
11111
...
```
**AFTER**
```
scala> spark.range(10).createOrReplaceTempView("11111")
org.apache.spark.sql.AnalysisException: Invalid view name: 11111;
...
```
## How was this patch tested?
Pass the Jenkins tests with an updated test case.
Author: Dongjoon Hyun <do...@apache.org>
Closes #16427 from dongjoon-hyun/SPARK-19012.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/752d9eeb
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/752d9eeb
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/752d9eeb
Branch: refs/heads/master
Commit: 752d9eeb9bff7934a0645ffd8059cde42da8eeef
Parents: 7d19b6a
Author: Dongjoon Hyun <do...@apache.org>
Authored: Thu Dec 29 21:22:13 2016 +0100
Committer: Herman van Hovell <hv...@databricks.com>
Committed: Thu Dec 29 21:22:13 2016 +0100
----------------------------------------------------------------------
.../main/scala/org/apache/spark/sql/Dataset.scala | 13 ++++++++++---
.../org/apache/spark/sql/DataFrameSuite.scala | 18 ++++++++++--------
2 files changed, 20 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/752d9eeb/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 29397b1..c1cedd8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -39,6 +39,7 @@ import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.json.JacksonGenerator
import org.apache.spark.sql.catalyst.optimizer.CombineUnions
+import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.physical.{Partitioning, PartitioningCollection}
@@ -2569,7 +2570,7 @@ class Dataset[T] private[sql](
* created it, i.e. it will be automatically dropped when the session terminates. It's not
* tied to any databases, i.e. we can't use `db1.view1` to reference a local temporary view.
*
- * @throws AnalysisException if the view name already exists
+ * @throws AnalysisException if the view name is invalid or already exists
*
* @group basic
* @since 2.0.0
@@ -2601,7 +2602,7 @@ class Dataset[T] private[sql](
* preserved database `_global_temp`, and we must use the qualified name to refer a global temp
* view, e.g. `SELECT * FROM _global_temp.view1`.
*
- * @throws AnalysisException if the view name already exists
+ * @throws AnalysisException if the view name is invalid or already exists
*
* @group basic
* @since 2.1.0
@@ -2616,8 +2617,14 @@ class Dataset[T] private[sql](
replace: Boolean,
global: Boolean): CreateViewCommand = {
val viewType = if (global) GlobalTempView else LocalTempView
+
+ val tableIdentifier = try {
+ sparkSession.sessionState.sqlParser.parseTableIdentifier(viewName)
+ } catch {
+ case _: ParseException => throw new AnalysisException(s"Invalid view name: $viewName")
+ }
CreateViewCommand(
- name = sparkSession.sessionState.sqlParser.parseTableIdentifier(viewName),
+ name = tableIdentifier,
userSpecifiedColumns = Nil,
comment = None,
properties = Map.empty,
http://git-wip-us.apache.org/repos/asf/spark/blob/752d9eeb/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 312cd17..f4df80f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -1518,14 +1518,16 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
test("SPARK-12982: Add table name validation in temp table registration") {
val df = Seq("foo", "bar").map(Tuple1.apply).toDF("col")
- // invalid table name test as below
- intercept[AnalysisException](df.createOrReplaceTempView("t~"))
- // valid table name test as below
- df.createOrReplaceTempView("table1")
- // another invalid table name test as below
- intercept[AnalysisException](df.createOrReplaceTempView("#$@sum"))
- // another invalid table name test as below
- intercept[AnalysisException](df.createOrReplaceTempView("table!#"))
+ // invalid table names
+ Seq("11111", "t~", "#$@sum", "table!#").foreach { name =>
+ val m = intercept[AnalysisException](df.createOrReplaceTempView(name)).getMessage
+ assert(m.contains(s"Invalid view name: $name"))
+ }
+
+ // valid table names
+ Seq("table1", "`11111`", "`t~`", "`#$@sum`", "`table!#`").foreach { name =>
+ df.createOrReplaceTempView(name)
+ }
}
test("assertAnalyzed shouldn't replace original stack trace") {
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org