Posted to commits@spark.apache.org by yh...@apache.org on 2015/10/14 03:21:29 UTC

spark git commit: [SPARK-11091] [SQL] Change spark.sql.canonicalizeView to spark.sql.nativeView.

Repository: spark
Updated Branches:
  refs/heads/master 15ff85b31 -> ce3f9a806


[SPARK-11091] [SQL] Change spark.sql.canonicalizeView to spark.sql.nativeView.

https://issues.apache.org/jira/browse/SPARK-11091

Author: Yin Huai <yh...@databricks.com>

Closes #9103 from yhuai/SPARK-11091.
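
For context, the renamed flag is set like any other SQL configuration. Below is a minimal, hypothetical usage sketch, not part of this commit; it assumes a HiveContext bound to `sqlContext` and an existing table `jt`:

    // Hypothetical sketch: enable the renamed flag so CREATE VIEW is handled
    // by Spark SQL itself instead of being delegated to Hive's native command.
    sqlContext.setConf("spark.sql.nativeView", "true")
    sqlContext.sql("CREATE VIEW testView AS SELECT id FROM jt")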


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ce3f9a80
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ce3f9a80
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ce3f9a80

Branch: refs/heads/master
Commit: ce3f9a80657751ee0bc0ed6a9b6558acbb40af4f
Parents: 15ff85b
Author: Yin Huai <yh...@databricks.com>
Authored: Tue Oct 13 18:21:24 2015 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Tue Oct 13 18:21:24 2015 -0700

----------------------------------------------------------------------
 .../src/main/scala/org/apache/spark/sql/SQLConf.scala |  4 ++--
 .../apache/spark/sql/hive/HiveMetastoreCatalog.scala  |  2 +-
 .../main/scala/org/apache/spark/sql/hive/HiveQl.scala |  2 +-
 .../spark/sql/hive/execution/SQLQuerySuite.scala      | 14 +++++++-------
 4 files changed, 11 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/ce3f9a80/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
index f62df9b..b08cc8e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
@@ -328,7 +328,7 @@ private[spark] object SQLConf {
     doc = "When true, some predicates will be pushed down into the Hive metastore so that " +
           "unmatching partitions can be eliminated earlier.")
 
-  val CANONICALIZE_VIEW = booleanConf("spark.sql.canonicalizeView",
+  val NATIVE_VIEW = booleanConf("spark.sql.nativeView",
     defaultValue = Some(false),
     doc = "When true, CREATE VIEW will be handled by Spark SQL instead of Hive native commands.  " +
           "Note that this function is experimental and should ony be used when you are using " +
@@ -489,7 +489,7 @@ private[sql] class SQLConf extends Serializable with CatalystConf {
 
   private[spark] def metastorePartitionPruning: Boolean = getConf(HIVE_METASTORE_PARTITION_PRUNING)
 
-  private[spark] def canonicalizeView: Boolean = getConf(CANONICALIZE_VIEW)
+  private[spark] def nativeView: Boolean = getConf(NATIVE_VIEW)
 
   private[spark] def sortMergeJoinEnabled: Boolean = getConf(SORTMERGE_JOIN)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/ce3f9a80/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index cf59bc0..1f8223e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -591,7 +591,7 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
       case p: LogicalPlan if p.resolved => p
 
       case CreateViewAsSelect(table, child, allowExisting, replace, sql) =>
-        if (conf.canonicalizeView) {
+        if (conf.nativeView) {
           if (allowExisting && replace) {
             throw new AnalysisException(
               "It is not allowed to define a view with both IF NOT EXISTS and OR REPLACE.")

http://git-wip-us.apache.org/repos/asf/spark/blob/ce3f9a80/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index 250c232..1d50501 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -537,7 +537,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
       serde = None,
       viewText = Some(originalText))
 
-    // We need to keep the original SQL string so that if `spark.sql.canonicalizeView` is
+    // We need to keep the original SQL string so that if `spark.sql.nativeView` is
     // false, we can fall back to use hive native command later.
     // We can remove this when parser is configurable(can access SQLConf) in the future.
     val sql = context.getTokenRewriteStream

http://git-wip-us.apache.org/repos/asf/spark/blob/ce3f9a80/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 51b63f3..6aa3460 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -1282,7 +1282,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("correctly parse CREATE VIEW statement") {
-    withSQLConf(SQLConf.CANONICALIZE_VIEW.key -> "true") {
+    withSQLConf(SQLConf.NATIVE_VIEW.key -> "true") {
       withTable("jt") {
         val df = (1 until 10).map(i => i -> i).toDF("i", "j")
         df.write.format("json").saveAsTable("jt")
@@ -1299,7 +1299,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("correctly handle CREATE VIEW IF NOT EXISTS") {
-    withSQLConf(SQLConf.CANONICALIZE_VIEW.key -> "true") {
+    withSQLConf(SQLConf.NATIVE_VIEW.key -> "true") {
       withTable("jt", "jt2") {
         sqlContext.range(1, 10).write.format("json").saveAsTable("jt")
         sql("CREATE VIEW testView AS SELECT id FROM jt")
@@ -1316,7 +1316,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("correctly handle CREATE OR REPLACE VIEW") {
-    withSQLConf(SQLConf.CANONICALIZE_VIEW.key -> "true") {
+    withSQLConf(SQLConf.NATIVE_VIEW.key -> "true") {
       withTable("jt", "jt2") {
         sqlContext.range(1, 10).write.format("json").saveAsTable("jt")
         sql("CREATE OR REPLACE VIEW testView AS SELECT id FROM jt")
@@ -1339,7 +1339,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("correctly handle ALTER VIEW") {
-    withSQLConf(SQLConf.CANONICALIZE_VIEW.key -> "true") {
+    withSQLConf(SQLConf.NATIVE_VIEW.key -> "true") {
       withTable("jt", "jt2") {
         sqlContext.range(1, 10).write.format("json").saveAsTable("jt")
         sql("CREATE VIEW testView AS SELECT id FROM jt")
@@ -1357,7 +1357,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("create hive view for json table") {
     // json table is not hive-compatible, make sure the new flag fix it.
-    withSQLConf(SQLConf.CANONICALIZE_VIEW.key -> "true") {
+    withSQLConf(SQLConf.NATIVE_VIEW.key -> "true") {
       withTable("jt") {
         sqlContext.range(1, 10).write.format("json").saveAsTable("jt")
         sql("CREATE VIEW testView AS SELECT id FROM jt")
@@ -1369,7 +1369,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("create hive view for partitioned parquet table") {
     // partitioned parquet table is not hive-compatible, make sure the new flag fix it.
-    withSQLConf(SQLConf.CANONICALIZE_VIEW.key -> "true") {
+    withSQLConf(SQLConf.NATIVE_VIEW.key -> "true") {
       withTable("parTable") {
         val df = Seq(1 -> "a").toDF("i", "j")
         df.write.format("parquet").partitionBy("i").saveAsTable("parTable")
@@ -1382,7 +1382,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("create hive view for joined tables") {
     // make sure the new flag can handle some complex cases like join and schema change.
-    withSQLConf(SQLConf.CANONICALIZE_VIEW.key -> "true") {
+    withSQLConf(SQLConf.NATIVE_VIEW.key -> "true") {
       withTable("jt1", "jt2") {
         sqlContext.range(1, 10).toDF("id1").write.format("json").saveAsTable("jt1")
         sqlContext.range(1, 10).toDF("id2").write.format("json").saveAsTable("jt2")


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org