Posted to reviews@spark.apache.org by jiangxb1987 <gi...@git.apache.org> on 2017/02/08 07:07:29 UTC

[GitHub] spark pull request #16674: [SPARK-19331][SQL][TESTS] Improve the test covera...

Github user jiangxb1987 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16674#discussion_r100002440
  
    --- Diff: sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala ---
    @@ -452,311 +506,96 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
         }
       }
     
    -  test("create hive view for joined tables") {
    -    // make sure the new flag can handle some complex cases like join and schema change.
    -    withTable("jt1", "jt2") {
    -      spark.range(1, 10).toDF("id1").write.format("json").saveAsTable("jt1")
    -      spark.range(1, 10).toDF("id2").write.format("json").saveAsTable("jt2")
    -      sql("CREATE VIEW testView AS SELECT * FROM jt1 JOIN jt2 ON id1 == id2")
    -      checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 9).map(i => Row(i, i)))
    -
    -      val df = (1 until 10).map(i => i -> i).toDF("id1", "newCol")
    -      df.write.format("json").mode(SaveMode.Overwrite).saveAsTable("jt1")
    -      checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 9).map(i => Row(i, i)))
    -
    -      sql("DROP VIEW testView")
    -    }
    -  }
    -
    -  test("SPARK-14933 - create view from hive parquet table") {
    -    withTable("t_part") {
    -      withView("v_part") {
    -        spark.sql("create table t_part stored as parquet as select 1 as a, 2 as b")
    -        spark.sql("create view v_part as select * from t_part")
    -        checkAnswer(
    -          sql("select * from t_part"),
    -          sql("select * from v_part"))
    -      }
    -    }
    -  }
    -
    -  test("SPARK-14933 - create view from hive orc table") {
    -    withTable("t_orc") {
    -      withView("v_orc") {
    -        spark.sql("create table t_orc stored as orc as select 1 as a, 2 as b")
    -        spark.sql("create view v_orc as select * from t_orc")
    -        checkAnswer(
    -          sql("select * from t_orc"),
    -          sql("select * from v_orc"))
    -      }
    -    }
    -  }
    -
    -  test("create a permanent/temp view using a hive, built-in, and permanent user function") {
    -    val permanentFuncName = "myUpper"
    -    val permanentFuncClass =
    -      classOf[org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper].getCanonicalName
    -    val builtInFuncNameInLowerCase = "abs"
    -    val builtInFuncNameInMixedCase = "aBs"
    -    val hiveFuncName = "histogram_numeric"
    -
    -    withUserDefinedFunction(permanentFuncName -> false) {
    -      sql(s"CREATE FUNCTION $permanentFuncName AS '$permanentFuncClass'")
    -      withTable("tab1") {
    -        (1 to 10).map(i => (s"$i", i)).toDF("str", "id").write.saveAsTable("tab1")
    -        Seq("VIEW", "TEMPORARY VIEW").foreach { viewMode =>
    -          withView("view1") {
    -            sql(
    -              s"""
    -                 |CREATE $viewMode view1
    -                 |AS SELECT
    -                 |$permanentFuncName(str),
    -                 |$builtInFuncNameInLowerCase(id),
    -                 |$builtInFuncNameInMixedCase(id) as aBs,
    -                 |$hiveFuncName(id, 5) over()
    -                 |FROM tab1
    -               """.stripMargin)
    -            checkAnswer(sql("select count(*) FROM view1"), Row(10))
    -          }
    -        }
    -      }
    -    }
    -  }
    -
    -  test("create a permanent/temp view using a temporary function") {
    -    val tempFunctionName = "temp"
    -    val functionClass =
    -      classOf[org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper].getCanonicalName
    -    withUserDefinedFunction(tempFunctionName -> true) {
    -      sql(s"CREATE TEMPORARY FUNCTION $tempFunctionName AS '$functionClass'")
    -      withView("view1", "tempView1") {
    -        withTable("tab1") {
    -          (1 to 10).map(i => s"$i").toDF("id").write.saveAsTable("tab1")
    -
    -          // temporary view
    -          sql(s"CREATE TEMPORARY VIEW tempView1 AS SELECT $tempFunctionName(id) from tab1")
    -          checkAnswer(sql("select count(*) FROM tempView1"), Row(10))
    -
    -          // permanent view
    -          val e = intercept[AnalysisException] {
    -            sql(s"CREATE VIEW view1 AS SELECT $tempFunctionName(id) from tab1")
    -          }.getMessage
    -          assert(e.contains("Not allowed to create a permanent view `view1` by referencing " +
    -            s"a temporary function `$tempFunctionName`"))
    -        }
    -      }
    -    }
    -  }
    -
    -  test("correctly resolve a nested view") {
    --- End diff ---
    
    This test case has been rewritten as `test("correctly parse a nested view")`.
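
    For context, here is a minimal sketch (not the actual code from this PR) of how a nested-view test could be written in the style of the other tests in this suite; the names `jt`, `baseView`, and `nestedView` are purely illustrative:

        // Illustrative sketch only -- mirrors the withTable/withView pattern
        // used by the removed tests above, not the test added by this PR.
        test("correctly parse a nested view (sketch)") {
          withTable("jt") {
            spark.range(1, 10).toDF("id").write.format("json").saveAsTable("jt")
            withView("baseView", "nestedView") {
              sql("CREATE VIEW baseView AS SELECT id FROM jt")
              sql("CREATE VIEW nestedView AS SELECT * FROM baseView")
              // Resolving nestedView requires first resolving baseView against jt.
              checkAnswer(sql("SELECT * FROM nestedView ORDER BY id"), (1 to 9).map(i => Row(i)))
            }
          }
        }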

