You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by li...@apache.org on 2017/01/26 05:50:54 UTC

spark git commit: [TESTS][SQL] Setup testdata at the beginning for tests to run independently

Repository: spark
Updated Branches:
  refs/heads/master 256a3a801 -> 9effc2cdc


[TESTS][SQL] Setup testdata at the beginning for tests to run independently

## What changes were proposed in this pull request?

In CachedTableSuite, we are not setting up the test data at the beginning, so some tests fail when run individually. When running the entire suite, they run fine.

Here are some of the tests that fail -

- test("SELECT star from cached table")
- test("Self-join cached")

As part of this change, a couple of tests were simplified by calling a support method to count the number of InMemoryRelations.

## How was this patch tested?

Ran the failing tests individually.

Author: Dilip Biswal <db...@us.ibm.com>

Closes #16688 from dilipbiswal/cachetablesuite_simple.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9effc2cd
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9effc2cd
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9effc2cd

Branch: refs/heads/master
Commit: 9effc2cdcb3d68db8b6b5b3abd75968633b583c8
Parents: 256a3a8
Author: Dilip Biswal <db...@us.ibm.com>
Authored: Wed Jan 25 21:50:45 2017 -0800
Committer: gatorsmile <ga...@gmail.com>
Committed: Wed Jan 25 21:50:45 2017 -0800

----------------------------------------------------------------------
 .../src/main/scala/org/apache/spark/sql/Dataset.scala    |  4 ++--
 .../scala/org/apache/spark/sql/catalog/Catalog.scala     |  4 ++--
 .../org/apache/spark/sql/execution/command/views.scala   |  4 ++--
 .../scala/org/apache/spark/sql/CachedTableSuite.scala    | 11 ++++-------
 4 files changed, 10 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/9effc2cd/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 24b9b81..5ee173f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -2588,8 +2588,8 @@ class Dataset[T] private[sql](
    *
    * Global temporary view is cross-session. Its lifetime is the lifetime of the Spark application,
    * i.e. it will be automatically dropped when the application terminates. It's tied to a system
-   * preserved database `_global_temp`, and we must use the qualified name to refer a global temp
-   * view, e.g. `SELECT * FROM _global_temp.view1`.
+   * preserved database `global_temp`, and we must use the qualified name to refer a global temp
+   * view, e.g. `SELECT * FROM global_temp.view1`.
    *
    * @throws AnalysisException if the view name is invalid or already exists
    *

http://git-wip-us.apache.org/repos/asf/spark/blob/9effc2cd/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
index 41e781e..50252db 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
@@ -380,8 +380,8 @@ abstract class Catalog {
    *
    * Global temporary view is cross-session. Its lifetime is the lifetime of the Spark application,
    * i.e. it will be automatically dropped when the application terminates. It's tied to a system
-   * preserved database `_global_temp`, and we must use the qualified name to refer a global temp
-   * view, e.g. `SELECT * FROM _global_temp.view1`.
+   * preserved database `global_temp`, and we must use the qualified name to refer a global temp
+   * view, e.g. `SELECT * FROM global_temp.view1`.
    *
    * @param viewName the name of the view to be dropped.
    * @return true if the view is dropped successfully, false otherwise.

http://git-wip-us.apache.org/repos/asf/spark/blob/9effc2cd/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
index bb166f1..921c848 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
@@ -47,8 +47,8 @@ object LocalTempView extends ViewType
 /**
  * GlobalTempView means cross-session global temporary views. Its lifetime is the lifetime of the
  * Spark application, i.e. it will be automatically dropped when the application terminates. It's
- * tied to a system preserved database `_global_temp`, and we must use the qualified name to refer a
- * global temp view, e.g. SELECT * FROM _global_temp.view1.
+ * tied to a system preserved database `global_temp`, and we must use the qualified name to refer a
+ * global temp view, e.g. SELECT * FROM global_temp.view1.
  */
 object GlobalTempView extends ViewType
 

http://git-wip-us.apache.org/repos/asf/spark/blob/9effc2cd/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index 339262a..1af1a36 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -39,6 +39,8 @@ private case class BigData(s: String)
 class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext {
   import testImplicits._
 
+  setupTestData()
+
   override def afterEach(): Unit = {
     try {
       spark.catalog.clearCache()
@@ -185,9 +187,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
     assertCached(spark.table("testData"))
 
     assertResult(1, "InMemoryRelation not found, testData should have been cached") {
-      spark.table("testData").queryExecution.withCachedData.collect {
-        case r: InMemoryRelation => r
-      }.size
+      getNumInMemoryRelations(spark.table("testData").queryExecution.withCachedData)
     }
 
     spark.catalog.cacheTable("testData")
@@ -580,10 +580,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
     localRelation.createOrReplaceTempView("localRelation")
 
     spark.catalog.cacheTable("localRelation")
-    assert(
-      localRelation.queryExecution.withCachedData.collect {
-        case i: InMemoryRelation => i
-      }.size == 1)
+    assert(getNumInMemoryRelations(localRelation.queryExecution.withCachedData) == 1)
   }
 
   test("SPARK-19093 Caching in side subquery") {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org