Posted to commits@spark.apache.org by va...@apache.org on 2018/05/14 21:35:13 UTC

spark git commit: [SPARK-23852][SQL] Add withSQLConf(...) to test case

Repository: spark
Updated Branches:
  refs/heads/master 8cd83acf4 -> 061e0084c


[SPARK-23852][SQL] Add withSQLConf(...) to test case

## What changes were proposed in this pull request?

Add a `withSQLConf(...)` wrapper to explicitly enable Parquet filter pushdown for a test that relies on pushdown being active.
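
For reference, `withSQLConf` is a helper from Spark's SQL test utilities that temporarily overrides session configuration for the scope of a block and restores the previous values afterwards. The following is a minimal sketch of that pattern, not the actual Spark implementation; it assumes an active `SparkSession` named `spark`:

```scala
import org.apache.spark.sql.SparkSession

// Minimal sketch of the withSQLConf pattern: override the given keys,
// run the body, then restore whatever values were set before.
def withSQLConf(spark: SparkSession)(pairs: (String, String)*)(body: => Unit): Unit = {
  val conf = spark.conf
  // Remember the previous value (if any) of each key before overriding it.
  val previous = pairs.map { case (key, _) => key -> conf.getOption(key) }
  pairs.foreach { case (key, value) => conf.set(key, value) }
  try {
    body
  } finally {
    // Restore the original state so other tests are not affected.
    previous.foreach {
      case (key, Some(oldValue)) => conf.set(key, oldValue)
      case (key, None)           => conf.unset(key)
    }
  }
}
```

Used this way, the change in the diff below pins `spark.sql.parquet.filterPushdown` (the key behind `SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED`) to `true` only for the body of the SPARK-23852 test, regardless of the suite's default configuration.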

## How was this patch tested?

Existing test passes with the `withSQLConf(...)` wrapper in place.

Author: Henry Robinson <he...@apache.org>

Closes #21323 from henryr/spark-23582.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/061e0084
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/061e0084
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/061e0084

Branch: refs/heads/master
Commit: 061e0084ce19c1384ba271a97a0aa1f87abe879d
Parents: 8cd83ac
Author: Henry Robinson <he...@apache.org>
Authored: Mon May 14 14:35:08 2018 -0700
Committer: Marcelo Vanzin <va...@cloudera.com>
Committed: Mon May 14 14:35:08 2018 -0700

----------------------------------------------------------------------
 .../datasources/parquet/ParquetFilterSuite.scala      | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/061e0084/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
index 4d0ecde..90da7eb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
@@ -650,13 +650,15 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContex
   }
 
   test("SPARK-23852: Broken Parquet push-down for partially-written stats") {
-    // parquet-1217.parquet contains a single column with values -1, 0, 1, 2 and null.
-    // The row-group statistics include null counts, but not min and max values, which
-    // triggers PARQUET-1217.
-    val df = readResourceParquetFile("test-data/parquet-1217.parquet")
+    withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
+      // parquet-1217.parquet contains a single column with values -1, 0, 1, 2 and null.
+      // The row-group statistics include null counts, but not min and max values, which
+      // triggers PARQUET-1217.
+      val df = readResourceParquetFile("test-data/parquet-1217.parquet")
 
-    // Will return 0 rows if PARQUET-1217 is not fixed.
-    assert(df.where("col > 0").count() === 2)
+      // Will return 0 rows if PARQUET-1217 is not fixed.
+      assert(df.where("col > 0").count() === 2)
+    }
   }
 }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org