Posted to commits@spark.apache.org by ma...@apache.org on 2014/12/17 23:27:06 UTC

spark git commit: [HOTFIX][SQL] Fix parquet filter suite

Repository: spark
Updated Branches:
  refs/heads/master affc3f460 -> 19c0faad6


[HOTFIX][SQL] Fix parquet filter suite

Author: Michael Armbrust <mi...@databricks.com>

Closes #3727 from marmbrus/parquetNotEq and squashes the following commits:

2157bfc [Michael Armbrust] Fix parquet filter suite


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/19c0faad
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/19c0faad
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/19c0faad

Branch: refs/heads/master
Commit: 19c0faad6d04349952ec25ce5ae94b718d7e8518
Parents: affc3f4
Author: Michael Armbrust <mi...@databricks.com>
Authored: Wed Dec 17 14:27:02 2014 -0800
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Wed Dec 17 14:27:02 2014 -0800

----------------------------------------------------------------------
 .../spark/sql/parquet/ParquetFilterSuite.scala  | 24 ++++++++++----------
 1 file changed, 12 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/19c0faad/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala
index 111a459..b173004 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala
@@ -87,14 +87,14 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
   test("filter pushdown - boolean") {
     withParquetRDD((true :: false :: Nil).map(Tuple1.apply)) { rdd =>
       checkFilterPushdown(rdd, '_1)('_1 === true, classOf[Eq[java.lang.Boolean]])(true)
-      checkFilterPushdown(rdd, '_1)('_1 !== true, classOf[Operators.Not])(false)
+      checkFilterPushdown(rdd, '_1)('_1 !== true, classOf[Operators.NotEq[java.lang.Boolean]])(false)
     }
   }
 
   test("filter pushdown - integer") {
     withParquetRDD((1 to 4).map(Tuple1.apply)) { rdd =>
       checkFilterPushdown(rdd, '_1)('_1 === 1, classOf[Eq[Integer]])(1)
-      checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.Not]) {
+      checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[Integer]]) {
         (2 to 4).map(Row.apply(_))
       }
 
@@ -118,9 +118,9 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
   }
 
   test("filter pushdown - long") {
-    withParquetRDD((1 to 4).map(i => Tuple1.apply(i.toLong))) { rdd =>
+    withParquetRDD((1 to 4).map(i => Tuple1(i.toLong))) { rdd =>
       checkFilterPushdown(rdd, '_1)('_1 === 1, classOf[Eq[java.lang.Long]])(1)
-      checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.Not]) {
+      checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[java.lang.Long]]) {
         (2 to 4).map(Row.apply(_))
       }
 
@@ -144,9 +144,9 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
   }
 
   test("filter pushdown - float") {
-    withParquetRDD((1 to 4).map(i => Tuple1.apply(i.toFloat))) { rdd =>
+    withParquetRDD((1 to 4).map(i => Tuple1(i.toFloat))) { rdd =>
       checkFilterPushdown(rdd, '_1)('_1 === 1, classOf[Eq[java.lang.Float]])(1)
-      checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.Not]) {
+      checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[java.lang.Float]]) {
         (2 to 4).map(Row.apply(_))
       }
 
@@ -170,9 +170,9 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
   }
 
   test("filter pushdown - double") {
-    withParquetRDD((1 to 4).map(i => Tuple1.apply(i.toDouble))) { rdd =>
+    withParquetRDD((1 to 4).map(i => Tuple1(i.toDouble))) { rdd =>
       checkFilterPushdown(rdd, '_1)('_1 === 1, classOf[Eq[java.lang.Double]])(1)
-      checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.Not]) {
+      checkFilterPushdown(rdd, '_1)('_1 !== 1, classOf[Operators.NotEq[java.lang.Double]]) {
         (2 to 4).map(Row.apply(_))
       }
 
@@ -196,9 +196,9 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
   }
 
   test("filter pushdown - string") {
-    withParquetRDD((1 to 4).map(i => Tuple1.apply(i.toString))) { rdd =>
+    withParquetRDD((1 to 4).map(i => Tuple1(i.toString))) { rdd =>
       checkFilterPushdown(rdd, '_1)('_1 === "1", classOf[Eq[String]])("1")
-      checkFilterPushdown(rdd, '_1)('_1 !== "1", classOf[Operators.Not]) {
+      checkFilterPushdown(rdd, '_1)('_1 !== "1", classOf[Operators.NotEq[String]]) {
         (2 to 4).map(i => Row.apply(i.toString))
       }
 
@@ -226,9 +226,9 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
       def b: Array[Byte] = int.toString.getBytes("UTF-8")
     }
 
-    withParquetRDD((1 to 4).map(i => Tuple1.apply(i.b))) { rdd =>
+    withParquetRDD((1 to 4).map(i => Tuple1(i.b))) { rdd =>
       checkBinaryFilterPushdown(rdd, '_1)('_1 === 1.b, classOf[Eq[Array[Byte]]])(1.b)
-      checkBinaryFilterPushdown(rdd, '_1)('_1 !== 1.b, classOf[Operators.Not]) {
+      checkBinaryFilterPushdown(rdd, '_1)('_1 !== 1.b, classOf[Operators.NotEq[Array[Byte]]]) {
         (2 to 4).map(i => Row.apply(i.b)).toSeq
       }
 


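For context, the suite above now expects a !== filter to push down as Parquet's Operators.NotEq rather than Operators.Not. The following is a minimal, standalone sketch (not part of this commit) of that distinction, assuming the pre-Apache parquet-mr packages (parquet.filter2.predicate) that Spark depended on at the time; the column name "_1" and the NotEqSketch object are illustrative only.

import parquet.filter2.predicate.{FilterApi, Operators}
import parquet.filter2.predicate.FilterApi.intColumn

object NotEqSketch {
  def main(args: Array[String]): Unit = {
    // FilterApi.notEq builds an Operators.NotEq node directly, rather than
    // wrapping an Eq predicate in Operators.Not.
    val pred = FilterApi.notEq(intColumn("_1"), Integer.valueOf(1))
    assert(pred.getClass == classOf[Operators.NotEq[Integer]])

    // Type parameters are erased at runtime, so NotEq[Integer],
    // NotEq[java.lang.Long], etc. all name the same runtime class.
    assert(pred.getClass == classOf[Operators.NotEq[java.lang.Long]])
  }
}

Because of that erasure, the suite's checkFilterPushdown assertions effectively compare only the operator class, not the element type carried in its type parameter.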
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org