You are viewing a plain text version of this content. The canonical (HTML) version is available in the original mailing-list archive.
Posted to commits@spark.apache.org by ma...@apache.org on 2014/11/11 00:56:47 UTC

spark git commit: [SPARK-4319][SQL] Enable an ignored test "null count".

Repository: spark
Updated Branches:
  refs/heads/master 6e7a309b8 -> dbf10588d


[SPARK-4319][SQL] Enable an ignored test "null count".

Author: Takuya UESHIN <ue...@happy-camper.st>

Closes #3185 from ueshin/issues/SPARK-4319 and squashes the following commits:

a44a38e [Takuya UESHIN] Enable an ignored test "null count".


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/dbf10588
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/dbf10588
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/dbf10588

Branch: refs/heads/master
Commit: dbf10588de03e8ea993fff687a78727eff55db1f
Parents: 6e7a309
Author: Takuya UESHIN <ue...@happy-camper.st>
Authored: Mon Nov 10 15:55:15 2014 -0800
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Mon Nov 10 15:55:15 2014 -0800

----------------------------------------------------------------------
 .../src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 9 ++++-----
 sql/core/src/test/scala/org/apache/spark/sql/TestData.scala | 9 +++++----
 2 files changed, 9 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/dbf10588/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 702714a..8a80724 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -281,14 +281,13 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
       3)
   }
 
-  // No support for primitive nulls yet.
-  ignore("null count") {
+  test("null count") {
     checkAnswer(
-      sql("SELECT a, COUNT(b) FROM testData3"),
-      Seq((1,0), (2, 1)))
+      sql("SELECT a, COUNT(b) FROM testData3 GROUP BY a"),
+      Seq((1, 0), (2, 1)))
 
     checkAnswer(
-      testData3.groupBy()(Count('a), Count('b), Count(1), CountDistinct('a :: Nil), CountDistinct('b :: Nil)),
+      sql("SELECT COUNT(a), COUNT(b), COUNT(1), COUNT(DISTINCT a), COUNT(DISTINCT b) FROM testData3"),
       (2, 1, 2, 2, 1) :: Nil)
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/dbf10588/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index ef87a23..92b49e8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -64,11 +64,12 @@ object TestData {
       BinaryData("123".getBytes(), 4) :: Nil).toSchemaRDD
   binaryData.registerTempTable("binaryData")
 
-  // TODO: There is no way to express null primitives as case classes currently...
+  case class TestData3(a: Int, b: Option[Int])
   val testData3 =
-    logical.LocalRelation('a.int, 'b.int).loadData(
-      (1, null) ::
-      (2, 2) :: Nil)
+    TestSQLContext.sparkContext.parallelize(
+      TestData3(1, None) ::
+      TestData3(2, Some(2)) :: Nil).toSchemaRDD
+  testData3.registerTempTable("testData3")
 
   val emptyTableData = logical.LocalRelation('a.int, 'b.int)
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org