Posted to commits@spark.apache.org by rx...@apache.org on 2016/04/26 03:22:09 UTC

spark git commit: [SPARK-14870][SQL][FOLLOW-UP] Move decimalDataWithNulls in DataFrameAggregateSuite

Repository: spark
Updated Branches:
  refs/heads/master cfa64882f -> c71c6853f


[SPARK-14870][SQL][FOLLOW-UP] Move decimalDataWithNulls in DataFrameAggregateSuite

## What changes were proposed in this pull request?

Minor followup to https://github.com/apache/spark/pull/12651
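For context, a standalone sketch (with assumed names, not taken from the actual suite) of the shape of this change: the nullable-decimal test data is now built inline in the test that uses it, so the shared SQLTestData definition and its registerTempTable("decimalDataWithNulls") call are no longer needed.

```scala
// Sketch only: mirrors the new hunk in DataFrameAggregateSuite using a
// local SparkContext/SQLContext; object and app names are assumptions.
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions.sum

object DecimalDataWithNullsSketch {
  // Mirrors SQLTestData.DecimalData: both columns are nullable BigDecimals.
  case class DecimalData(a: BigDecimal, b: BigDecimal)

  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("sketch"))
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    // Built inline where it is used; no temp table registration required.
    val decimalDataWithNulls = sqlContext.sparkContext.parallelize(
      DecimalData(1, 1) ::
      DecimalData(1, null) ::
      DecimalData(2, 1) ::
      DecimalData(2, null) ::
      DecimalData(3, 1) ::
      DecimalData(3, 2) ::
      DecimalData(null, 2) :: Nil).toDF()

    // The same aggregation the suite verifies with checkAnswer.
    decimalDataWithNulls.groupBy("a").agg(sum("b")).show()

    sc.stop()
  }
}
```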

## How was this patch tested?

Test-only change

Author: Sameer Agarwal <sa...@databricks.com>

Closes #12674 from sameeragarwal/tpcds-fix-2.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c71c6853
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c71c6853
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c71c6853

Branch: refs/heads/master
Commit: c71c6853fcecd3b41de0e8273329ea83a9779320
Parents: cfa6488
Author: Sameer Agarwal <sa...@databricks.com>
Authored: Mon Apr 25 18:22:06 2016 -0700
Committer: Reynold Xin <rx...@databricks.com>
Committed: Mon Apr 25 18:22:06 2016 -0700

----------------------------------------------------------------------
 .../apache/spark/sql/DataFrameAggregateSuite.scala   |  9 +++++++++
 .../org/apache/spark/sql/test/SQLTestData.scala      | 15 ---------------
 2 files changed, 9 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c71c6853/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
index 0fcfb97..2f685c5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
@@ -21,6 +21,7 @@ import org.apache.spark.sql.expressions.Window
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSQLContext
+import org.apache.spark.sql.test.SQLTestData.DecimalData
 import org.apache.spark.sql.types.DecimalType
 
 case class Fact(date: Int, hour: Int, minute: Int, room_name: String, temp: Double)
@@ -69,6 +70,14 @@ class DataFrameAggregateSuite extends QueryTest with SharedSQLContext {
         Row(new java.math.BigDecimal(3.0), new java.math.BigDecimal(3.0)))
     )
 
+    val decimalDataWithNulls = sqlContext.sparkContext.parallelize(
+      DecimalData(1, 1) ::
+      DecimalData(1, null) ::
+      DecimalData(2, 1) ::
+      DecimalData(2, null) ::
+      DecimalData(3, 1) ::
+      DecimalData(3, 2) ::
+      DecimalData(null, 2) :: Nil).toDF()
     checkAnswer(
       decimalDataWithNulls.groupBy("a").agg(sum("b")),
       Seq(Row(new java.math.BigDecimal(1.0), new java.math.BigDecimal(1.0)),

http://git-wip-us.apache.org/repos/asf/spark/blob/c71c6853/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestData.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestData.scala
index c5f25fa..7fa6760 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestData.scala
@@ -103,19 +103,6 @@ private[sql] trait SQLTestData { self =>
     df
   }
 
-  protected lazy val decimalDataWithNulls: DataFrame = {
-    val df = sqlContext.sparkContext.parallelize(
-      DecimalDataWithNulls(1, 1) ::
-      DecimalDataWithNulls(1, null) ::
-      DecimalDataWithNulls(2, 1) ::
-      DecimalDataWithNulls(2, null) ::
-      DecimalDataWithNulls(3, 1) ::
-      DecimalDataWithNulls(3, 2) ::
-      DecimalDataWithNulls(null, 2) :: Nil).toDF()
-    df.registerTempTable("decimalDataWithNulls")
-    df
-  }
-
   protected lazy val binaryData: DataFrame = {
     val df = sqlContext.sparkContext.parallelize(
       BinaryData("12".getBytes(StandardCharsets.UTF_8), 1) ::
@@ -280,7 +267,6 @@ private[sql] trait SQLTestData { self =>
     negativeData
     largeAndSmallInts
     decimalData
-    decimalDataWithNulls
     binaryData
     upperCaseData
     lowerCaseData
@@ -310,7 +296,6 @@ private[sql] object SQLTestData {
   case class TestData3(a: Int, b: Option[Int])
   case class LargeAndSmallInts(a: Int, b: Int)
   case class DecimalData(a: BigDecimal, b: BigDecimal)
-  case class DecimalDataWithNulls(a: BigDecimal, b: BigDecimal)
   case class BinaryData(a: Array[Byte], b: Int)
   case class UpperCaseData(N: Int, L: String)
   case class LowerCaseData(n: Int, l: String)

