You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2014/12/30 20:30:49 UTC

spark git commit: [SPARK-4975][SQL] Fix HiveInspectorSuite test failure

Repository: spark
Updated Branches:
  refs/heads/master 94d60b702 -> 65357f11c


[SPARK-4975][SQL] Fix HiveInspectorSuite test failure

HiveInspectorSuite test failure:
[info] - wrap / unwrap null, constant null and writables *** FAILED *** (21 milliseconds)
[info] 1 did not equal 0 (HiveInspectorSuite.scala:136)
This is because the original date (3914-10-23) does not equal the date returned by ```unwrap``` (3914-10-22).

Setting the TimeZone and Locale fixes this.
Another minor change here is renaming ```def checkValues(v1: Any, v2: Any): Unit``` to ```def checkValue(v1: Any, v2: Any): Unit``` to make the code clearer.

Author: scwf <wa...@huawei.com>
Author: Fei Wang <wa...@huawei.com>

Closes #3814 from scwf/fix-inspectorsuite and squashes the following commits:

d8531ef [Fei Wang] Delete test.log
72b19a9 [scwf] fix HiveInspectorSuite test error


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/65357f11
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/65357f11
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/65357f11

Branch: refs/heads/master
Commit: 65357f11c25a7c91577df5da31ebf349d7845eef
Parents: 94d60b7
Author: scwf <wa...@huawei.com>
Authored: Tue Dec 30 11:30:47 2014 -0800
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Tue Dec 30 11:30:47 2014 -0800

----------------------------------------------------------------------
 .../spark/sql/hive/HiveInspectorSuite.scala     | 28 ++++++++++++--------
 1 file changed, 17 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/65357f11/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
index bfe608a..f90d360 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive
 
 import java.sql.Date
 import java.util
+import java.util.{Locale, TimeZone}
 
 import org.apache.hadoop.hive.serde2.io.DoubleWritable
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
@@ -63,6 +64,11 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
       .get())
   }
 
+  // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
+  TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
+  // Add Locale setting
+  Locale.setDefault(Locale.US)
+
   val data =
     Literal(true) ::
     Literal(0.asInstanceOf[Byte]) ::
@@ -121,11 +127,11 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
 
   def checkValues(row1: Seq[Any], row2: Seq[Any]): Unit = {
     row1.zip(row2).map {
-      case (r1, r2) => checkValues(r1, r2)
+      case (r1, r2) => checkValue(r1, r2)
     }
   }
 
-  def checkValues(v1: Any, v2: Any): Unit = {
+  def checkValue(v1: Any, v2: Any): Unit = {
     (v1, v2) match {
       case (r1: Decimal, r2: Decimal) =>
         // Ignore the Decimal precision
@@ -195,26 +201,26 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
     })
 
     checkValues(row, unwrap(wrap(row, toInspector(dt)), toInspector(dt)).asInstanceOf[Row])
-    checkValues(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
+    checkValue(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
   }
 
   test("wrap / unwrap Array Type") {
     val dt = ArrayType(dataTypes(0))
 
     val d = row(0) :: row(0) :: Nil
-    checkValues(d, unwrap(wrap(d, toInspector(dt)), toInspector(dt)))
-    checkValues(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
-    checkValues(d, unwrap(wrap(d, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
-    checkValues(d, unwrap(wrap(null, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
+    checkValue(d, unwrap(wrap(d, toInspector(dt)), toInspector(dt)))
+    checkValue(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
+    checkValue(d, unwrap(wrap(d, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
+    checkValue(d, unwrap(wrap(null, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
   }
 
   test("wrap / unwrap Map Type") {
     val dt = MapType(dataTypes(0), dataTypes(1))
 
     val d = Map(row(0) -> row(1))
-    checkValues(d, unwrap(wrap(d, toInspector(dt)), toInspector(dt)))
-    checkValues(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
-    checkValues(d, unwrap(wrap(d, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
-    checkValues(d, unwrap(wrap(null, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
+    checkValue(d, unwrap(wrap(d, toInspector(dt)), toInspector(dt)))
+    checkValue(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
+    checkValue(d, unwrap(wrap(d, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
+    checkValue(d, unwrap(wrap(null, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org