Posted to commits@spark.apache.org by rx...@apache.org on 2016/11/07 06:43:15 UTC

spark git commit: [SPARK-18167][SQL] Disable flaky hive partition pruning test.

Repository: spark
Updated Branches:
  refs/heads/master 46b2e4999 -> 07ac3f09d


[SPARK-18167][SQL] Disable flaky hive partition pruning test.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/07ac3f09
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/07ac3f09
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/07ac3f09

Branch: refs/heads/master
Commit: 07ac3f09daf2b28436bc69f76badd1e36d756e4d
Parents: 46b2e49
Author: Reynold Xin <rx...@databricks.com>
Authored: Sun Nov 6 22:42:05 2016 -0800
Committer: Reynold Xin <rx...@databricks.com>
Committed: Sun Nov 6 22:42:05 2016 -0800

----------------------------------------------------------------------
 .../scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/07ac3f09/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 28e5dff..5e08ef3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -1569,7 +1569,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     ).map(i => Row(i._1, i._2, i._3, i._4)))
   }
 
-  test("SPARK-10562: partition by column with mixed case name") {
+  ignore("SPARK-10562: partition by column with mixed case name") {
     withTable("tbl10562") {
       val df = Seq(2012 -> "a").toDF("Year", "val")
       df.write.partitionBy("Year").saveAsTable("tbl10562")

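For context, the change swaps ScalaTest's `test` registration for `ignore`, which keeps the test body compiling but tells the runner to skip it and report it as ignored. Below is a minimal standalone sketch of that mechanism; it is not Spark's actual test harness, the suite name and assertions are illustrative, and the import assumes ScalaTest 3.1+ (older versions use org.scalatest.FunSuite):

    import org.scalatest.funsuite.AnyFunSuite

    class ExampleSuite extends AnyFunSuite {
      // Registered and executed normally by the test runner.
      test("an active test") {
        assert(1 + 1 == 2)
      }

      // Same signature as `test`, but the runner skips the body and
      // reports the test as ignored rather than passed or failed.
      ignore("a disabled (flaky) test") {
        fail("never executed")
      }
    }

Because `ignore` takes the same name and body as `test`, disabling a flaky test is a one-word change in the diff and is just as easy to revert once the flakiness is fixed.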

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org