You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2019/02/18 03:25:36 UTC
[spark] branch branch-2.4 updated:
[SPARK-26897][SQL][TEST][FOLLOW-UP] Remove workaround for 2.2.0 and 2.1.x
in HiveExternalCatalogVersionsSuite
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-2.4 by this push:
new 094cabc [SPARK-26897][SQL][TEST][FOLLOW-UP] Remove workaround for 2.2.0 and 2.1.x in HiveExternalCatalogVersionsSuite
094cabc is described below
commit 094cabc3f72da765cf2b4adab9bae61d05aaef45
Author: Takeshi Yamamuro <ya...@apache.org>
AuthorDate: Mon Feb 18 11:24:36 2019 +0800
[SPARK-26897][SQL][TEST][FOLLOW-UP] Remove workaround for 2.2.0 and 2.1.x in HiveExternalCatalogVersionsSuite
## What changes were proposed in this pull request?
This PR just removes the workaround for 2.2.0 and 2.1.x in HiveExternalCatalogVersionsSuite.
## How was this patch tested?
Passed the existing Jenkins tests.
Closes #23817 from maropu/SPARK-26607-FOLLOWUP.
Authored-by: Takeshi Yamamuro <ya...@apache.org>
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
(cherry picked from commit e2b8cc65cd579374ddbd70b93c9fcefe9b8873d9)
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
.../sql/hive/HiveExternalCatalogVersionsSuite.scala | 17 ++++-------------
1 file changed, 4 insertions(+), 13 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
index 598b08b..0ede33d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
@@ -257,19 +257,10 @@ object PROCESS_TABLES extends QueryTest with SQLTestUtils {
// SPARK-22356: overlapped columns between data and partition schema in data source tables
val tbl_with_col_overlap = s"tbl_with_col_overlap_$index"
- // For Spark 2.2.0 and 2.1.x, the behavior is different from Spark 2.0, 2.2.1, 2.3+
- if (testingVersions(index).startsWith("2.1") || testingVersions(index) == "2.2.0") {
- spark.sql("msck repair table " + tbl_with_col_overlap)
- assert(spark.table(tbl_with_col_overlap).columns === Array("i", "j", "p"))
- checkAnswer(spark.table(tbl_with_col_overlap), Row(1, 1, 1) :: Row(1, 1, 1) :: Nil)
- assert(sql("desc " + tbl_with_col_overlap).select("col_name")
- .as[String].collect().mkString(",").contains("i,j,p"))
- } else {
- assert(spark.table(tbl_with_col_overlap).columns === Array("i", "p", "j"))
- checkAnswer(spark.table(tbl_with_col_overlap), Row(1, 1, 1) :: Row(1, 1, 1) :: Nil)
- assert(sql("desc " + tbl_with_col_overlap).select("col_name")
- .as[String].collect().mkString(",").contains("i,p,j"))
- }
+ assert(spark.table(tbl_with_col_overlap).columns === Array("i", "p", "j"))
+ checkAnswer(spark.table(tbl_with_col_overlap), Row(1, 1, 1) :: Row(1, 1, 1) :: Nil)
+ assert(sql("desc " + tbl_with_col_overlap).select("col_name")
+ .as[String].collect().mkString(",").contains("i,p,j"))
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org