You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2021/04/20 10:18:32 UTC
[spark] branch master updated: [SPARK-35068][SQL] Add tests for
ANSI intervals to HiveThriftBinaryServerSuite
This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new b219e37 [SPARK-35068][SQL] Add tests for ANSI intervals to HiveThriftBinaryServerSuite
b219e37 is described below
commit b219e37af3cfe41e54962e310e9947ba3605d566
Author: Angerszhuuuu <an...@gmail.com>
AuthorDate: Tue Apr 20 13:17:59 2021 +0300
[SPARK-35068][SQL] Add tests for ANSI intervals to HiveThriftBinaryServerSuite
### What changes were proposed in this pull request?
After the PR https://github.com/apache/spark/pull/32209, this should be possible now.
We can add test cases for ANSI intervals to HiveThriftBinaryServerSuite
### Why are the changes needed?
Add more test cases
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Added UT
Closes #32250 from AngersZhuuuu/SPARK-35068.
Authored-by: Angerszhuuuu <an...@gmail.com>
Signed-off-by: Max Gekk <ma...@gmail.com>
---
.../thriftserver/HiveThriftServer2Suites.scala | 42 +++++++++++++++++++---
1 file changed, 37 insertions(+), 5 deletions(-)
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index 602b7e5..792aea1 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -675,21 +675,53 @@ class HiveThriftBinaryServerSuite extends HiveThriftServer2Test {
}
assert(e.getMessage.contains("org.apache.spark.sql.catalyst.parser.ParseException"))
}
+
+ withJdbcStatement() { statement =>
+ val rs = statement.executeQuery("SELECT interval '3-1' year to month;")
+ assert(rs.next())
+ assert(rs.getString(1) === "3-1")
+ }
+
+ withJdbcStatement() { statement =>
+ val rs = statement.executeQuery("SELECT interval '3 1:1:1' day to second;")
+ assert(rs.next())
+ assert(rs.getString(1) === "3 01:01:01.000000000")
+ }
}
test("Query Intervals in VIEWs through thrift server") {
val viewName1 = "view_interval_1"
val viewName2 = "view_interval_2"
- val ddl1 = s"CREATE GLOBAL TEMP VIEW $viewName1 AS SELECT INTERVAL 1 DAY AS i"
+ val ddl1 =
+ s"""
+ |CREATE GLOBAL TEMP VIEW $viewName1
+ |AS SELECT
+ | INTERVAL 1 DAY AS a,
+ | INTERVAL '2-1' YEAR TO MONTH AS b,
+ | INTERVAL '3 1:1:1' DAY TO SECOND AS c
+ """.stripMargin
val ddl2 = s"CREATE TEMP VIEW $viewName2 as select * from global_temp.$viewName1"
withJdbcStatement(viewName1, viewName2) { statement =>
statement.executeQuery(ddl1)
statement.executeQuery(ddl2)
- val rs = statement.executeQuery(s"SELECT v1.i as a, v2.i as b FROM global_temp.$viewName1" +
- s" v1 join $viewName2 v2 on date_part('DAY', v1.i) = date_part('DAY', v2.i)")
+ val rs = statement.executeQuery(
+ s"""
+ |SELECT v1.a AS a1, v2.a AS a2,
+ | v1.b AS b1, v2.b AS b2,
+ | v1.c AS c1, v2.c AS c2
+ |FROM global_temp.$viewName1 v1
+ |JOIN $viewName2 v2
+ |ON date_part('DAY', v1.a) = date_part('DAY', v2.a)
+ | AND v1.b = v2.b
+ | AND v1.c = v2.c
+ |""".stripMargin)
while (rs.next()) {
- assert(rs.getString("a") === "1 days")
- assert(rs.getString("b") === "1 days")
+ assert(rs.getString("a1") === "1 days")
+ assert(rs.getString("a2") === "1 days")
+ assert(rs.getString("b1") === "2-1")
+ assert(rs.getString("b2") === "2-1")
+ assert(rs.getString("c1") === "3 01:01:01.000000000")
+ assert(rs.getString("c2") === "3 01:01:01.000000000")
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org