You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by li...@apache.org on 2019/07/26 17:30:24 UTC
[spark] branch master updated: [SPARK-28463][SQL] Thriftserver
throws BigDecimal incompatible with HiveDecimal
This is an automated email from the ASF dual-hosted git repository.
lixiao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 545c7ee [SPARK-28463][SQL] Thriftserver throws BigDecimal incompatible with HiveDecimal
545c7ee is described below
commit 545c7ee00b5d4c5b848be6b27b3820955a0803d6
Author: Yuming Wang <yu...@ebay.com>
AuthorDate: Fri Jul 26 10:30:01 2019 -0700
[SPARK-28463][SQL] Thriftserver throws BigDecimal incompatible with HiveDecimal
## What changes were proposed in this pull request?
How to reproduce this issue:
```shell
build/sbt clean package -Phive -Phive-thriftserver -Phadoop-3.2
export SPARK_PREPEND_CLASSES=true
sbin/start-thriftserver.sh
[root@spark-3267648 spark]# bin/beeline -u jdbc:hive2://localhost:10000/default -e "select cast(1 as decimal(38, 18));"
Connecting to jdbc:hive2://localhost:10000/default
Connected to: Spark SQL (version 3.0.0-SNAPSHOT)
Driver: Hive JDBC (version 2.3.5)
Transaction isolation: TRANSACTION_REPEATABLE_READ
Error: java.lang.ClassCastException: java.math.BigDecimal incompatible with org.apache.hadoop.hive.common.type.HiveDecimal (state=,code=0)
Closing: 0: jdbc:hive2://localhost:10000/default
```
This PR fixes this issue.
## How was this patch tested?
unit tests
Closes #25217 from wangyum/SPARK-28463.
Authored-by: Yuming Wang <yu...@ebay.com>
Signed-off-by: gatorsmile <ga...@gmail.com>
---
.../spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala | 8 ++++++++
.../main/java/org/apache/hive/service/cli/ColumnBasedSet.java | 9 +--------
2 files changed, 9 insertions(+), 8 deletions(-)
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index dd18add..9c53e90 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -654,6 +654,14 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
assert(resultSet.getString(1) === "4.56")
}
}
+
+ test("SPARK-28463: Thriftserver throws BigDecimal incompatible with HiveDecimal") {
+ withJdbcStatement() { statement =>
+ val rs = statement.executeQuery("SELECT CAST(1 AS decimal(38, 18))")
+ assert(rs.next())
+ assert(rs.getBigDecimal(1) === new java.math.BigDecimal("1.000000000000000000"))
+ }
+ }
}
class SingleSessionSuite extends HiveThriftJdbcTest {
diff --git a/sql/hive-thriftserver/v2.3.5/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java b/sql/hive-thriftserver/v2.3.5/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java
index 5546060..3ca18f0 100644
--- a/sql/hive-thriftserver/v2.3.5/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java
+++ b/sql/hive-thriftserver/v2.3.5/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java
@@ -23,9 +23,7 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.thrift.ColumnBuffer;
-import org.apache.hadoop.hive.serde2.thrift.Type;
import org.apache.hive.service.rpc.thrift.TColumn;
import org.apache.hive.service.rpc.thrift.TRow;
import org.apache.hive.service.rpc.thrift.TRowSet;
@@ -105,12 +103,7 @@ public class ColumnBasedSet implements RowSet {
} else {
for (int i = 0; i < fields.length; i++) {
TypeDescriptor descriptor = descriptors[i];
- Object field = fields[i];
- if (field != null && descriptor.getType() == Type.DECIMAL_TYPE) {
- int scale = descriptor.getDecimalDigits();
- field = ((HiveDecimal) field).toFormatString(scale);
- }
- columns.get(i).addValue(descriptor.getType(), field);
+ columns.get(i).addValue(descriptor.getType(), fields[i]);
}
}
return this;
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org