You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2014/07/18 23:38:20 UTC
git commit: [SPARK-2540] [SQL] Add HiveDecimal & HiveVarchar support in unwrapping data
Repository: spark
Updated Branches:
refs/heads/master 3a1709fa5 -> 7f1720813
[SPARK-2540] [SQL] Add HiveDecimal & HiveVarchar support in unwrapping data
Author: Cheng Hao <ha...@intel.com>
Closes #1436 from chenghao-intel/unwrapdata and squashes the following commits:
34cc21a [Cheng Hao] update the table scan accordingly since the unwrapData function changed
afc39da [Cheng Hao] Polish the code
39d6475 [Cheng Hao] Add HiveDecimal & HiveVarchar support in unwrap data
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7f172081
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7f172081
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7f172081
Branch: refs/heads/master
Commit: 7f1720813793e155743b58eae5228298e894b90d
Parents: 3a1709f
Author: Cheng Hao <ha...@intel.com>
Authored: Fri Jul 18 16:38:11 2014 -0500
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Fri Jul 18 16:38:11 2014 -0500
----------------------------------------------------------------------
.../apache/spark/sql/hive/execution/HiveTableScan.scala | 12 +-----------
.../main/scala/org/apache/spark/sql/hive/hiveUdfs.scala | 4 ++++
2 files changed, 5 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/7f172081/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
index ef8bae7..e7016fa 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
@@ -96,19 +96,9 @@ case class HiveTableScan(
.getOrElse(sys.error(s"Can't find attribute $a"))
val fieldObjectInspector = ref.getFieldObjectInspector
- val unwrapHiveData = fieldObjectInspector match {
- case _: HiveVarcharObjectInspector =>
- (value: Any) => value.asInstanceOf[HiveVarchar].getValue
- case _: HiveDecimalObjectInspector =>
- (value: Any) => BigDecimal(value.asInstanceOf[HiveDecimal].bigDecimalValue())
- case _ =>
- identity[Any] _
- }
-
(row: Any, _: Array[String]) => {
val data = objectInspector.getStructFieldData(row, ref)
- val hiveData = unwrapData(data, fieldObjectInspector)
- if (hiveData != null) unwrapHiveData(hiveData) else null
+ unwrapData(data, fieldObjectInspector)
}
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/7f172081/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
index 9b10530..fc33c5b 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
@@ -280,6 +280,10 @@ private[hive] case class HiveGenericUdf(name: String, children: Seq[Expression])
private[hive] trait HiveInspectors {
def unwrapData(data: Any, oi: ObjectInspector): Any = oi match {
+ case hvoi: HiveVarcharObjectInspector =>
+ if (data == null) null else hvoi.getPrimitiveJavaObject(data).getValue
+ case hdoi: HiveDecimalObjectInspector =>
+ if (data == null) null else BigDecimal(hdoi.getPrimitiveJavaObject(data).bigDecimalValue())
case pi: PrimitiveObjectInspector => pi.getPrimitiveJavaObject(data)
case li: ListObjectInspector =>
Option(li.getList(data))