You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@drill.apache.org by vo...@apache.org on 2020/09/14 16:31:43 UTC
[drill] branch master updated: DRILL-7785: Some hive tables fail with UndeclaredThrowableException
This is an automated email from the ASF dual-hosted git repository.
volodymyr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git
The following commit(s) were added to refs/heads/master by this push:
new 314fe4d DRILL-7785: Some hive tables fail with UndeclaredThrowableException
314fe4d is described below
commit 314fe4da33065c0eb151b173e8301b9352e4b767
Author: Volodymyr Vysotskyi <vv...@gmail.com>
AuthorDate: Tue Sep 1 17:36:42 2020 +0300
DRILL-7785: Some hive tables fail with UndeclaredThrowableException
---
.../drill/exec/store/hive/readers/HiveDefaultRecordReader.java | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/HiveDefaultRecordReader.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/HiveDefaultRecordReader.java
index d490f5d..8598643 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/HiveDefaultRecordReader.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/HiveDefaultRecordReader.java
@@ -271,10 +271,13 @@ public class HiveDefaultRecordReader extends AbstractRecordReader {
this.partitionToTableSchemaConverter = (obj) -> obj;
this.finalObjInspector = tableObjInspector;
+ HiveUtilities.addConfToJob(job, hiveTableProperties);
job.setInputFormat(HiveUtilities.getInputFormatClass(job, hiveTable.getSd(), hiveTable));
HiveUtilities.verifyAndAddTransactionalProperties(job, hiveTable.getSd());
} else {
- this.partitionDeserializer = createDeserializer(job, partition.getSd(), HiveUtilities.getPartitionMetadata(partition, hiveTable));
+ Properties partitionProperties = HiveUtilities.getPartitionMetadata(partition, hiveTable);
+ HiveUtilities.addConfToJob(job, partitionProperties);
+ this.partitionDeserializer = createDeserializer(job, partition.getSd(), partitionProperties);
this.partitionObjInspector = getStructOI(partitionDeserializer);
this.finalObjInspector = (StructObjectInspector) ObjectInspectorConverters.getConvertedOI(partitionObjInspector, tableObjInspector);