Posted to commits@drill.apache.org by ar...@apache.org on 2018/03/03 18:47:10 UTC

[04/17] drill git commit: DRILL-6204: Pass table columns without partition columns to empty Hive reader

DRILL-6204: Pass table columns without partition columns to empty Hive reader

closes #1146


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/1697e531
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/1697e531
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/1697e531

Branch: refs/heads/master
Commit: 1697e531dce5fd094f3b5d4a29448df704ed6c4a
Parents: 6bf04ec
Author: Arina Ielchiieva <ar...@gmail.com>
Authored: Fri Mar 2 13:38:00 2018 +0200
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Sat Mar 3 19:47:36 2018 +0200

----------------------------------------------------------------------
 .../exec/store/hive/HiveDrillNativeScanBatchCreator.java  | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/1697e531/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveDrillNativeScanBatchCreator.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveDrillNativeScanBatchCreator.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveDrillNativeScanBatchCreator.java
index 3861aa0..43318d1 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveDrillNativeScanBatchCreator.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveDrillNativeScanBatchCreator.java
@@ -76,11 +76,11 @@ public class HiveDrillNativeScanBatchCreator implements BatchCreator<HiveDrillNa
 
     final List<String[]> partitionColumns = Lists.newArrayList();
     final List<Integer> selectedPartitionColumns = Lists.newArrayList();
-    List<SchemaPath> newColumns = columns;
+    List<SchemaPath> tableColumns = columns;
     if (!selectAllQuery) {
       // Separate out the partition and non-partition columns. Non-partition columns are passed directly to the
       // ParquetRecordReader. Partition columns are passed to ScanBatch.
-      newColumns = Lists.newArrayList();
+      tableColumns = Lists.newArrayList();
       Pattern pattern = Pattern.compile(String.format("%s[0-9]+", partitionDesignator));
       for (SchemaPath column : columns) {
         Matcher m = pattern.matcher(column.getRootSegmentPath());
@@ -88,7 +88,7 @@ public class HiveDrillNativeScanBatchCreator implements BatchCreator<HiveDrillNa
           selectedPartitionColumns.add(
               Integer.parseInt(column.getRootSegmentPath().substring(partitionDesignator.length())));
         } else {
-          newColumns.add(column);
+          tableColumns.add(column);
         }
       }
     }
@@ -139,7 +139,7 @@ public class HiveDrillNativeScanBatchCreator implements BatchCreator<HiveDrillNa
                 CodecFactory.createDirectCodecFactory(fs.getConf(),
                     new ParquetDirectByteBufferAllocator(oContext.getAllocator()), 0),
                 parquetMetadata,
-                newColumns,
+                tableColumns,
                 containsCorruptDates)
             );
             Map<String, String> implicitValues = Maps.newLinkedHashMap();
@@ -174,7 +174,7 @@ public class HiveDrillNativeScanBatchCreator implements BatchCreator<HiveDrillNa
     // If there are no readers created (which is possible when the table is empty or no row groups are matched),
     // create an empty RecordReader to output the schema
     if (readers.size() == 0) {
-      readers.add(new HiveDefaultReader(table, null, null, columns, context, conf,
+      readers.add(new HiveDefaultReader(table, null, null, tableColumns, context, conf,
         ImpersonationUtil.createProxyUgi(config.getUserName(), context.getQueryUserName())));
     }
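
Note: the following is a minimal, self-contained Java sketch of the column-separation logic shown in the first two hunks, not Drill's actual classes. The partition designator is assumed to be "dir" (Drill's default partition column label), so partition columns look like dir0, dir1, and so on; plain strings stand in for SchemaPath.

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PartitionColumnSplit {
  public static void main(String[] args) {
    String partitionDesignator = "dir";
    List<String> columns = List.of("name", "dir0", "age", "dir1");

    List<Integer> selectedPartitionColumns = new ArrayList<>();
    List<String> tableColumns = new ArrayList<>();

    // Columns matching "dir[0-9]+" are partition columns; their numeric
    // suffix is recorded for ScanBatch. Everything else is a table column
    // destined for the ParquetRecordReader.
    Pattern pattern = Pattern.compile(String.format("%s[0-9]+", partitionDesignator));
    for (String column : columns) {
      Matcher m = pattern.matcher(column);
      if (m.matches()) {
        selectedPartitionColumns.add(
            Integer.parseInt(column.substring(partitionDesignator.length())));
      } else {
        tableColumns.add(column);
      }
    }

    System.out.println(tableColumns);              // [name, age]
    System.out.println(selectedPartitionColumns);  // [0, 1]
  }
}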
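
The last hunk is the actual fix: when no readers were created (an empty table, or no row groups matched), the fallback HiveDefaultReader was built from the raw projection "columns", which still contains synthetic partition columns such as dir0, so the schema it emitted disagreed with the non-empty path. Below is a hedged sketch of the corrected fallback, using assumed stand-in names (RecordReader, emptyReader) rather than Drill's real API.

import java.util.ArrayList;
import java.util.List;

public class EmptyReaderFallback {
  // Hypothetical stand-in for Drill's record reader abstraction.
  interface RecordReader { List<String> schemaColumns(); }

  // Hypothetical helper: a reader that emits only a schema, no rows.
  static RecordReader emptyReader(List<String> cols) {
    return () -> cols;
  }

  public static void main(String[] args) {
    List<String> columns = List.of("name", "dir0", "age");  // raw projection
    List<String> tableColumns = List.of("name", "age");     // after separation

    List<RecordReader> readers = new ArrayList<>();
    // ... per-row-group Parquet readers would be added here; assume none matched ...

    if (readers.isEmpty()) {
      // Before DRILL-6204 this used "columns", so an empty table's schema
      // wrongly included dir0; the commit switches it to "tableColumns".
      readers.add(emptyReader(tableColumns));
    }
    System.out.println(readers.get(0).schemaColumns());     // [name, age]
  }
}

This keeps the empty-table path consistent with the populated path: both now describe the schema using only real table columns, while partition values remain the responsibility of ScanBatch.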