Posted to commits@drill.apache.org by am...@apache.org on 2018/10/15 19:25:48 UTC

[drill] 01/04: DRILL-6473: Update MapR Hive

This is an automated email from the ASF dual-hosted git repository.

amansinha pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 0a3cfdebd25ea4186c2a2b75524c34323984ffca
Author: Bohdan Kazydub <bo...@gmail.com>
AuthorDate: Mon May 21 19:11:23 2018 +0300

    DRILL-6473: Update MapR Hive
    
    close apache/drill#1307
---
 .../exec/store/hive/readers/HiveAbstractReader.java   | 19 +++++--------------
 contrib/storage-hive/hive-exec-shade/pom.xml          |  3 +++
 pom.xml                                               |  4 ++--
 3 files changed, 10 insertions(+), 16 deletions(-)

diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/HiveAbstractReader.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/HiveAbstractReader.java
index ba1cd30..5ed6c3b 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/HiveAbstractReader.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/HiveAbstractReader.java
@@ -25,6 +25,7 @@ import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
+import java.util.stream.Collectors;
 
 import org.apache.drill.shaded.guava.com.google.common.util.concurrent.ListenableFuture;
 import io.netty.buffer.DrillBuf;
@@ -203,20 +204,10 @@ public abstract class HiveAbstractReader extends AbstractRecordReader {
           }
         }
       }
-      ColumnProjectionUtils.appendReadColumns(job, columnIds);
-
-      // TODO: Use below overloaded method instead of above simpler version of it, once Hive client dependencies
-      // (from all profiles) will be updated to 2.3 version or above
-//      ColumnProjectionUtils.appendReadColumns(job, columnIds, selectedColumnNames,
-//          Lists.newArrayList(Iterables.transform(getColumns(), new Function<SchemaPath, String>()
-//      {
-//        @Nullable
-//        @Override
-//        public String apply(@Nullable SchemaPath path)
-//        {
-//          return path.getRootSegmentPath();
-//        }
-//      })));
+      List<String> paths = getColumns().stream()
+          .map(SchemaPath::getRootSegmentPath)
+          .collect(Collectors.toList());
+      ColumnProjectionUtils.appendReadColumns(job, columnIds, selectedColumnNames, paths);
 
       for (String columnName : selectedColumnNames) {
         StructField fieldRef = finalOI.getStructFieldRef(columnName);
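
For context, the hunk above replaces the commented-out Guava Function/Iterables.transform workaround with a Java 8 stream pipeline; the removed TODO had deferred this until all Hive client dependencies were on 2.3 or above, which the pom.xml change further down satisfies for the MapR profile by moving to 2.3.3-mapr-1808. A minimal, self-contained sketch of the stream idiom, using a hypothetical Column class in place of Drill's SchemaPath, looks like this:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class StreamMappingSketch {

      // Hypothetical stand-in for Drill's SchemaPath; only the
      // getRootSegmentPath() accessor used in the hunk is modeled here.
      static class Column {
        private final String rootSegmentPath;

        Column(String rootSegmentPath) {
          this.rootSegmentPath = rootSegmentPath;
        }

        String getRootSegmentPath() {
          return rootSegmentPath;
        }
      }

      public static void main(String[] args) {
        List<Column> columns =
            Arrays.asList(new Column("id"), new Column("name"), new Column("ts"));

        // Same shape as the new HiveAbstractReader code: map each column to its
        // root segment path and collect the results into a List<String>.
        List<String> paths = columns.stream()
            .map(Column::getRootSegmentPath)
            .collect(Collectors.toList());

        System.out.println(paths); // prints [id, name, ts]
      }
    }

In the actual reader the resulting list is passed straight to the four-argument ColumnProjectionUtils.appendReadColumns(job, columnIds, selectedColumnNames, paths), as the added lines show.
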
diff --git a/contrib/storage-hive/hive-exec-shade/pom.xml b/contrib/storage-hive/hive-exec-shade/pom.xml
index 3ef1839..b857758 100644
--- a/contrib/storage-hive/hive-exec-shade/pom.xml
+++ b/contrib/storage-hive/hive-exec-shade/pom.xml
@@ -139,6 +139,9 @@
             <filter>
               <artifact>org.apache.hive:hive-exec</artifact>
               <excludes>
+                <!-- This exclusion can be removed once hive-exec uses parquet-hadoop-bundle 1.8.2 or higher,
+                 for example after upgrading Hive to 3.0. To check whether it is safe to remove the exclusion,
+                 run the TestHiveStorage.readFromAlteredPartitionedTableWithEmptyGroupType() test case. -->
                 <exclude>org/apache/parquet/schema/*</exclude>
               </excludes>
             </filter>
diff --git a/pom.xml b/pom.xml
index 5d00ee6..3a2d21e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2439,9 +2439,9 @@
       <properties>
         <alt-hadoop>mapr</alt-hadoop>
         <rat.excludeSubprojects>true</rat.excludeSubprojects>
-        <hive.version>2.1.1-mapr-1710</hive.version>
+        <hive.version>2.3.3-mapr-1808</hive.version>
         <hbase.version>1.1.1-mapr-1602-m7-5.2.0</hbase.version>
-        <hadoop.version>2.7.0-mapr-1707</hadoop.version>
+        <hadoop.version>2.7.0-mapr-1808</hadoop.version>
         <zookeeper.version>3.4.11-mapr-1808</zookeeper.version>
       </properties>
       <dependencyManagement>