Posted to commits@drill.apache.org by gp...@apache.org on 2019/01/04 06:53:59 UTC

[drill] 04/10: DRILL-6929: Exclude maprfs jar for default profile closes #1586

This is an automated email from the ASF dual-hosted git repository.

gparai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit e7558b7909e855d36d5664b93e9b565f8cedca19
Author: Volodymyr Vysotskyi <vv...@gmail.com>
AuthorDate: Wed Dec 26 20:31:58 2018 +0200

    DRILL-6929: Exclude maprfs jar for default profile
    closes #1586
---
 contrib/format-maprdb/pom.xml                      |  3 +-
 contrib/storage-hive/core/pom.xml                  | 33 +++++++++++++++++----
 ...ertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java | 24 +++++----------
 .../drill/exec/store/hive/HiveStoragePlugin.java   | 34 +++++++++++++++-------
 contrib/storage-jdbc/pom.xml                       |  1 -
 exec/java-exec/pom.xml                             |  1 -
 pom.xml                                            |  5 ++++
 7 files changed, 66 insertions(+), 35 deletions(-)
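
In short: the default Maven profile no longer depends on drill-format-mapr (and, through it, the maprfs jar). The dependency moves inside the mapr profile in contrib/storage-hive/core/pom.xml, the MapR-DB planner rule is compiled from a profile-only source root (scrMapr/main/java, registered via build-helper-maven-plugin), and HiveStoragePlugin now reaches the MapR classes reflectively, so the Hive module still compiles and runs without them. Builds that need MapR support enable the profile explicitly (e.g. mvn clean install -Pmapr).

Below is a minimal, standalone sketch of that reflective loading pattern, assuming only that the optional rule class exposes a public static INSTANCE field, as the diff shows; it is an illustration, not Drill code:

    import java.util.ArrayList;
    import java.util.List;

    // Standalone illustration of the pattern used in HiveStoragePlugin.getOptimizerRules():
    // an optional planner rule is looked up by name and registered only when its jar is on
    // the classpath. Apart from the rule's fully qualified name, all names here are
    // placeholders, not Drill APIs.
    public class OptionalRuleLoader {

      static List<Object> loadOptionalRules() {
        List<Object> rules = new ArrayList<>();
        try {
          // Absent under the default profile, so Class.forName() throws ClassNotFoundException.
          Class<?> ruleClass = Class.forName(
              "org.apache.drill.exec.planner.sql.logical.ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan");
          // The rule exposes a public static INSTANCE field; fetch it reflectively.
          rules.add(ruleClass.getField("INSTANCE").get(null));
        } catch (ReflectiveOperationException e) {
          // Optional dependency missing: log and skip the rule instead of failing the build or the query.
          System.err.println("MapR-DB native reader rule not available: " + e);
        }
        return rules;
      }

      public static void main(String[] args) {
        System.out.println("Optional rules loaded: " + loadOptionalRules().size());
      }
    }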

diff --git a/contrib/format-maprdb/pom.xml b/contrib/format-maprdb/pom.xml
index 5f238e1..0a09c3d 100644
--- a/contrib/format-maprdb/pom.xml
+++ b/contrib/format-maprdb/pom.xml
@@ -100,7 +100,6 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>build-helper-maven-plugin</artifactId>
-        <version>1.9.1</version>
         <executions>
           <execution>
             <id>add-sources-as-resources</id>
@@ -187,12 +186,14 @@
       <artifactId>maprdb</artifactId>
       <version>${mapr.release.version}</version>
       <classifier>tests</classifier>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>com.mapr.hadoop</groupId>
       <artifactId>maprfs</artifactId>
       <version>${mapr.release.version}</version>
       <classifier>tests</classifier>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>com.jcraft</groupId>
diff --git a/contrib/storage-hive/core/pom.xml b/contrib/storage-hive/core/pom.xml
index edb6a50..0203efb 100644
--- a/contrib/storage-hive/core/pom.xml
+++ b/contrib/storage-hive/core/pom.xml
@@ -142,11 +142,6 @@
         </exclusion>
       </exclusions>
     </dependency>
-    <dependency>
-      <groupId>org.apache.drill.contrib</groupId>
-      <artifactId>drill-format-mapr</artifactId>
-      <version>${project.version}</version>
-    </dependency>
   </dependencies>
 
   <build>
@@ -175,15 +170,41 @@
   <profiles>
     <profile>
       <id>mapr</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>build-helper-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>add-mapr-sources</id>
+                <phase>generate-sources</phase>
+                <goals>
+                  <goal>add-source</goal>
+                </goals>
+                <configuration>
+                  <sources>
+                    <source>scrMapr/main/java</source>
+                  </sources>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
       <dependencies>
         <dependency>
+          <groupId>org.apache.drill.contrib</groupId>
+          <artifactId>drill-format-mapr</artifactId>
+          <version>${project.version}</version>
+        </dependency>
+        <dependency>
           <groupId>com.tdunning</groupId>
           <artifactId>json</artifactId>
         </dependency>
         <dependency>
           <groupId>org.apache.hive</groupId>
           <artifactId>hive-maprdb-json-handler</artifactId>
-          <scope>runtime</scope>
         </dependency>
         <dependency>
           <groupId>com.mapr.db</groupId>
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java b/contrib/storage-hive/core/scrMapr/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
similarity index 90%
rename from contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
rename to contrib/storage-hive/core/scrMapr/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
index 4994a72..b8c2675 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
+++ b/contrib/storage-hive/core/scrMapr/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
@@ -21,27 +21,26 @@ import org.apache.calcite.plan.RelOptRuleCall;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rel.type.RelDataTypeField;
-import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.planner.logical.DrillScanRel;
 import org.apache.drill.exec.planner.logical.RelOptHelper;
 import org.apache.drill.exec.store.StoragePluginOptimizerRule;
 import org.apache.drill.exec.store.hive.HiveMetadataProvider;
 import org.apache.drill.exec.store.hive.HiveReadEntry;
 import org.apache.drill.exec.store.hive.HiveScan;
+import org.apache.drill.exec.store.hive.HiveUtilities;
 import org.apache.drill.exec.store.mapr.db.MapRDBFormatPlugin;
 import org.apache.drill.exec.store.mapr.db.MapRDBFormatPluginConfig;
 import org.apache.drill.exec.store.mapr.db.json.JsonScanSpec;
 import org.apache.drill.exec.store.mapr.db.json.JsonTableGroupScan;
+import org.apache.hadoop.hive.maprdb.json.input.HiveMapRDBJsonInputFormat;
 import org.ojai.DocumentConstants;
 
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
 
-import static org.apache.drill.exec.store.hive.HiveUtilities.nativeReadersRuleMatches;
-
 /**
  * Convert Hive scan to use Drill's native MapR-DB reader instead of Hive's MapR-DB JSON Handler.
  */
@@ -69,15 +68,7 @@ public class ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan extends StoragePlugi
    */
   @Override
   public boolean matches(RelOptRuleCall call) {
-    try {
-      return nativeReadersRuleMatches(call,
-          Class.forName("org.apache.hadoop.hive.maprdb.json.input.HiveMapRDBJsonInputFormat"));
-    } catch (ClassNotFoundException e) {
-      throw UserException.resourceError(e)
-          .message("Current Drill build is not designed for working with Hive MapR-DB tables. " +
-              "Please disable \"%s\" option", ExecConstants.HIVE_OPTIMIZE_MAPRDB_JSON_SCAN_WITH_NATIVE_READER)
-          .build(logger);
-    }
+    return HiveUtilities.nativeReadersRuleMatches(call, HiveMapRDBJsonInputFormat.class);
   }
 
   @Override
@@ -110,15 +101,16 @@ public class ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan extends StoragePlugi
         To ensure Drill MapR-DB Json scan will be chosen, reduce Hive scan importance to 0.
        */
       call.getPlanner().setImportance(hiveScanRel, 0.0);
-    } catch (final Exception e) {
-      logger.warn("Failed to convert HiveScan to JsonScanSpec", e);
+    } catch (DrillRuntimeException e) {
+      // TODO: Improve error handling after allowing to throw IOException from StoragePlugin.getFormatPlugin()
+      logger.warn("Failed to convert HiveScan to JsonScanSpec. Fallback to HiveMapR-DB connector.", e);
     }
   }
 
   /**
    * Helper method which creates a DrillScanRel with native Drill HiveScan.
    */
-  private DrillScanRel createNativeScanRel(final DrillScanRel hiveScanRel) throws Exception {
+  private DrillScanRel createNativeScanRel(final DrillScanRel hiveScanRel) {
     RelDataTypeFactory typeFactory = hiveScanRel.getCluster().getTypeFactory();
     HiveScan hiveScan = (HiveScan) hiveScanRel.getGroupScan();
     Map<String, String> parameters = hiveScan.getHiveReadEntry().getHiveTableWrapper().getParameters();
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java
index a65a69e..a8c789d 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java
@@ -27,6 +27,7 @@ import java.util.Set;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
+import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableSet;
 
 import org.apache.calcite.schema.Schema.TableType;
@@ -41,7 +42,6 @@ import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ops.OptimizerRulesContext;
 import org.apache.drill.exec.physical.base.AbstractGroupScan;
-import org.apache.drill.exec.planner.sql.logical.ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan;
 import org.apache.drill.exec.planner.sql.logical.ConvertHiveParquetScanToDrillParquetScan;
 import org.apache.drill.exec.planner.sql.logical.HivePushPartitionFilterIntoScan;
 import org.apache.drill.exec.server.DrillbitContext;
@@ -55,8 +55,7 @@ import org.apache.drill.exec.store.hive.schema.HiveSchemaFactory;
 
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.drill.exec.store.mapr.db.MapRDBFormatPlugin;
-import org.apache.drill.exec.store.mapr.db.MapRDBFormatPluginConfig;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -217,7 +216,14 @@ public class HiveStoragePlugin extends AbstractStoragePlugin {
       ruleBuilder.add(ConvertHiveParquetScanToDrillParquetScan.INSTANCE);
     }
     if (options.getBoolean(ExecConstants.HIVE_OPTIMIZE_MAPRDB_JSON_SCAN_WITH_NATIVE_READER)) {
-      ruleBuilder.add(ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.INSTANCE);
+      try {
+        Class<?> hiveToDrillMapRDBJsonRuleClass =
+            Class.forName("org.apache.drill.exec.planner.sql.logical.ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan");
+        ruleBuilder.add((StoragePluginOptimizerRule) hiveToDrillMapRDBJsonRuleClass.getField("INSTANCE").get(null));
+      } catch (ReflectiveOperationException e) {
+        logger.warn("Current Drill build is not designed for working with Hive MapR-DB tables. " +
+            "Please disable {} option", ExecConstants.HIVE_OPTIMIZE_MAPRDB_JSON_SCAN_WITH_NATIVE_READER);
+      }
     }
     return ruleBuilder.build();
   }
@@ -225,13 +231,21 @@ public class HiveStoragePlugin extends AbstractStoragePlugin {
   @Override
   public FormatPlugin getFormatPlugin(FormatPluginConfig formatConfig) {
     //  TODO: implement formatCreator similar to FileSystemPlugin formatCreator. DRILL-6621
-    if (formatConfig instanceof MapRDBFormatPluginConfig) {
-      try {
-        return new MapRDBFormatPlugin(HIVE_MAPRDB_FORMAT_PLUGIN_NAME, context, hiveConf, config,
-            (MapRDBFormatPluginConfig) formatConfig);
-      } catch (IOException e) {
-        throw new DrillRuntimeException("The error is occurred while connecting to MapR-DB", e);
+    try {
+      Class<?> mapRDBFormatPluginConfigClass =
+          Class.forName("org.apache.drill.exec.store.mapr.db.MapRDBFormatPluginConfig");
+      Class<?> mapRDBFormatPluginClass =
+          Class.forName("org.apache.drill.exec.store.mapr.db.MapRDBFormatPlugin");
+
+      if (mapRDBFormatPluginConfigClass.isInstance(formatConfig)) {
+        return (FormatPlugin) mapRDBFormatPluginClass.getConstructor(
+              new Class[]{String.class, DrillbitContext.class, Configuration.class,
+                  StoragePluginConfig.class, mapRDBFormatPluginConfigClass})
+          .newInstance(
+              new Object[]{HIVE_MAPRDB_FORMAT_PLUGIN_NAME, context, hiveConf, config, formatConfig});
       }
+    } catch (ReflectiveOperationException e) {
+      throw new DrillRuntimeException("The error is occurred while connecting to MapR-DB or instantiating mapRDBFormatPlugin", e);
     }
     throw new DrillRuntimeException(String.format("Hive storage plugin doesn't support usage of %s format plugin",
         formatConfig.getClass().getName()));
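
The getFormatPlugin() change above follows the same idea: MapRDBFormatPluginConfig and MapRDBFormatPlugin are resolved by name and the plugin is constructed through getConstructor(...).newInstance(...), so the compile-time imports can be dropped. Here is a standalone sketch of that reflective construction, with a JDK class standing in for the MapR plugin so it runs anywhere; the helper name is made up for the example:

    import java.lang.reflect.Constructor;

    // Illustration only: resolve an optional class by name, select a constructor by its
    // parameter types, and instantiate it, so the caller needs no compile-time dependency
    // on the optional jar.
    public class ReflectiveFactory {

      static Object newOptionalInstance(String className, Class<?>[] paramTypes, Object[] args) {
        try {
          Class<?> clazz = Class.forName(className);
          Constructor<?> ctor = clazz.getConstructor(paramTypes);
          return ctor.newInstance(args);
        } catch (ReflectiveOperationException e) {
          // Mirrors the commit: surface a runtime error instead of requiring the jar at compile time.
          throw new IllegalStateException("Optional class not available or not constructible: " + className, e);
        }
      }

      public static void main(String[] args) {
        // Demonstrate with a JDK class so the sketch is self-contained.
        Object sb = newOptionalInstance("java.lang.StringBuilder",
            new Class<?>[]{String.class}, new Object[]{"maprfs excluded by default"});
        System.out.println(sb);
      }
    }
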
diff --git a/contrib/storage-jdbc/pom.xml b/contrib/storage-jdbc/pom.xml
index eca206a..efaf5c1 100755
--- a/contrib/storage-jdbc/pom.xml
+++ b/contrib/storage-jdbc/pom.xml
@@ -153,7 +153,6 @@
         <!-- Allows us to reserve ports for external servers that we will launch  -->
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>build-helper-maven-plugin</artifactId>
-        <version>3.0.0</version>
         <executions>
           <execution>
             <id>reserve-network-port</id>
diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index 6e6a9d8..eff336c 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -752,7 +752,6 @@
       <plugin> <!-- source file must end up in the jar for janino parsing -->
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>build-helper-maven-plugin</artifactId>
-        <version>1.9.1</version>
         <executions>
           <execution>
             <id>add-sources-as-resources</id>
diff --git a/pom.xml b/pom.xml
index a6466a4..f0ecf80 100644
--- a/pom.xml
+++ b/pom.xml
@@ -711,6 +711,11 @@
           <artifactId>maven-enforcer-plugin</artifactId>
           <version>3.0.0-M2</version>
         </plugin>
+        <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>build-helper-maven-plugin</artifactId>
+          <version>3.0.0</version>
+        </plugin>
         <plugin> <!-- classpath scanning  -->
           <groupId>org.codehaus.mojo</groupId>
           <artifactId>exec-maven-plugin</artifactId>
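
One way to see the effect of the profile split on a built distribution is a simple classpath probe for a class that now ships only with the mapr profile (class name taken from the diff above; the probe itself is illustrative, not part of the commit):

    // Prints which flavor of MapR-DB support is present on the current classpath.
    public class MaprSupportCheck {
      public static void main(String[] args) {
        try {
          Class.forName("org.apache.drill.exec.store.mapr.db.MapRDBFormatPlugin");
          System.out.println("MapR-DB format plugin found: built with -Pmapr.");
        } catch (ClassNotFoundException e) {
          System.out.println("MapR-DB format plugin absent: default profile build.");
        }
      }
    }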