Posted to commits@hive.apache.org by se...@apache.org on 2016/09/08 01:52:43 UTC

[23/38] hive git commit: HIVE-14656: Clean up driver instance in get_splits (Jason Dere, reviewed by Prasanth Jayachandran)

HIVE-14656: Clean up driver instance in get_splits (Jason Dere, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/22bc78e5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/22bc78e5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/22bc78e5

Branch: refs/heads/hive-14535
Commit: 22bc78e5551d9ce633a465eaf240fa0ac2984e97
Parents: 5124621
Author: Jason Dere <jd...@hortonworks.com>
Authored: Tue Sep 6 10:33:07 2016 -0700
Committer: Jason Dere <jd...@hortonworks.com>
Committed: Tue Sep 6 10:36:00 2016 -0700

----------------------------------------------------------------------
 .../ql/udf/generic/GenericUDTFGetSplits.java    | 82 +++++++++++---------
 1 file changed, 44 insertions(+), 38 deletions(-)
----------------------------------------------------------------------
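The patch below wraps the Driver usage in GenericUDTFGetSplits in a try/finally so the instance is closed and destroyed on every exit path, including the early HiveException throws, and it calls driver.resetQueryState() before the same instance is reused to run the temporary-table (CTAS) fallback. A minimal sketch of that cleanup shape, using only the Driver calls visible in the diff, is shown here; the class and helper method names are illustrative, not part of the patch, and the real method does considerably more (TezTask validation, LLAP mode switching, the CTAS fallback):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.QueryPlan;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

    public class DriverCleanupSketch {
      // Hypothetical helper illustrating the try/finally shape of createPlanFragment().
      static QueryPlan compilePlan(HiveConf conf, String query) throws HiveException {
        Driver driver = new Driver(conf);
        try {
          CommandProcessorResponse cpr = driver.compileAndRespond(query);
          if (cpr.getResponseCode() != 0) {
            // Throwing inside the try block is now safe: the finally block
            // still releases the driver, which the pre-patch code did not do.
            throw new HiveException("Failed to compile query: " + cpr.getException());
          }
          // Extract whatever is needed from the plan before the driver is torn down.
          return driver.getPlan();
        } finally {
          driver.close();    // release per-query resources
          driver.destroy();  // release any remaining state held by this instance
        }
      }
    }
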


http://git-wip-us.apache.org/repos/asf/hive/blob/22bc78e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
index 3741ddf..9ddbd7e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
@@ -228,58 +228,64 @@ public class GenericUDTFGetSplits extends GenericUDTF {
     }
 
     Driver driver = new Driver(conf);
-    CommandProcessorResponse cpr = driver.compileAndRespond(query);
-    if (cpr.getResponseCode() != 0) {
-      throw new HiveException("Failed to compile query: " + cpr.getException());
-    }
+    try {
+      CommandProcessorResponse cpr = driver.compileAndRespond(query);
+      if (cpr.getResponseCode() != 0) {
+        throw new HiveException("Failed to compile query: " + cpr.getException());
+      }
 
-    QueryPlan plan = driver.getPlan();
-    List<Task<?>> roots = plan.getRootTasks();
-    Schema schema = convertSchema(plan.getResultSchema());
+      QueryPlan plan = driver.getPlan();
+      List<Task<?>> roots = plan.getRootTasks();
+      Schema schema = convertSchema(plan.getResultSchema());
 
-    if (roots == null || roots.size() != 1 || !(roots.get(0) instanceof TezTask)) {
-      throw new HiveException("Was expecting a single TezTask.");
-    }
+      if (roots == null || roots.size() != 1 || !(roots.get(0) instanceof TezTask)) {
+        throw new HiveException("Was expecting a single TezTask.");
+      }
 
-    TezWork tezWork = ((TezTask)roots.get(0)).getWork();
+      TezWork tezWork = ((TezTask)roots.get(0)).getWork();
 
-    if (tezWork.getAllWork().size() != 1) {
+      if (tezWork.getAllWork().size() != 1) {
 
-      String tableName = "table_"+UUID.randomUUID().toString().replaceAll("[^A-Za-z0-9 ]", "");
+        String tableName = "table_"+UUID.randomUUID().toString().replaceAll("[^A-Za-z0-9 ]", "");
 
-      String ctas = "create temporary table " + tableName + " as " + query;
-      LOG.info("Materializing the query for LLAPIF; CTAS: " + ctas);
+        String ctas = "create temporary table " + tableName + " as " + query;
+        LOG.info("Materializing the query for LLAPIF; CTAS: " + ctas);
 
-      try {
-        HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_MODE, originalMode);
-        cpr = driver.run(ctas, false);
-      } catch (CommandNeedRetryException e) {
-        throw new HiveException(e);
-      }
+        try {
+          driver.resetQueryState();
+          HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_MODE, originalMode);
+          cpr = driver.run(ctas, false);
+        } catch (CommandNeedRetryException e) {
+          throw new HiveException(e);
+        }
 
-      if(cpr.getResponseCode() != 0) {
-        throw new HiveException("Failed to create temp table: " + cpr.getException());
-      }
+        if(cpr.getResponseCode() != 0) {
+          throw new HiveException("Failed to create temp table: " + cpr.getException());
+        }
 
-      HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_MODE, "llap");
-      query = "select * from " + tableName;
-      cpr = driver.compileAndRespond(query);
-      if(cpr.getResponseCode() != 0) {
-        throw new HiveException("Failed to create temp table: "+cpr.getException());
-      }
+        HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_MODE, "llap");
+        query = "select * from " + tableName;
+        cpr = driver.compileAndRespond(query);
+        if(cpr.getResponseCode() != 0) {
+          throw new HiveException("Failed to create temp table: "+cpr.getException());
+        }
 
-      plan = driver.getPlan();
-      roots = plan.getRootTasks();
-      schema = convertSchema(plan.getResultSchema());
+        plan = driver.getPlan();
+        roots = plan.getRootTasks();
+        schema = convertSchema(plan.getResultSchema());
 
-      if (roots == null || roots.size() != 1 || !(roots.get(0) instanceof TezTask)) {
-        throw new HiveException("Was expecting a single TezTask.");
+        if (roots == null || roots.size() != 1 || !(roots.get(0) instanceof TezTask)) {
+          throw new HiveException("Was expecting a single TezTask.");
+        }
+
+        tezWork = ((TezTask)roots.get(0)).getWork();
       }
 
-      tezWork = ((TezTask)roots.get(0)).getWork();
+      return new PlanFragment(tezWork, schema, jc);
+    } finally {
+      driver.close();
+      driver.destroy();
     }
-
-    return new PlanFragment(tezWork, schema, jc);
   }
 
   public InputSplit[] getSplits(JobConf job, int numSplits, TezWork work, Schema schema)