Posted to commits@hive.apache.org by br...@apache.org on 2015/01/30 22:01:36 UTC

svn commit: r1656120 - in /hive/branches/branch-1.1: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java

Author: brock
Date: Fri Jan 30 21:01:35 2015
New Revision: 1656120

URL: http://svn.apache.org/r1656120
Log:
HIVE-9477: No error thrown when global limit optimization failed to find enough number of rows [Spark Branch] (Rui via Xuefu)
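
Context for the change: Hive's global limit optimization satisfies a LIMIT by
scanning only a sample of the input; if that sample yields too few rows, the
command must be re-run with the optimization disabled. MapReduce tasks were
already flagged for that retry via setRetryCmdWhenFail, but Spark tasks were
not, so a too-short result could be returned without any error. The sketch
below illustrates the general retry-on-failure pattern this commit extends to
SparkTask; RetrySketch, Task, and run are simplified stand-ins, not Hive's
actual driver code.

    // Hypothetical sketch of the retry-on-failure pattern, not Hive's actual
    // Driver logic. A task flagged with retryCmdWhenFail is re-executed once,
    // here with the global-limit optimization disabled, instead of quietly
    // surfacing a short result.
    class RetrySketch {
      interface Task {
        int execute(boolean limitOptimizationEnabled);   // 0 = success
        boolean retryCmdWhenFail();
      }

      static int run(Task task) {
        int rc = task.execute(true);              // first attempt, optimization on
        if (rc != 0 && task.retryCmdWhenFail()) {
          rc = task.execute(false);               // retry without the optimization
        }
        return rc;
      }
    }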

Modified:
    hive/branches/branch-1.1/   (props changed)
    hive/branches/branch-1.1/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
    hive/branches/branch-1.1/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java

Propchange: hive/branches/branch-1.1/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Jan 30 21:01:35 2015
@@ -1,6 +1,6 @@
 /hive/branches/branch-0.11:1480385,1480458,1481120,1481344,1481346,1481348,1481352,1483872,1505184
 /hive/branches/cbo:1605012-1627125
-/hive/branches/spark:1608589-1654414,1654553,1654869,1654873,1655427
+/hive/branches/spark:1608589-1654414,1654553,1654869,1654873,1655427,1655468
 /hive/branches/tez:1494760-1622766
 /hive/branches/vectorization:1466908-1527856
 /hive/trunk:1655202,1655210,1655213,1655436,1655460,1655894-1655895,1656114

Modified: hive/branches/branch-1.1/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-1.1/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java?rev=1656120&r1=1656119&r2=1656120&view=diff
==============================================================================
--- hive/branches/branch-1.1/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java (original)
+++ hive/branches/branch-1.1/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java Fri Jan 30 21:01:35 2015
@@ -233,6 +233,7 @@ public class SparkPlanGenerator {
       throw new IllegalArgumentException(msg, e);
     }
     if (work instanceof MapWork) {
+      cloned.setBoolean("mapred.task.is.map", true);
       List<Path> inputPaths = Utilities.getInputPaths(cloned, (MapWork) work,
           scratchDir, context, false);
       Utilities.setInputPaths(cloned, inputPaths);
@@ -250,6 +251,7 @@ public class SparkPlanGenerator {
       // remember the JobConf cloned for each MapWork, so we won't clone for it again
       workToJobConf.put(work, cloned);
     } else if (work instanceof ReduceWork) {
+      cloned.setBoolean("mapred.task.is.map", false);
       Utilities.setReduceWork(cloned, (ReduceWork) work, scratchDir, false);
       Utilities.createTmpDirs(cloned, (ReduceWork) work);
       cloned.set(Utilities.MAPRED_REDUCER_CLASS, ExecReducer.class.getName());

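The two cloned.setBoolean calls above matter because MapReduce sets
mapred.task.is.map on each task's configuration automatically, while Spark
executors do not, so any Hive code consulting that key on the cloned JobConf
would otherwise see an unset value. A minimal sketch of such a consumer,
assuming only the standard JobConf/Configuration API (TaskSideCheck and
isMapSide are illustrative names, not Hive code):

    // Reads the flag the commit sets. MapReduce populates "mapred.task.is.map"
    // per task automatically; under Spark it must be set by hand on the cloned
    // JobConf, as done in the hunk above.
    import org.apache.hadoop.mapred.JobConf;

    public class TaskSideCheck {
      public static boolean isMapSide(JobConf conf) {
        // Defaulting to true when unset is an arbitrary choice for this sketch.
        return conf.getBoolean("mapred.task.is.map", true);
      }
    }
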
Modified: hive/branches/branch-1.1/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-1.1/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java?rev=1656120&r1=1656119&r2=1656120&view=diff
==============================================================================
--- hive/branches/branch-1.1/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java (original)
+++ hive/branches/branch-1.1/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java Fri Jan 30 21:01:35 2015
@@ -41,6 +41,7 @@ import org.apache.hadoop.hive.ql.exec.Ta
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -280,6 +281,10 @@ public abstract class TaskCompiler {
       for (ExecDriver tsk : mrTasks) {
         tsk.setRetryCmdWhenFail(true);
       }
+      List<SparkTask> sparkTasks = Utilities.getSparkTasks(rootTasks);
+      for (SparkTask sparkTask : sparkTasks) {
+        sparkTask.setRetryCmdWhenFail(true);
+      }
     }
 
     Interner<TableDesc> interner = Interners.newStrongInterner();
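
These new lines mirror the ExecDriver loop just above them:
Utilities.getSparkTasks walks the compiled task tree collecting every
SparkTask, and each one is flagged so that a failed attempt (for example, the
global-limit sample finding too few rows) triggers a command retry instead of
a silent short result. Below is a hedged sketch of that kind of task-DAG
traversal; TaskCollector and its nested Task interface are simplified
stand-ins, not Hive's actual Task class.

    // Sketch of the traversal a helper like Utilities.getSparkTasks performs:
    // walk the task DAG from the roots and collect every task of a given type,
    // skipping nodes already visited (tasks can have multiple parents).
    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    class TaskCollector {
      interface Task { List<Task> getChildTasks(); }

      static <T extends Task> List<T> collect(List<Task> roots, Class<T> type) {
        List<T> found = new ArrayList<>();
        collectInto(roots, type, found, new HashSet<Task>());
        return found;
      }

      private static <T extends Task> void collectInto(List<Task> tasks,
          Class<T> type, List<T> found, Set<Task> seen) {
        if (tasks == null) {
          return;
        }
        for (Task t : tasks) {
          if (t == null || !seen.add(t)) {
            continue;                      // null child or already visited
          }
          if (type.isInstance(t)) {
            found.add(type.cast(t));       // collect tasks of the requested type
          }
          collectInto(t.getChildTasks(), type, found, seen);
        }
      }
    }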