You are viewing a plain text version of this content. The canonical link was present in the original rendering but is not preserved in this plain-text extraction.
Posted to commits@hive.apache.org by br...@apache.org on 2014/10/06 05:35:53 UTC

svn commit: r1629560 - in /hive/branches/spark/ql/src: java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/exec/mr/ java/org/apache/hadoop/hive/ql/exec/spark/ java/org/apache/hadoop/hive/ql/io/ test/org/apache/hadoop/hive/ql/exec/ test/...

Author: brock
Date: Mon Oct  6 03:35:53 2014
New Revision: 1629560

URL: http://svn.apache.org/r1629560
Log:
Revert HIVE-8331: HIVE-8303 followup, investigate result diff [Spark Branch] (Chao via Xuefu)

Modified:
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java?rev=1629560&r1=1629559&r2=1629560&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java Mon Oct  6 03:35:53 2014
@@ -76,7 +76,7 @@ public class FilterOperator extends Oper
       statsMap.put(Counter.FILTERED, filtered_count);
       statsMap.put(Counter.PASSED, passed_count);
       conditionInspector = null;
-      ioContext = IOContext.get(hconf);
+      ioContext = IOContext.get(hconf.get(Utilities.INPUT_NAME));
     } catch (Throwable e) {
       throw new HiveException(e);
     }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=1629560&r1=1629559&r2=1629560&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Mon Oct  6 03:35:53 2014
@@ -339,7 +339,7 @@ public class MapOperator extends Operato
   }
 
   public void setChildren(Configuration hconf) throws HiveException {
-    Path fpath = IOContext.get(hconf).getInputPath();
+    Path fpath = IOContext.get(hconf.get(Utilities.INPUT_NAME)).getInputPath();
 
     boolean schemeless = fpath.toUri().getScheme() == null;
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java?rev=1629560&r1=1629559&r2=1629560&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java Mon Oct  6 03:35:53 2014
@@ -63,7 +63,7 @@ public class ExecMapperContext {
 
   public ExecMapperContext(JobConf jc) {
     this.jc = jc;
-    ioCxt = IOContext.get(jc);
+    ioCxt = IOContext.get(jc.get(Utilities.INPUT_NAME));
   }
 
   public void clear() {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java?rev=1629560&r1=1629559&r2=1629560&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java Mon Oct  6 03:35:53 2014
@@ -88,6 +88,7 @@ public class SparkMapRecordHandler exten
       }
       mo.setConf(mrwork);
       l4j.info("Main input name is " + mrwork.getName());
+      jc.set(Utilities.INPUT_NAME, mrwork.getName());
       // initialize map operator
       mo.setChildren(job);
       l4j.info(mo.dump(0));

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java?rev=1629560&r1=1629559&r2=1629560&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java Mon Oct  6 03:35:53 2014
@@ -197,6 +197,7 @@ public class SparkPlanGenerator {
     }
     if (work instanceof MapWork) {
       List<Path> inputPaths = Utilities.getInputPaths(cloned, (MapWork) work, scratchDir, context, false);
+      cloned.set(Utilities.INPUT_NAME, work.getName());
       Utilities.setInputPaths(cloned, inputPaths);
       Utilities.setMapWork(cloned, (MapWork) work, scratchDir, false);
       Utilities.createTmpDirs(cloned, (MapWork) work);

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java?rev=1629560&r1=1629559&r2=1629560&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java Mon Oct  6 03:35:53 2014
@@ -161,7 +161,7 @@ public abstract class HiveContextAwareRe
   }
 
   public IOContext getIOContext() {
-    return IOContext.get(jobConf);
+    return IOContext.get(jobConf.get(Utilities.INPUT_NAME));
   }
 
   private void initIOContext(long startPos, boolean isBlockPointer,

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java?rev=1629560&r1=1629559&r2=1629560&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java Mon Oct  6 03:35:53 2014
@@ -21,11 +21,7 @@ package org.apache.hadoop.hive.ql.io;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.mapred.JobConf;
 
 
 /**
@@ -53,17 +49,13 @@ public class IOContext {
     return inputNameIOContextMap;
   }
 
-  public static IOContext get(Configuration conf) {
-    if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("spark")) {
-      return get();
-    } else {
-      String inputName = conf.get(Utilities.INPUT_NAME);
-      if (inputNameIOContextMap.containsKey(inputName) == false) {
-        IOContext ioContext = new IOContext();
-        inputNameIOContextMap.put(inputName, ioContext);
-      }
-      return inputNameIOContextMap.get(inputName);
+  public static IOContext get(String inputName) {
+    if (inputNameIOContextMap.containsKey(inputName) == false) {
+      IOContext ioContext = new IOContext();
+      inputNameIOContextMap.put(inputName, ioContext);
     }
+
+    return inputNameIOContextMap.get(inputName);
   }
 
   public static void clear() {

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=1629560&r1=1629559&r2=1629560&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Mon Oct  6 03:35:53 2014
@@ -331,7 +331,7 @@ public class TestOperators extends TestC
       Configuration hconf = new JobConf(TestOperators.class);
       HiveConf.setVar(hconf, HiveConf.ConfVars.HADOOPMAPFILENAME,
           "hdfs:///testDir/testFile");
-      IOContext.get(hconf).setInputPath(
+      IOContext.get(hconf.get(Utilities.INPUT_NAME)).setInputPath(
           new Path("hdfs:///testDir/testFile"));
 
       // initialize pathToAliases

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java?rev=1629560&r1=1629559&r2=1629560&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java Mon Oct  6 03:35:53 2014
@@ -116,7 +116,7 @@ public class TestHiveBinarySearchRecordR
 
   private void resetIOContext() {
     conf.set(Utilities.INPUT_NAME, "TestHiveBinarySearchRecordReader");
-    ioContext = IOContext.get(conf);
+    ioContext = IOContext.get(conf.get(Utilities.INPUT_NAME));
     ioContext.setUseSorted(false);
     ioContext.setIsBinarySearching(false);
     ioContext.setEndBinarySearch(false);

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out?rev=1629560&r1=1629559&r2=1629560&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out Mon Oct  6 03:35:53 2014
@@ -306,15 +306,15 @@ POSTHOOK: Input: default@outputtbl1@ds=1
 POSTHOOK: Input: default@outputtbl1@ds=18
 POSTHOOK: Input: default@outputtbl1@ds=28
 #### A masked pattern was here ####
-1	1	11
-1	1	11
-2	1	12
-2	1	12
+1	1	13
+1	1	13
+2	1	13
+2	1	13
 3	1	13
 3	1	13
-7	1	17
-7	1	17
-8	1	18
-8	1	18
+7	1	13
+7	1	13
+8	1	28
+8	1	28
 8	1	28
 8	1	28