Posted to commits@hive.apache.org by ha...@apache.org on 2013/07/09 21:22:12 UTC

svn commit: r1501476 [1/2] - in /hive/trunk: cli/src/java/org/apache/hadoop/hive/cli/ contrib/src/test/results/clientnegative/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec...

Author: hashutosh
Date: Tue Jul  9 19:22:10 2013
New Revision: 1501476

URL: http://svn.apache.org/r1501476
Log:
HIVE-4810 [jira] Refactor exec package
(Gunther Hagleitner via Ashutosh Chauhan)

Summary:
HIVE-4810

The exec package contains both operators and classes used to execute the job. Moving the latter into a subpackage makes the package slightly more manageable and will make it easier to provide a Tez-based implementation.
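
For code outside the exec package the change is purely an import-path update: the MapReduce
execution classes now live under org.apache.hadoop.hive.ql.exec.mr, while the operators stay in
org.apache.hadoop.hive.ql.exec. A minimal sketch of a downstream caller after this commit (the
caller class itself is hypothetical):

    // Hypothetical caller class; only the import path changes with this refactoring.
    // Operators (Operator, TableScanOperator, ...) remain in org.apache.hadoop.hive.ql.exec.
    package org.example.hive.client;

    // Before HIVE-4810 this was: import org.apache.hadoop.hive.ql.exec.ExecDriver;
    import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;

    public class CallerSketch {
      public static String movedTo() {
        return ExecDriver.class.getName(); // => "org.apache.hadoop.hive.ql.exec.mr.ExecDriver"
      }
    }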

Test Plan: Refactoring

Reviewers: ashutoshc

Reviewed By: ashutoshc

Differential Revision: https://reviews.facebook.net/D11625

Added:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapperContext.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHook.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHook.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobTrackerURLResolver.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java
      - copied, changed from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java
Removed:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapperContext.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHook.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java
Modified:
    hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
    hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
    hive/trunk/ql/src/test/results/clientnegative/autolocal1.q.out
    hive/trunk/ql/src/test/results/clientnegative/cachingprintstream.q.out
    hive/trunk/ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out
    hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out
    hive/trunk/ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out
    hive/trunk/ql/src/test/results/clientnegative/fatal.q.out
    hive/trunk/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out
    hive/trunk/ql/src/test/results/clientnegative/index_compact_size_limit.q.out
    hive/trunk/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
    hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out
    hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out
    hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out
    hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out
    hive/trunk/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out
    hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe1.q.out
    hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe2.q.out
    hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe3.q.out
    hive/trunk/ql/src/test/results/clientnegative/script_error.q.out
    hive/trunk/ql/src/test/results/clientnegative/serde_regex2.q.out
    hive/trunk/ql/src/test/results/clientnegative/stats_aggregator_error_2.q.out
    hive/trunk/ql/src/test/results/clientnegative/stats_publisher_error_1.q.out
    hive/trunk/ql/src/test/results/clientnegative/stats_publisher_error_2.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_assert_true.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_assert_true2.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_reflect_neg.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_test_error.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out
    hive/trunk/ql/src/test/results/clientnegative/udfnull.q.out
    hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out
    hive/trunk/ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out
    hive/trunk/ql/src/test/results/clientpositive/loadpart_err.q.out
    hive/trunk/ql/src/test/results/clientpositive/mapjoin_hook.q.out
    hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/groupby6.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input1.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input2.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input3.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input7.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input8.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input_part1.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/join1.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/join3.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/join4.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/join5.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/join6.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/join7.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample3.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample5.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample7.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/subq.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/udf1.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/udf6.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/udf_case.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/udf_when.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/union.q.xml

Modified: hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Tue Jul  9 19:22:10 2013
@@ -52,9 +52,9 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.exec.HadoopJobExecHelper;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
+import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;

Modified: hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out Tue Jul  9 19:22:10 2013
@@ -25,4 +25,4 @@ Task ID:
 Logs:
 
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Tue Jul  9 19:22:10 2013
@@ -46,7 +46,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
-import org.apache.hadoop.hive.ql.exec.ExecDriver;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
@@ -198,7 +197,7 @@ public class Driver implements CommandPr
   public ClusterStatus getClusterStatus() throws Exception {
     ClusterStatus cs;
     try {
-      JobConf job = new JobConf(conf, ExecDriver.class);
+      JobConf job = new JobConf(conf);
       JobClient jc = new JobClient(job);
       cs = jc.getClusterStatus();
     } catch (Exception e) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java Tue Jul  9 19:22:10 2013
@@ -38,10 +38,10 @@ import java.util.concurrent.ConcurrentHa
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
-import org.apache.hadoop.hive.ql.exec.ExecDriver;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java Tue Jul  9 19:22:10 2013
@@ -74,7 +74,7 @@ public class ColumnStatsTask extends Tas
     super.initialize(conf, queryPlan, ctx);
     work.initializeForFetch();
     try {
-      JobConf job = new JobConf(conf, ExecDriver.class);
+      JobConf job = new JobConf(conf);
       ftOp = new FetchOperator(work.getfWork(), job);
     } catch (Exception e) {
       LOG.error(StringUtils.stringifyException(e));

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java Tue Jul  9 19:22:10 2013
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
 import org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveRecordReader;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java Tue Jul  9 19:22:10 2013
@@ -65,7 +65,7 @@ public class FetchTask extends Task<Fetc
 
     try {
       // Create a file system handle
-      JobConf job = new JobConf(conf, ExecDriver.class);
+      JobConf job = new JobConf(conf);
 
       Operator<?> source = work.getSource();
       if (source instanceof TableScanOperator) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java Tue Jul  9 19:22:10 2013
@@ -338,7 +338,7 @@ public class FileSinkOperator extends Te
         jc = (JobConf) hconf;
       } else {
         // test code path
-        jc = new JobConf(hconf, ExecDriver.class);
+        jc = new JobConf(hconf);
       }
 
       if (multiFileSpray) {
@@ -808,7 +808,7 @@ public class FileSinkOperator extends Te
   private String lsDir() {
     String specPath = conf.getDirName();
     // need to get a JobConf here because it's not passed through at client side
-    JobConf jobConf = new JobConf(ExecDriver.class);
+    JobConf jobConf = new JobConf();
     Path tmpPath = Utilities.toTempPath(specPath);
     StringBuilder sb = new StringBuilder("\n");
     try {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Tue Jul  9 19:22:10 2013
@@ -33,6 +33,7 @@ import java.util.Set;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.plan.MapredWork;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java Tue Jul  9 19:22:10 2013
@@ -40,17 +40,17 @@ public class MapredContext {
   private static final Log logger = LogFactory.getLog("MapredContext");
   private static final ThreadLocal<MapredContext> contexts = new ThreadLocal<MapredContext>();
 
-  static MapredContext get() {
+  public static MapredContext get() {
     return contexts.get();
   }
 
-  static MapredContext init(boolean isMap, JobConf jobConf) {
+  public static MapredContext init(boolean isMap, JobConf jobConf) {
     MapredContext context = new MapredContext(isMap, jobConf);
     contexts.set(context);
     return context;
   }
 
-  static void close() {
+  public static void close() {
     MapredContext context = contexts.get();
     if (context != null) {
       context.closeAll();
@@ -91,7 +91,7 @@ public class MapredContext {
     return jobConf;
   }
 
-  void setReporter(Reporter reporter) {
+  public void setReporter(Reporter reporter) {
     this.reporter = reporter;
   }
 
@@ -139,8 +139,8 @@ public class MapredContext {
     try {
       Method initMethod = func.getClass().getMethod("configure", MapredContext.class);
       return initMethod.getDeclaringClass() != GenericUDF.class &&
-          initMethod.getDeclaringClass() != GenericUDAFEvaluator.class &&
-          initMethod.getDeclaringClass() != GenericUDTF.class;
+        initMethod.getDeclaringClass() != GenericUDAFEvaluator.class &&
+        initMethod.getDeclaringClass() != GenericUDTF.class;
     } catch (Exception e) {
       return false;
     }
@@ -150,7 +150,7 @@ public class MapredContext {
     try {
       Method closeMethod = func.getClass().getMethod("close");
       return closeMethod.getDeclaringClass() != GenericUDF.class &&
-          closeMethod.getDeclaringClass() != GenericUDAFEvaluator.class;
+        closeMethod.getDeclaringClass() != GenericUDAFEvaluator.class;
     } catch (Exception e) {
       return false;
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java Tue Jul  9 19:22:10 2013
@@ -40,6 +40,8 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
+import org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java Tue Jul  9 19:22:10 2013
@@ -31,6 +31,7 @@ import java.util.Set;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java Tue Jul  9 19:22:10 2013
@@ -200,7 +200,7 @@ public class TableScanOperator extends O
       jc = (JobConf) hconf;
     } else {
       // test code path
-      jc = new JobConf(hconf, ExecDriver.class);
+      jc = new JobConf(hconf);
     }
 
     currentStat = null;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java Tue Jul  9 19:22:10 2013
@@ -31,7 +31,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.lib.Node;
@@ -50,6 +49,8 @@ import org.apache.hadoop.util.StringUtil
 public abstract class Task<T extends Serializable> implements Serializable, Node {
 
   private static final long serialVersionUID = 1L;
+  public transient HashMap<String, Long> taskCounters;
+  public transient TaskHandle taskHandle;
   protected transient boolean started;
   protected transient boolean initialized;
   protected transient boolean isdone;
@@ -58,8 +59,6 @@ public abstract class Task<T extends Ser
   protected transient Hive db;
   protected transient LogHelper console;
   protected transient QueryPlan queryPlan;
-  protected transient TaskHandle taskHandle;
-  protected transient HashMap<String, Long> taskCounters;
   protected transient DriverContext driverContext;
   protected transient boolean clonedConf = false;
   protected transient String jobID;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java Tue Jul  9 19:22:10 2013
@@ -23,6 +23,8 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
+import org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask;
 import org.apache.hadoop.hive.ql.io.rcfile.merge.BlockMergeTask;
 import org.apache.hadoop.hive.ql.io.rcfile.merge.MergeWork;
 import org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanTask;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Tue Jul  9 19:22:10 2013
@@ -97,6 +97,8 @@ import org.apache.hadoop.hive.ql.Context
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
+import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.io.ContentSummaryInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
@@ -1404,7 +1406,7 @@ public final class Utilities {
       jc = new JobConf(hconf);
     } else {
       // test code path
-      jc = new JobConf(hconf, ExecDriver.class);
+      jc = new JobConf(hconf);
     }
     HiveOutputFormat<?, ?> hiveOutputFormat = null;
     Class<? extends Writable> outputClass = null;

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java Tue Jul  9 19:22:10 2013
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.io.File;
 import java.io.IOException;
@@ -54,6 +54,15 @@ import org.apache.hadoop.hive.ql.Context
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.exec.FetchOperator;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.HiveTotalOrderPartitioner;
+import org.apache.hadoop.hive.ql.exec.JobCloseFeedBack;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.PartitionKeySampler;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
 import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveKey;
@@ -93,7 +102,12 @@ import org.apache.log4j.LogManager;
 import org.apache.log4j.varia.NullAppender;
 
 /**
- * ExecDriver.
+ * ExecDriver is the central class coordinating execution of any map-reduce task.
+ * Its main responsibilities are:
+ *
+ * - Converting the plan (MapredWork) into an MR job (JobConf)
+ * - Submitting the MR job to the cluster via JobClient and ExecHelper
+ * - Executing the MR job in local execution mode (where applicable)
  *
  */
 public class ExecDriver extends Task<MapredWork> implements Serializable, HadoopJobExecHook {
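
The submit path described in the new class comment can be pictured with the stock
org.apache.hadoop.mapred API alone. A rough, hypothetical sketch (not the actual ExecDriver code;
plan-to-JobConf translation and progress monitoring are elided):

    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.RunningJob;

    /** Hypothetical sketch: submit an already-configured MR job, as ExecDriver's comment describes. */
    public class SubmitSketch {
      public static RunningJob submit(JobConf job) throws Exception {
        JobClient jobClient = new JobClient(job); // talks to the JobTracker configured in the JobConf
        RunningJob running = jobClient.submitJob(job);
        return running;                           // a helper (e.g. HadoopJobExecHelper) would poll this
      }
    }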

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java Tue Jul  9 19:22:10 2013
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.io.IOException;
 import java.lang.management.ManagementFactory;
@@ -28,6 +28,11 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.FetchOperator;
+import org.apache.hadoop.hive.ql.exec.MapOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.MapredContext;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -38,8 +43,16 @@ import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.StringUtils;
+
 /**
- * ExecMapper.
+ * ExecMapper is the generic Map class for Hive. Together with ExecReducer it is
+ * the bridge between the map-reduce framework and the Hive operator pipeline at
+ * execution time. Its main responsibilities are:
+ *
+ * - Load and set up the operator pipeline from XML
+ * - Run the pipeline by transforming key-value pairs into records and forwarding them to the operators
+ * - Stop execution when the "limit" is reached
+ * - Catch and handle errors during execution of the operators.
  *
  */
 public class ExecMapper extends MapReduceBase implements Mapper {
@@ -50,7 +63,7 @@ public class ExecMapper extends MapReduc
   private JobConf jc;
   private boolean abort = false;
   private Reporter rp;
-  public static final Log l4j = LogFactory.getLog("ExecMapper");
+  public static final Log l4j = LogFactory.getLog(ExecMapper.class);
   private static boolean done;
 
   // used to log memory usage periodically
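
For readers unfamiliar with the old org.apache.hadoop.mapred API that ExecMapper builds on, the
overall shape is a MapReduceBase subclass whose configure() sets up state and whose map() is
invoked once per input record. A hypothetical, heavily simplified skeleton (not Hive code):

    import java.io.IOException;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.Mapper;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reporter;

    public class MapperSkeleton extends MapReduceBase implements Mapper<Writable, Writable, Text, Text> {
      private boolean abort = false;

      @Override
      public void configure(JobConf job) {
        // ExecMapper loads the operator pipeline from XML and initializes it here.
      }

      @Override
      public void map(Writable key, Writable value, OutputCollector<Text, Text> output,
          Reporter reporter) throws IOException {
        if (abort) {
          return; // stop once a fatal error (or the configured limit) has been hit
        }
        // ExecMapper forwards (key, value) into the root MapOperator at this point.
      }
    }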

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapperContext.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapperContext.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapperContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java Tue Jul  9 19:22:10 2013
@@ -15,11 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.util.Map;
 
 import org.apache.commons.logging.Log;
+import org.apache.hadoop.hive.ql.exec.FetchOperator;
 import org.apache.hadoop.hive.ql.io.IOContext;
 import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
 import org.apache.hadoop.mapred.JobConf;

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java Tue Jul  9 19:22:10 2013
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.io.IOException;
 import java.lang.management.ManagementFactory;
@@ -25,11 +25,13 @@ import java.net.URLClassLoader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
-import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.ExecMapper.reportStats;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.MapredContext;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.ExecMapper.reportStats;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -51,7 +53,14 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.StringUtils;
 
 /**
- * ExecReducer.
+ * ExecReducer is the generic Reducer class for Hive. Together with ExecMapper it is
+ * the bridge between the map-reduce framework and the Hive operator pipeline at
+ * execution time. Its main responsibilities are:
+ *
+ * - Load and set up the operator pipeline from XML
+ * - Run the pipeline by transforming key-value pairs into records and forwarding them to the operators
+ * - Send start-group and end-group messages to separate records with the same key from one another
+ * - Catch and handle errors during execution of the operators.
  *
  */
 public class ExecReducer extends MapReduceBase implements Reducer {
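
The group handling called out in the comment maps naturally onto the old reduce() contract, where
each call covers exactly one key. A hypothetical skeleton (not Hive code) showing where the
start-group and end-group signals described above would fire:

    import java.io.IOException;
    import java.util.Iterator;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reducer;
    import org.apache.hadoop.mapred.Reporter;

    public class ReducerSkeleton extends MapReduceBase implements Reducer<Text, Writable, Text, Text> {
      @Override
      public void reduce(Text key, Iterator<Writable> values, OutputCollector<Text, Text> output,
          Reporter reporter) throws IOException {
        // start-group: tell the operator tree a new key group begins
        while (values.hasNext()) {
          values.next(); // each value becomes a record forwarded into the operator pipeline
        }
        // end-group: the group for this key is complete
      }
    }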

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java Tue Jul  9 19:22:10 2013
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.io.IOException;
 import java.io.Serializable;
@@ -35,6 +35,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.MapRedStats;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskHandle;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.Operator.ProgressCounter;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
 import org.apache.hadoop.hive.ql.plan.ReducerTimeStatsPerJob;

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHook.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHook.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHook.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHook.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHook.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHook.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHook.java Tue Jul  9 19:22:10 2013
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.io.IOException;
 

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java Tue Jul  9 19:22:10 2013
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.io.IOException;
 import java.lang.Exception;

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobTrackerURLResolver.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobTrackerURLResolver.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobTrackerURLResolver.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobTrackerURLResolver.java Tue Jul  9 19:22:10 2013
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java Tue Jul  9 19:22:10 2013
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.io.File;
 import java.io.IOException;
@@ -36,6 +36,8 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.MapredWork;

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java Tue Jul  9 19:22:10 2013
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.io.File;
 import java.io.IOException;
@@ -43,6 +43,13 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.exec.BucketMatcher;
+import org.apache.hadoop.hive.ql.exec.FetchOperator;
+import org.apache.hadoop.hive.ql.exec.HashTableSinkOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
 import org.apache.hadoop.hive.ql.exec.persistence.AbstractMapJoinKey;
 import org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper;
@@ -63,6 +70,15 @@ import org.apache.hadoop.hive.shims.Shim
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.ReflectionUtils;
 
+
+/**
+ * MapredLocalTask represents any local (i.e. client-side) work that Hive needs to
+ * execute. For example, it is used for generating the hash tables for map joins on the
+ * client before the join is executed on the cluster.
+ *
+ * MapredLocalTask does not actually execute the work in-process, but rather generates
+ * a command using ExecDriver. ExecDriver is what will finally drive processing of the records.
+ */
 public class MapredLocalTask extends Task<MapredLocalWork> implements Serializable {
 
   private Map<String, FetchOperator> fetchOperators;
@@ -202,7 +218,7 @@ public class MapredLocalTask extends Tas
       // This will be used by hadoop only in unsecure(/non kerberos) mode
       HadoopShims shim = ShimLoader.getHadoopShims();
       String endUserName = shim.getShortUserName(shim.getUGIForConf(job));
-      console.printInfo("setting HADOOP_USER_NAME\t" + endUserName);
+      LOG.debug("setting HADOOP_USER_NAME\t" + endUserName);
       variables.put("HADOOP_USER_NAME", endUserName);
 
       if (variables.containsKey(HADOOP_OPTS_KEY)) {
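
"Generates a command using ExecDriver" means the local work runs in a separate child process
rather than inside the client process. A rough, hypothetical sketch of that spawn-and-wait
pattern (the real command line built here is more involved and is not reproduced):

    import java.util.List;

    /** Hypothetical sketch: run local work by spawning a child process and waiting for it. */
    public class ChildProcessSketch {
      public static int runChild(List<String> command) throws Exception {
        // e.g. a "hadoop jar ... <driver class> ..." style command assembled by the caller
        ProcessBuilder builder = new ProcessBuilder(command);
        builder.redirectErrorStream(true); // fold stderr into stdout so logs can be streamed as one
        Process child = builder.start();
        return child.waitFor();            // non-zero exit code signals failure to the caller
      }
    }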

Copied: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java (from r1500997, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java)
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java?p2=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java&p1=hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java&r1=1500997&r2=1501476&rev=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java Tue Jul  9 19:22:10 2013
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.exec;
+package org.apache.hadoop.hive.ql.exec.mr;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -76,18 +76,18 @@ public final class Throttle {
 
         // fetch the xml tag <dogc>xxx</dogc>
         Pattern dowait = Pattern.compile("<dogc>", Pattern.CASE_INSENSITIVE
-            | Pattern.DOTALL | Pattern.MULTILINE);
+                                         | Pattern.DOTALL | Pattern.MULTILINE);
         String[] results = dowait.split(fetchString);
         if (results.length != 2) {
           throw new IOException("Throttle: Unable to parse response of URL "
-              + url + ". Get retuned " + fetchString);
+                                + url + ". Get retuned " + fetchString);
         }
         dowait = Pattern.compile("</dogc>", Pattern.CASE_INSENSITIVE
-            | Pattern.DOTALL | Pattern.MULTILINE);
+                                 | Pattern.DOTALL | Pattern.MULTILINE);
         results = dowait.split(results[1]);
         if (results.length < 1) {
           throw new IOException("Throttle: Unable to parse response of URL "
-              + url + ". Get retuned " + fetchString);
+                                + url + ". Get retuned " + fetchString);
         }
 
         // if the jobtracker signalled that the threshold is not exceeded,
@@ -99,7 +99,7 @@ public final class Throttle {
         // The JobTracker has exceeded its threshold and is doing a GC.
         // The client has to wait and retry.
         LOG.warn("Job is being throttled because of resource crunch on the "
-            + "JobTracker. Will retry in " + retry + " seconds..");
+                 + "JobTracker. Will retry in " + retry + " seconds..");
         Thread.sleep(retry * 1000L);
       }
     } catch (Exception e) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java Tue Jul  9 19:22:10 2013
@@ -37,7 +37,6 @@ import org.apache.hadoop.conf.Configurab
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hive.ql.exec.ExecMapper;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java Tue Jul  9 19:22:10 2013
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.io;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.exec.ExecMapper;
+import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
 import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.CombineHiveInputSplit;
 import org.apache.hadoop.hive.shims.HadoopShims.InputSplitShim;
 import org.apache.hadoop.io.Writable;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java Tue Jul  9 19:22:10 2013
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.ql.io;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hive.ql.exec.ExecMapper;
+import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java Tue Jul  9 19:22:10 2013
@@ -35,11 +35,11 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.exec.HadoopJobExecHelper;
-import org.apache.hadoop.hive.ql.exec.HadoopJobExecHook;
 import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.Throttle;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
+import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHook;
+import org.apache.hadoop.hive.ql.exec.mr.Throttle;
 import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
 import org.apache.hadoop.hive.ql.plan.api.StageType;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java Tue Jul  9 19:22:10 2013
@@ -36,11 +36,11 @@ import org.apache.hadoop.hive.ql.Context
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.exec.HadoopJobExecHelper;
-import org.apache.hadoop.hive.ql.exec.HadoopJobExecHook;
 import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.Throttle;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
+import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHook;
+import org.apache.hadoop.hive.ql.exec.mr.Throttle;
 import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
 import org.apache.hadoop.hive.ql.metadata.HiveException;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java Tue Jul  9 19:22:10 2013
@@ -28,11 +28,11 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.exec.HadoopJobExecHelper;
-import org.apache.hadoop.hive.ql.exec.HadoopJobExecHook;
 import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.Throttle;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
+import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHook;
+import org.apache.hadoop.hive.ql.exec.mr.Throttle;
 import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
 import org.apache.hadoop.hive.ql.plan.api.StageType;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Tue Jul  9 19:22:10 2013
@@ -33,7 +33,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
-import org.apache.hadoop.hive.ql.exec.ExecDriver;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
@@ -43,6 +42,7 @@ import org.apache.hadoop.hive.ql.exec.Ta
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java Tue Jul  9 19:22:10 2013
@@ -27,9 +27,9 @@ import java.util.Stack;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.TaskGraphWalker.TaskGraphWalkerContext;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java Tue Jul  9 19:22:10 2013
@@ -23,7 +23,6 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.ql.exec.ExecDriver;
 import org.apache.hadoop.hive.ql.exec.ExtractOperator;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
@@ -36,6 +35,7 @@ import org.apache.hadoop.hive.ql.exec.Li
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.GraphWalker;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java Tue Jul  9 19:22:10 2013
@@ -34,12 +34,12 @@ import org.apache.hadoop.hive.ql.Context
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
 import org.apache.hadoop.hive.ql.optimizer.MapJoinProcessor;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java Tue Jul  9 19:22:10 2013
@@ -28,12 +28,12 @@ import java.util.Stack;
 
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
-import org.apache.hadoop.hive.ql.exec.MapredLocalTask;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java Tue Jul  9 19:22:10 2013
@@ -20,12 +20,12 @@
 
 package org.apache.hadoop.hive.ql.optimizer.physical;
 
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java Tue Jul  9 19:22:10 2013
@@ -35,13 +35,13 @@ import org.apache.hadoop.hive.ql.exec.Co
 import org.apache.hadoop.hive.ql.exec.DummyStoreOperator;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
 import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.optimizer.MapJoinProcessor;
 import org.apache.hadoop.hive.ql.parse.OpParseContext;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java Tue Jul  9 19:22:10 2013
@@ -31,10 +31,10 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hive.metastore.api.Index;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
 import org.apache.hadoop.hive.ql.index.HiveIndexQueryContext;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java Tue Jul  9 19:22:10 2013
@@ -40,7 +40,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java Tue Jul  9 19:22:10 2013
@@ -50,8 +50,8 @@ import org.apache.hadoop.hive.ql.exec.Ta
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.exec.ExecDriver;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
+import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue Jul  9 19:22:10 2013
@@ -53,13 +53,11 @@ import org.apache.hadoop.hive.ql.QueryPr
 import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
-import org.apache.hadoop.hive.ql.exec.ExecDriver;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.GroupByOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
 import org.apache.hadoop.hive.ql.exec.RecordReader;
@@ -74,6 +72,8 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.WindowFunctionInfo;
+import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Tue Jul  9 19:22:10 2013
@@ -31,6 +31,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Table;

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java Tue Jul  9 19:22:10 2013
@@ -35,9 +35,9 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.exec.ExecDriver;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java Tue Jul  9 19:22:10 2013
@@ -38,8 +38,7 @@ public class DummyContextUDF extends Gen
 
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
     Reporter reporter = context.getReporter();
-    Counters.Counter counter = reporter.getCounter(
-        "org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS");
+    Counters.Counter counter = reporter.getCounter("org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS");
     result.set(counter.getValue());
     return result;
   }
@@ -49,7 +48,7 @@ public class DummyContextUDF extends Gen
   }
 
   @Override
-    public void configure(MapredContext context) {
+  public void configure(MapredContext context) {
     this.context = context;
   }
 }

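Note on the source hunks above and the .q.out updates below: the moved classes keep their names and only their package changes to org.apache.hadoop.hive.ql.exec.mr, which is also why the golden files now print fully qualified names such as org.apache.hadoop.hive.ql.exec.mr.MapRedTask. A minimal sketch of what a downstream caller would change (the MyPhysicalRule class is hypothetical and only illustrates the one-line import update, assuming hive-exec from this revision on the classpath):

    // Hypothetical downstream class; after r1501476 only the MapRedTask import moves.
    package com.example.hive;

    import org.apache.hadoop.hive.ql.exec.Task;           // Task itself stays in ql.exec
    import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;  // was org.apache.hadoop.hive.ql.exec.MapRedTask

    public class MyPhysicalRule {
      /** Returns true when the given task is a plain MapReduce task. */
      public static boolean isMapRedTask(Task<?> task) {
        return task instanceof MapRedTask;
      }
    }
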
Modified: hive/trunk/ql/src/test/results/clientnegative/autolocal1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/autolocal1.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/autolocal1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/autolocal1.q.out Tue Jul  9 19:22:10 2013
@@ -12,4 +12,5 @@ SELECT key FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MapRedTask
+Job Submission failed with exception 'java.lang.RuntimeException(Not a host:port pair: abracadabra)'
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/cachingprintstream.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/cachingprintstream.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/cachingprintstream.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/cachingprintstream.q.out Tue Jul  9 19:22:10 2013
@@ -34,4 +34,4 @@ Logs:
 
 #### A masked pattern was here ####
 End cached logs.
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out Tue Jul  9 19:22:10 2013
@@ -11,4 +11,4 @@ SELECT evaluate_npe(src.key) LIMIT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out Tue Jul  9 19:22:10 2013
@@ -17,4 +17,4 @@ Task ID:
 Logs:
 
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out Tue Jul  9 19:22:10 2013
@@ -28,4 +28,4 @@ Task ID:
 Logs:
 
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/fatal.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/fatal.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/fatal.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/fatal.q.out Tue Jul  9 19:22:10 2013
@@ -2,4 +2,4 @@ PREHOOK: query: select /*+ mapjoin(b) */
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-02_13-41-52_752_1156521578782717030/-mr-10000
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out Tue Jul  9 19:22:10 2013
@@ -42,4 +42,4 @@ Task ID:
 Logs:
 
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/index_compact_size_limit.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/index_compact_size_limit.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/index_compact_size_limit.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/index_compact_size_limit.q.out Tue Jul  9 19:22:10 2013
@@ -42,4 +42,4 @@ Task ID:
 Logs:
 
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out Tue Jul  9 19:22:10 2013
@@ -20,4 +20,4 @@ org.apache.hadoop.hive.ql.metadata.HiveE
 org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20003]: An error occurred when trying to close the Operator running your custom script.
 #### A masked pattern was here ####
 Error during job, obtaining debugging information...
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out Tue Jul  9 19:22:10 2013
@@ -2,4 +2,4 @@ PREHOOK: query: FROM src SELECT TRANSFOR
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.MapRedTask. Unable to initialize custom script.
+FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Unable to initialize custom script.

Modified: hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out Tue Jul  9 19:22:10 2013
@@ -2,12 +2,12 @@ PREHOOK: query: FROM src SELECT TRANSFOR
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+FATAL org.apache.hadoop.hive.ql.exec.mr.ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
 Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+FATAL org.apache.hadoop.hive.ql.exec.mr.ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
 Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+FATAL org.apache.hadoop.hive.ql.exec.mr.ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
 Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+FATAL org.apache.hadoop.hive.ql.exec.mr.ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
 Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.MapRedTask. Unable to initialize custom script.
+FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Unable to initialize custom script.

Modified: hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out Tue Jul  9 19:22:10 2013
@@ -2,4 +2,4 @@ PREHOOK: query: FROM src SELECT TRANSFOR
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.MapRedTask. Unable to initialize custom script.
+FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Unable to initialize custom script.

Modified: hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out Tue Jul  9 19:22:10 2013
@@ -2,4 +2,4 @@ PREHOOK: query: FROM src SELECT TRANSFOR
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.MapRedTask. Unable to initialize custom script.
+FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Unable to initialize custom script.

Modified: hive/trunk/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out Tue Jul  9 19:22:10 2013
@@ -3,4 +3,4 @@ SELECT TRANSFORM(*) USING 'true' AS a, b
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 20003 from org.apache.hadoop.hive.ql.exec.MapRedTask. An error occurred when trying to close the Operator running your custom script.
+FAILED: Execution Error, return code 20003 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. An error occurred when trying to close the Operator running your custom script.

Modified: hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe1.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe1.q.out Tue Jul  9 19:22:10 2013
@@ -13,4 +13,4 @@ Task ID:
 Logs:
 
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe2.q.out?rev=1501476&r1=1501475&r2=1501476&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe2.q.out Tue Jul  9 19:22:10 2013
@@ -13,4 +13,4 @@ Task ID:
 Logs:
 
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask