Posted to commits@hive.apache.org by nz...@apache.org on 2011/08/12 03:05:51 UTC

svn commit: r1156928 - in /hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/test/results/clientnegative/ ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/org/apache/ha...

Author: nzhang
Date: Fri Aug 12 01:05:50 2011
New Revision: 1156928

URL: http://svn.apache.org/viewvc?rev=1156928&view=rev
Log:
HIVE-2156. Improve error messages emitted during task execution (Syed S. Albiz via Ning Zhang)

Added:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
    hive/trunk/ql/src/test/queries/clientnegative/minimr_broken_pipe.q
    hive/trunk/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out
Modified:
    hive/trunk/build-common.xml
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/conf/hive-default.xml
    hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out
    hive/trunk/ql/build.xml
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out
    hive/trunk/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out
    hive/trunk/ql/src/test/results/clientnegative/index_compact_size_limit.q.out
    hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe1.q.out
    hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe2.q.out
    hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe3.q.out
    hive/trunk/ql/src/test/results/clientnegative/script_error.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_reflect_neg.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_test_error.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out
    hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out
    hive/trunk/ql/src/test/results/clientpositive/mapjoin_hook.q.out
    hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm

Modified: hive/trunk/build-common.xml
URL: http://svn.apache.org/viewvc/hive/trunk/build-common.xml?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/build-common.xml (original)
+++ hive/trunk/build-common.xml Fri Aug 12 01:05:50 2011
@@ -57,6 +57,7 @@
   <property name="test.junit.output.format" value="xml"/>
   <property name="test.junit.output.usefile" value="true"/>
   <property name="minimr.query.files" value="input16_cc.q,scriptfile1.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q"/>
+  <property name="minimr.query.negative.files" value="minimr_broken_pipe.q" />
   <property name="test.silent" value="true"/>
   <property name="hadoopVersion" value="${hadoop.version.ant-internal}"/>
   <property name="test.serialize.qplan" value="false"/>

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri Aug 12 01:05:50 2011
@@ -137,6 +137,8 @@ public class HiveConf extends Configurat
     DEFAULT_ZOOKEEPER_PARTITION_NAME("hive.lockmgr.zookeeper.default.partition.name", "__HIVE_DEFAULT_ZOOKEEPER_PARTITION__"),
     // Whether to show a link to the most failed task + debugging tips
     SHOW_JOB_FAIL_DEBUG_INFO("hive.exec.show.job.failure.debug.info", true),
+    JOB_DEBUG_TIMEOUT("hive.exec.job.debug.timeout", 30000),
+    TASKLOG_DEBUG_TIMEOUT("hive.exec.tasklog.debug.timeout", 20000),
 
     // should hive determine whether to run in local mode automatically ?
     LOCALMODEAUTO("hive.exec.mode.local.auto", false),
@@ -264,8 +266,8 @@ public class HiveConf extends Configurat
     HIVESCRIPTIDENVVAR("hive.script.operator.id.env.var", "HIVE_SCRIPT_OPERATOR_ID"),
     HIVEMAPREDMODE("hive.mapred.mode", "nonstrict"),
     HIVEALIAS("hive.alias", ""),
-    HIVEMAPSIDEAGGREGATE("hive.map.aggr", "true"),
-    HIVEGROUPBYSKEW("hive.groupby.skewindata", "false"),
+    HIVEMAPSIDEAGGREGATE("hive.map.aggr", true),
+    HIVEGROUPBYSKEW("hive.groupby.skewindata", false),
     HIVEJOINEMITINTERVAL("hive.join.emit.interval", 1000),
     HIVEJOINCACHESIZE("hive.join.cache.size", 25000),
     HIVEMAPJOINBUCKETCACHESIZE("hive.mapjoin.bucket.cache.size", 100),
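
For context, HadoopJobExecHelper and JobDebugger below read the two new timeout entries through HiveConf's typed accessors. A minimal sketch, assuming a populated HiveConf (or JobConf) instance named conf; both calls appear verbatim later in this commit:

    // Sketch only: reading the new typed ConfVars entries.
    int jobDebugTimeout = HiveConf.getIntVar(conf, HiveConf.ConfVars.JOB_DEBUG_TIMEOUT);     // default 30000 ms
    int taskLogTimeout = HiveConf.getIntVar(conf, HiveConf.ConfVars.TASKLOG_DEBUG_TIMEOUT);  // default 20000 ms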

Modified: hive/trunk/conf/hive-default.xml
URL: http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/conf/hive-default.xml (original)
+++ hive/trunk/conf/hive-default.xml Fri Aug 12 01:05:50 2011
@@ -936,7 +936,7 @@
 
 <property>
   <name>hive.exec.show.job.failure.debug.info</name>
-  <value>false</value>
+  <value>true</value>
   <description>
   	If a job fails, whether to provide a link in the CLI to the task with the
   	most failures, along with debugging hints if applicable.

Modified: hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out Fri Aug 12 01:05:50 2011
@@ -14,5 +14,15 @@ SELECT CASE WHEN 3 > 2 THEN 10 WHEN row_
 FROM src LIMIT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/7P/7PeC14kXFIWq0PIYyexGbmKuXUk/-Tmp-/jsichi/hive_2011-02-22_23-14-39_576_5692614556807208481/-mr-10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-05_12-14-46_686_3062563290565430667/-mr-10000
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/build.xml?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/build.xml (original)
+++ hive/trunk/ql/build.xml Fri Aug 12 01:05:50 2011
@@ -119,6 +119,19 @@
                   logDirectory="${test.log.dir}/clientpositive"
                   hadoopVersion="${hadoopVersion}"
                   />
+
+        <qtestgen outputDirectory="${test.build.src}/org/apache/hadoop/hive/cli" 
+                  templatePath="${ql.test.template.dir}" template="TestNegativeCliDriver.vm" 
+                  queryDirectory="${ql.test.query.dir}/clientnegative" 
+                  queryFile="${minimr.query.negative.files}"
+                  queryFileRegex="${qfile_negative_regex}"
+                  clusterMode="miniMR"
+                  runDisabled="${run_disabled}"
+                  resultsDirectory="${ql.test.results.dir}/clientnegative" className="TestNegativeMinimrCliDriver"
+                  logFile="${test.log.dir}/testnegativeminimrclidrivergen.log"
+                  logDirectory="${test.log.dir}/clientnegative"
+                  hadoopVersion="${hadoopVersion}"
+                  />
       </then>
     </if>
 
@@ -127,6 +140,7 @@
               queryDirectory="${ql.test.query.dir}/clientnegative" 
               queryFile="${qfile}"
               queryFileRegex="${qfile_regex}"
+              excludeQueryFile="${minimr.query.negative.files}"
               runDisabled="${run_disabled}"
               resultsDirectory="${ql.test.results.dir}/clientnegative" className="TestNegativeCliDriver"
               logFile="${test.log.dir}/testnegclidrivergen.log"

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java Fri Aug 12 01:05:50 2011
@@ -28,6 +28,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.Enumeration;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.MapRedStats;
@@ -45,6 +46,11 @@ import org.apache.hadoop.mapred.RunningJ
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.hadoop.mapred.TaskReport;
 import org.apache.hadoop.mapred.Counters.Counter;
+import org.apache.log4j.Appender;
+import org.apache.log4j.BasicConfigurator;
+import org.apache.log4j.FileAppender;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.PropertyConfigurator;
 
 public class HadoopJobExecHelper {
 
@@ -512,6 +518,7 @@ public class HadoopJobExecHelper {
 
     int startIndex = 0;
 
+    console.printError("Error during job, obtaining debugging information...");
     // Loop to get all task completion events because getTaskCompletionEvents
     // only returns a subset per call
     while (true) {
@@ -537,6 +544,7 @@ public class HadoopJobExecHelper {
         // and the logs
         String taskId = taskJobIds[0];
         String jobId = taskJobIds[1];
+        console.printError("Examining task ID: " + taskId + " from job " + jobId);
 
         TaskInfo ti = taskIdToInfo.get(taskId);
         if (ti == null) {
@@ -544,7 +552,7 @@ public class HadoopJobExecHelper {
           taskIdToInfo.put(taskId, ti);
         }
         // These tasks should have come from the same job.
-        assert (ti.getJobId() == jobId);
+        assert (ti.getJobId() != null && ti.getJobId().equals(jobId));
         ti.getLogUrls().add(getTaskAttemptLogUrl(t.getTaskTrackerHttp(), t.getTaskId()));
 
         // If a task failed, then keep track of the total number of failures
@@ -619,7 +627,6 @@ public class HadoopJobExecHelper {
         sb.append("-----\n");
 
         console.printError(sb.toString());
-
         // Only print out one task because that's good enough for debugging.
         break;
       }
@@ -628,6 +635,45 @@ public class HadoopJobExecHelper {
 
   }
 
+  public void localJobDebugger(int exitVal, String taskId) {
+    StringBuilder sb = new StringBuilder();
+    sb.append("\n");
+    sb.append("Task failed!\n");
+    sb.append("Task ID:\n  " + taskId + "\n\n");
+    sb.append("Logs:\n");
+    console.printError(sb.toString());
+
+    for (Appender a : Collections.list((Enumeration<Appender>)
+          LogManager.getRootLogger().getAllAppenders())) {
+      if (a instanceof FileAppender) {
+        console.printError(((FileAppender)a).getFile());
+      }
+    }
+  }
+
+  public int progressLocal(Process runningJob, String taskId) {
+    int exitVal = -101;
+    try {
+      exitVal = runningJob.waitFor(); //TODO: poll periodically
+    } catch (InterruptedException e) {
+    } // interrupted while waiting; fall through with the sentinel exitVal
+
+    if (exitVal != 0) {
+      console.printError("Execution failed with exit status: " + exitVal);
+      console.printError("Obtaining error information");
+      if (HiveConf.getBoolVar(job, HiveConf.ConfVars.SHOW_JOB_FAIL_DEBUG_INFO)) {
+        // Since local jobs are run sequentially, all relevant information is already available
+        // Therefore, no need to fetch job debug info asynchronously
+        localJobDebugger(exitVal, taskId);
+      }
+    } else {
+      console.printInfo("Execution completed successfully");
+      console.printInfo("Mapred Local Task Succeeded . Convert the Join into MapJoin");
+    }
+    return exitVal;
+  }
+
+
   public int progress(RunningJob rj, JobClient jc) throws IOException {
     jobId = rj.getJobID();
 
@@ -666,7 +712,15 @@ public class HadoopJobExecHelper {
       returnVal = 2;
       console.printError(statusMesg);
       if (HiveConf.getBoolVar(job, HiveConf.ConfVars.SHOW_JOB_FAIL_DEBUG_INFO)) {
-        showJobFailDebugInfo(job, rj);
+        try {
+          JobDebugger jd = new JobDebugger(job, rj, console);
+          Thread t = new Thread(jd);
+          t.start();
+          t.join(HiveConf.getIntVar(job, HiveConf.ConfVars.JOB_DEBUG_TIMEOUT));
+        } catch (InterruptedException e) {
+          console.printError("Timed out trying to grab more detailed job failure"
+              + " information, please check jobtracker for more info");
+        }
       }
     } else {
       console.printInfo(statusMesg);
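
A note on the bounded wait above: Thread.join(millis) returns normally once the timeout elapses; InterruptedException is raised only if the waiting thread itself is interrupted, so the catch block's timeout message will not fire on an ordinary timeout. One way to distinguish the two outcomes is an isAlive() check after the join; an illustrative sketch using this commit's names, not part of the patch:

    JobDebugger jd = new JobDebugger(job, rj, console);
    Thread t = new Thread(jd);
    t.start();
    try {
      t.join(HiveConf.getIntVar(job, HiveConf.ConfVars.JOB_DEBUG_TIMEOUT));
      if (t.isAlive()) {
        // join() timed out; the debugger thread is still running
        console.printError("Timed out trying to grab more detailed job failure"
            + " information, please check jobtracker for more info");
      }
    } catch (InterruptedException e) {
      // the waiting thread was interrupted while blocked in join()
    }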

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java?rev=1156928&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java Fri Aug 12 01:05:50 2011
@@ -0,0 +1,245 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.Operator.ProgressCounter;
+import org.apache.hadoop.hive.ql.exec.errors.ErrorAndSolution;
+import org.apache.hadoop.hive.ql.exec.errors.TaskLogProcessor;
+import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.TaskCompletionEvent;
+import org.apache.hadoop.mapred.TaskReport;
+
+/**
+ * JobDebugger takes a RunningJob that has failed, identifies the task with
+ * the most failed attempts, and reports the details to the Hive CLI.
+ */
+public class JobDebugger implements Runnable {
+  private JobConf conf;
+  private RunningJob rj;
+  private LogHelper console;
+  private Map<String, Integer> failures = new HashMap<String, Integer>(); // Mapping from task ID to the number of failures
+  private Set<String> successes = new HashSet<String>(); // Successful task ID's
+  private Map<String, TaskInfo> taskIdToInfo = new HashMap<String, TaskInfo>();
+
+  // Used for showJobFailDebugInfo
+  private static class TaskInfo {
+    String jobId;
+    Set<String> logUrls;
+
+    public TaskInfo(String jobId) {
+      this.jobId = jobId;
+      logUrls = new HashSet<String>();
+    }
+
+    public void addLogUrl(String logUrl) {
+      logUrls.add(logUrl);
+    }
+
+    public Set<String> getLogUrls() {
+      return logUrls;
+    }
+
+    public String getJobId() {
+      return jobId;
+    }
+  }
+
+  public JobDebugger(JobConf conf, RunningJob rj, LogHelper console) {
+    this.conf = conf;
+    this.rj = rj;
+    this.console = console;
+  }
+
+  public void run() {
+    try {
+      showJobFailDebugInfo();
+    } catch (IOException e) {
+      console.printError(e.getMessage());
+    }
+  }
+  private String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
+    return taskTrackerHttpAddress + "/tasklog?taskid=" + taskAttemptId + "&start=-8193";
+  }
+
+  class TaskLogGrabber implements Runnable {
+
+    public void run() {
+      try {
+        getTaskLogs();
+      } catch (IOException e) {
+        console.printError(e.getMessage());
+      }
+    }
+
+    private void getTaskLogs() throws IOException {
+      int startIndex = 0;
+      while (true) {
+        TaskCompletionEvent[] taskCompletions = rj.getTaskCompletionEvents(startIndex);
+
+        if (taskCompletions == null || taskCompletions.length == 0) {
+          break;
+        }
+
+        boolean more = true;
+        for (TaskCompletionEvent t : taskCompletions) {
+          // getTaskJobIDs returns Strings for compatibility with Hadoop versions
+          // without TaskID or TaskAttemptID
+          String[] taskJobIds = ShimLoader.getHadoopShims().getTaskJobIDs(t);
+
+          if (taskJobIds == null) {
+            console.printError("Task attempt info is unavailable in this Hadoop version");
+            more = false;
+            break;
+          }
+
+          // For each task completion event, get the associated task id, job id
+          // and the logs
+          String taskId = taskJobIds[0];
+          String jobId = taskJobIds[1];
+          console.printError("Examining task ID: " + taskId + " from job " + jobId);
+
+          TaskInfo ti = taskIdToInfo.get(taskId);
+          if (ti == null) {
+            ti = new TaskInfo(jobId);
+            taskIdToInfo.put(taskId, ti);
+          }
+          // These tasks should have come from the same job.
+          assert (ti.getJobId() != null && ti.getJobId().equals(jobId));
+          ti.getLogUrls().add(getTaskAttemptLogUrl(t.getTaskTrackerHttp(), t.getTaskId()));
+
+          // If a task failed, then keep track of the total number of failures
+          // for that task (typically, a task gets re-run up to 4 times if it
+          // fails)
+
+          if (t.getTaskStatus() != TaskCompletionEvent.Status.SUCCEEDED) {
+            Integer failAttempts = failures.get(taskId);
+            if (failAttempts == null) {
+              failAttempts = Integer.valueOf(0);
+            }
+            failAttempts = Integer.valueOf(failAttempts.intValue() + 1);
+            failures.put(taskId, failAttempts);
+          } else {
+            successes.add(taskId);
+          }
+        }
+        if (!more) {
+          break;
+        }
+        startIndex += taskCompletions.length;
+      }
+    }
+  }
+
+  @SuppressWarnings("deprecation")
+  private void showJobFailDebugInfo() throws IOException {
+
+
+    console.printError("Error during job, obtaining debugging information...");
+    // Loop to get all task completion events because getTaskCompletionEvents
+    // only returns a subset per call
+    TaskLogGrabber tlg = new TaskLogGrabber();
+    Thread t = new Thread(tlg);
+    try {
+      t.start();
+      t.join(HiveConf.getIntVar(conf, HiveConf.ConfVars.TASKLOG_DEBUG_TIMEOUT));
+    } catch (InterruptedException e) {
+      console.printError("Timed out trying to finish grabbing task log URLs, "
+          + "some task info may be missing");
+    }
+    
+    // Remove failures for tasks that succeeded
+    for (String task : successes) {
+      failures.remove(task);
+    }
+
+    if (failures.keySet().size() == 0) {
+      return;
+    }
+
+    // Find the highest failure count
+    int maxFailures = 0;
+    for (Integer failCount : failures.values()) {
+      if (maxFailures < failCount.intValue()) {
+        maxFailures = failCount.intValue();
+      }
+    }
+
+    // Display Error Message for tasks with the highest failure count
+    String jtUrl = JobTrackerURLResolver.getURL(conf);
+
+    for (String task : failures.keySet()) {
+      if (failures.get(task).intValue() == maxFailures) {
+        TaskInfo ti = taskIdToInfo.get(task);
+        String jobId = ti.getJobId();
+        String taskUrl = jtUrl + "/taskdetails.jsp?jobid=" + jobId + "&tipid=" + task.toString();
+
+        TaskLogProcessor tlp = new TaskLogProcessor(conf);
+        for (String logUrl : ti.getLogUrls()) {
+          tlp.addTaskAttemptLogUrl(logUrl);
+        }
+
+        List<ErrorAndSolution> errors = tlp.getErrors();
+
+        StringBuilder sb = new StringBuilder();
+        // We use a StringBuilder and then call printError only once as
+        // printError will write to both stderr and the error log file. In
+        // situations where both the stderr and the log file output is
+        // simultaneously output to a single stream, this will look cleaner.
+        sb.append("\n");
+        sb.append("Task with the most failures(" + maxFailures + "): \n");
+        sb.append("-----\n");
+        sb.append("Task ID:\n  " + task + "\n\n");
+        sb.append("URL:\n  " + taskUrl + "\n");
+
+        for (ErrorAndSolution e : errors) {
+          sb.append("\n");
+          sb.append("Possible error:\n  " + e.getError() + "\n\n");
+          sb.append("Solution:\n  " + e.getSolution() + "\n");
+        }
+        sb.append("-----\n");
+
+        console.printError(sb.toString());
+
+        // Only print out one task because that's good enough for debugging.
+        break;
+      }
+    }
+    return;
+
+  }
+}
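
The diagnosis step at the core of showJobFailDebugInfo() reduces to a short pipeline. A condensed sketch using only the TaskLogProcessor calls seen above; logUrls stands in for the attempt URLs collected by TaskLogGrabber (each URL fetches roughly the last 8 KB of an attempt's log via the &start=-8193 parameter):

    TaskLogProcessor tlp = new TaskLogProcessor(conf);
    for (String logUrl : logUrls) {
      tlp.addTaskAttemptLogUrl(logUrl);        // register one task attempt's log
    }
    for (ErrorAndSolution e : tlp.getErrors()) {
      console.printError("Possible error:\n  " + e.getError());
      console.printError("Solution:\n  " + e.getSolution());
    }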

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java Fri Aug 12 01:05:50 2011
@@ -267,7 +267,7 @@ public class MapRedTask extends ExecDriv
       outPrinter.start();
       errPrinter.start();
 
-      int exitVal = executor.waitFor();
+      int exitVal = jobExecHelper.progressLocal(executor, getId());
 
       if (exitVal != 0) {
         LOG.error("Execution failed with exit status: " + exitVal);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java Fri Aug 12 01:05:50 2011
@@ -63,6 +63,7 @@ import org.apache.hadoop.util.Reflection
 public class MapredLocalTask extends Task<MapredLocalWork> implements Serializable {
 
   private Map<String, FetchOperator> fetchOperators;
+  protected HadoopJobExecHelper jobExecHelper;
   private JobConf job;
   public static transient final Log l4j = LogFactory.getLog(MapredLocalTask.class);
   static final String HADOOP_MEM_KEY = "HADOOP_HEAPSIZE";
@@ -89,6 +90,8 @@ public class MapredLocalTask extends Tas
   public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext) {
     super.initialize(conf, queryPlan, driverContext);
     job = new JobConf(conf, ExecDriver.class);
+    // we don't use the HadoopJobExecHooks for local tasks
+    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, null);
   }
 
   public static String now() {
@@ -213,7 +216,7 @@ public class MapredLocalTask extends Tas
       outPrinter.start();
       errPrinter.start();
 
-      int exitVal = executor.waitFor();
+      int exitVal = jobExecHelper.progressLocal(executor, getId());
 
       if (exitVal != 0) {
         LOG.error("Execution failed with exit status: " + exitVal);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Aug 12 01:05:50 2011
@@ -5812,8 +5812,7 @@ public class SemanticAnalyzer extends Ba
         if (qbp.getAggregationExprsForClause(dest).size() != 0
             || getGroupByForClause(qbp, dest).size() > 0) {
           //multiple distincts is not supported with skew in data
-          if (conf.getVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)
-              .equalsIgnoreCase("true") &&
+          if (conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW) &&
              qbp.getDistinctFuncExprsForClause(dest).size() > 1) {
             throw new SemanticException(ErrorMsg.UNSUPPORTED_MULTIPLE_DISTINCTS.
                 getMsg());
@@ -5821,16 +5820,13 @@ public class SemanticAnalyzer extends Ba
           // insert a select operator here used by the ColumnPruner to reduce
           // the data to shuffle
           curr = insertSelectAllPlanForGroupBy(dest, curr);
-          if (conf.getVar(HiveConf.ConfVars.HIVEMAPSIDEAGGREGATE)
-              .equalsIgnoreCase("true")) {
-            if (conf.getVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)
-                .equalsIgnoreCase("false")) {
+          if (conf.getBoolVar(HiveConf.ConfVars.HIVEMAPSIDEAGGREGATE)) {
+            if (!conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)) {
               curr = genGroupByPlanMapAggr1MR(dest, qb, curr);
             } else {
               curr = genGroupByPlanMapAggr2MR(dest, qb, curr);
             }
-          } else if (conf.getVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)
-              .equalsIgnoreCase("true")) {
+          } else if (conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)) {
             curr = genGroupByPlan2MR(dest, qb, curr);
           } else {
             curr = genGroupByPlan1MR(dest, qb, curr);
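
These SemanticAnalyzer changes rely on the HiveConf change earlier in this commit: getBoolVar expects the ConfVars entry to carry a boolean default, which is presumably why HIVEMAPSIDEAGGREGATE and HIVEGROUPBYSKEW switched from "true"/"false" strings to booleans. The two forms read the same setting:

    // before: string round-trip against the old string default
    boolean skew = conf.getVar(HiveConf.ConfVars.HIVEGROUPBYSKEW).equalsIgnoreCase("true");
    // after: typed accessor (requires the boolean default added above)
    boolean skew2 = conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW);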

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Fri Aug 12 01:05:50 2011
@@ -950,6 +950,7 @@ public class QTestUtil {
         "-I", "LOCK_QUERYID:",
         "-I", "grantTime",
         "-I", "[.][.][.] [0-9]* more",
+        "-I", "job_[0-9]*_[0-9]*",
         "-I", "USING 'java -cp",
         (new File(logDir, tname + ".out")).getPath(),
         outFileName };
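
The added pattern joins the other "-I" arguments passed to diff (ignore lines matching the regex), masking nondeterministic MapReduce job IDs in .q.out comparisons, e.g. the "Ended Job = job_20110810112236124_0001" line in the new minimr_broken_pipe.q.out below. The same pattern in Java, for illustration:

    // true: this line would be ignored when diffing expected vs. actual output
    boolean masked = "Ended Job = job_20110810112236124_0001 with errors"
        .matches(".*job_[0-9]*_[0-9]*.*");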

Added: hive/trunk/ql/src/test/queries/clientnegative/minimr_broken_pipe.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/minimr_broken_pipe.q?rev=1156928&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/minimr_broken_pipe.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/minimr_broken_pipe.q Fri Aug 12 01:05:50 2011
@@ -0,0 +1,4 @@
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.exec.script.allow.partial.consumption = false;
+-- Tests exception in ScriptOperator.close() by passing to the operator a small amount of data
+SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp;

Modified: hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out Fri Aug 12 01:05:50 2011
@@ -7,4 +7,14 @@ PREHOOK: query: insert overwrite table n
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@nzhang_part
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out Fri Aug 12 01:05:50 2011
@@ -31,5 +31,15 @@ POSTHOOK: Lineage: default__src_src_inde
 PREHOOK: query: SELECT key, value FROM src WHERE key=100 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-01_16-15-31_393_2323708709856396072/-mr-10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-05_12-04-13_955_1009283972520120333/-mr-10000
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/index_compact_size_limit.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/index_compact_size_limit.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/index_compact_size_limit.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/index_compact_size_limit.q.out Fri Aug 12 01:05:50 2011
@@ -31,5 +31,15 @@ POSTHOOK: Lineage: default__src_src_inde
 PREHOOK: query: SELECT key, value FROM src WHERE key=100 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-01_16-15-40_648_4958727540603697272/-mr-10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-05_12-04-23_821_264262594564060016/-mr-10000
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Added: hive/trunk/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out?rev=1156928&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out Fri Aug 12 01:05:50 2011
@@ -0,0 +1,7 @@
+PREHOOK: query: -- Tests exception in ScriptOperator.close() by passing to the operator a small amount of data
+SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: hdfs://localhost.localdomain:36146/data/users/salbiz/apache-hive/build/ql/scratchdir/hive_2011-08-10_11-22-50_431_7753055018861847257/-mr-10000
+Ended Job = job_20110810112236124_0001 with errors
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe1.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe1.q.out Fri Aug 12 01:05:50 2011
@@ -2,5 +2,15 @@ PREHOOK: query: -- Tests exception in Sc
 SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/pyang/trunk/VENDOR.hive/trunk/build/ql/tmp/1832401066/10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-02_17-12-22_660_8303431302792802567/-mr-10000
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe2.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe2.q.out Fri Aug 12 01:05:50 2011
@@ -2,5 +2,15 @@ PREHOOK: query: -- Tests exception in Sc
 SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key, value, key, value, key, value, key, value, key, value, key, value, key, value) USING 'true' as a,b,c,d FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/pyang/script/trunk/VENDOR.hive/trunk/build/ql/tmp/1650258494/10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-02_17-12-25_737_1025187653570997701/-mr-10000
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe3.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/script_broken_pipe3.q.out Fri Aug 12 01:05:50 2011
@@ -2,5 +2,15 @@ PREHOOK: query: -- Test to ensure that a
 SELECT TRANSFORM(*) USING 'false' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/pyang/trunk/VENDOR.hive/trunk/build/ql/tmp/1937270363/10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-02_17-12-28_813_6316382116761729323/-mr-10000
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/script_error.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/script_error.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/script_error.q.out Fri Aug 12 01:05:50 2011
@@ -48,5 +48,15 @@ PREHOOK: query: SELECT TRANSFORM(src.key
 FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_21-03-24_190_21878950587359648/-mr-10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-02_17-12-31_979_3869579274390938020/-mr-10000
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/udf_reflect_neg.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_reflect_neg.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_reflect_neg.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_reflect_neg.q.out Fri Aug 12 01:05:50 2011
@@ -8,5 +8,15 @@ PREHOOK: query: SELECT reflect("java.lan
 FROM src LIMIT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/edward/hive_2010-08-28_18-06-12_525_3180708990156012812/-mr-10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-02_17-12-49_514_931146053236968163/-mr-10000
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/udf_test_error.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_test_error.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_test_error.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_test_error.q.out Fri Aug 12 01:05:50 2011
@@ -5,5 +5,15 @@ POSTHOOK: type: CREATEFUNCTION
 PREHOOK: query: SELECT test_error(key < 125 OR key > 130) FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-03-06_00-58-40_004_2624763517220611615/10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-02_17-12-52_827_5964651401121748786/-mr-10000
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out Fri Aug 12 01:05:50 2011
@@ -10,5 +10,15 @@ FROM (
 ) map_output
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-03-05_23-12-16_809_4809554819212794550/10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-02_17-12-55_772_7446403141126843360/-mr-10000
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out Fri Aug 12 01:05:50 2011
@@ -13,6 +13,16 @@ PREHOOK: Input: default@srcpart@ds=2008-
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@dest1
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-7
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
 ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 POSTHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
@@ -30,11 +40,11 @@ POSTHOOK: Lineage: dest1.value SIMPLE [(
 PREHOOK: query: SELECT sum(hash(dest1.key,dest1.value)) FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-12_062_2731833788874193660/-mr-10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-26-47_118_1954071308745703162/-mr-10000
 POSTHOOK: query: SELECT sum(hash(dest1.key,dest1.value)) FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-12_062_2731833788874193660/-mr-10000
+POSTHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-26-47_118_1954071308745703162/-mr-10000
 POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 407444119660
@@ -50,8 +60,28 @@ INSERT OVERWRITE TABLE dest_j2 SELECT sr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest_j2
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-14
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
 ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-12
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
 ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key)
@@ -66,11 +96,11 @@ POSTHOOK: Lineage: dest_j2.value SIMPLE 
 PREHOOK: query: SELECT sum(hash(dest_j2.key,dest_j2.value)) FROM dest_j2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j2
-PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-36_524_5308749215651001089/-mr-10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-27-02_282_7965605157514278016/-mr-10000
 POSTHOOK: query: SELECT sum(hash(dest_j2.key,dest_j2.value)) FROM dest_j2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j2
-POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-36_524_5308749215651001089/-mr-10000
+POSTHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-27-02_282_7965605157514278016/-mr-10000
 POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest_j2.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ]
@@ -90,6 +120,16 @@ INSERT OVERWRITE TABLE dest_j1 SELECT sr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest_j1
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-7
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
 ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key)
@@ -106,11 +146,11 @@ POSTHOOK: Lineage: dest_j2.value SIMPLE 
 PREHOOK: query: SELECT sum(hash(dest_j1.key,dest_j1.value)) FROM dest_j1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-49_448_5881123257419888652/-mr-10000
+PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-27-11_787_504951263993420939/-mr-10000
 POSTHOOK: query: SELECT sum(hash(dest_j1.key,dest_j1.value)) FROM dest_j1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-49_448_5881123257419888652/-mr-10000
+POSTHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-27-11_787_504951263993420939/-mr-10000
 POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientpositive/mapjoin_hook.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/mapjoin_hook.q.out?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/mapjoin_hook.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/mapjoin_hook.q.out Fri Aug 12 01:05:50 2011
@@ -25,6 +25,16 @@ PREHOOK: Input: default@srcpart@ds=2008-
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@dest1
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-7
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
 ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 [MapJoinCounter PostHook] CONVERTED_LOCAL_MAPJOIN: 1 CONVERTED_MAPJOIN: 0 LOCAL_MAPJOIN: 0 COMMON_JOIN: 0 BACKUP_COMMON_JOIN: 1
@@ -33,8 +43,28 @@ INSERT OVERWRITE TABLE dest1 SELECT src1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest1
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-14
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
 ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-12
+
+Logs:
+
+/data/users/salbiz/apache-hive/build/ql/tmp//hive.log
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
 ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 [MapJoinCounter PostHook] CONVERTED_LOCAL_MAPJOIN: 2 CONVERTED_MAPJOIN: 0 LOCAL_MAPJOIN: 0 COMMON_JOIN: 0 BACKUP_COMMON_JOIN: 2

Modified: hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm?rev=1156928&r1=1156927&r2=1156928&view=diff
==============================================================================
--- hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm (original)
+++ hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm Fri Aug 12 01:05:50 2011
@@ -19,7 +19,12 @@ public class $className extends TestCase
 
   static {
     try {
-      qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()", false, "0.20");
+      boolean miniMR = false;
+      String hadoopVer;
+      if ("$clusterMode".equals("miniMR"))
+        miniMR = true;
+      hadoopVer = "$hadoopVersion";
+      qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()", miniMR, hadoopVer);
       // do a one time initialization
       qt.cleanUp();
       qt.createSources();
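
For the new TestNegativeMinimrCliDriver target, qtestgen substitutes clusterMode="miniMR", so the generated static block reduces to the following; this is an illustrative expansion, with result/log paths elided and the hadoopVersion value assumed:

    boolean miniMR = false;
    String hadoopVer;
    if ("miniMR".equals("miniMR"))      // "$clusterMode" after substitution
      miniMR = true;
    hadoopVer = "0.20";                 // assumed value of "$hadoopVersion"
    qt = new QTestUtil(resultsDir, logDir, miniMR, hadoopVer);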