Posted to commits@pig.apache.org by ol...@apache.org on 2008/10/07 02:44:41 UTC

svn commit: r702316 - in /incubator/pig/branches/types: CHANGES.txt src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java

Author: olga
Date: Mon Oct  6 17:44:41 2008
New Revision: 702316

URL: http://svn.apache.org/viewvc?rev=702316&view=rev
Log:
PIG-471: ignore status errors

Modified:
    incubator/pig/branches/types/CHANGES.txt
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java

Modified: incubator/pig/branches/types/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/CHANGES.txt?rev=702316&r1=702315&r2=702316&view=diff
==============================================================================
--- incubator/pig/branches/types/CHANGES.txt (original)
+++ incubator/pig/branches/types/CHANGES.txt Mon Oct  6 17:44:41 2008
@@ -273,3 +273,5 @@
     PIG-464: bag schema definition (pradeepk via olgan)
 
     PIG-457: report 100% on successful jobs only (shravanmn via olgan)
+
+    PIG-471: ignoring status errors from hadoop (pradeepk via olgan)

Modified: incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java?rev=702316&r1=702315&r2=702316&view=diff
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java (original)
+++ incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java Mon Oct  6 17:44:41 2008
@@ -85,14 +85,22 @@
         return (int)(Math.ceil(prog)) == (int)1;
     }
     
-    protected void getStats(Job job, JobClient jobClient, boolean errNotDbg) throws IOException{
+    protected void getStats(Job job, JobClient jobClient, boolean errNotDbg) {
         JobID MRJobID = job.getAssignedJobID();
-        TaskReport[] mapRep = jobClient.getMapTaskReports(MRJobID);
-        getErrorMessages(mapRep, "map", errNotDbg);
-        totalHadoopTimeSpent += computeTimeSpent(mapRep);
-        TaskReport[] redRep = jobClient.getReduceTaskReports(MRJobID);
-        getErrorMessages(redRep, "reduce", errNotDbg);
-        totalHadoopTimeSpent += computeTimeSpent(mapRep);
+        try {
+            TaskReport[] mapRep = jobClient.getMapTaskReports(MRJobID);
+            getErrorMessages(mapRep, "map", errNotDbg);
+            totalHadoopTimeSpent += computeTimeSpent(mapRep);
+            TaskReport[] redRep = jobClient.getReduceTaskReports(MRJobID);
+            getErrorMessages(redRep, "reduce", errNotDbg);
+            totalHadoopTimeSpent += computeTimeSpent(mapRep);
+        } catch (IOException e) {
+            if(job.getState() == Job.SUCCESS) {
+                // if the job succeeded, let the user know that
+                // we were unable to get statistics
+                log.warn("Unable to get job related diagnostics");
+            }
+        }
     }
     
     protected long computeTimeSpent(TaskReport[] mapReports) {
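The shape of the change above: getStats() no longer propagates IOException to its caller; a failure while fetching task reports is caught, and a warning is logged only when the job itself is known to have succeeded, so a failed job's own error message remains the headline. Below is a minimal, self-contained sketch of that pattern. The ReportSource, JobState, and StatsCollector names are hypothetical stand-ins for illustration only, not Pig or Hadoop APIs.

    import java.io.IOException;

    // Hypothetical stand-in for the JobClient task-report calls.
    interface ReportSource {
        String[] mapReports() throws IOException;
        String[] reduceReports() throws IOException;
    }

    // Hypothetical stand-in for the job-control job state.
    enum JobState { SUCCESS, FAILED }

    class StatsCollector {
        private long totalTimeSpent = 0;

        // Mirrors the patched getStats(): report-fetching errors are not
        // rethrown; a warning is emitted only if the job succeeded.
        void collect(ReportSource src, JobState state) {
            try {
                String[] mapRep = src.mapReports();
                totalTimeSpent += mapRep.length;   // placeholder for computeTimeSpent()
                String[] redRep = src.reduceReports();
                totalTimeSpent += redRep.length;   // placeholder for computeTimeSpent()
            } catch (IOException e) {
                if (state == JobState.SUCCESS) {
                    System.err.println("WARN: unable to get job related diagnostics");
                }
            }
        }
    }

With this pattern, a transient failure while retrieving diagnostics no longer fails a pipeline that otherwise completed; only the statistics are lost, and the user is warned.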