Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/10/17 01:15:01 UTC

svn commit: r464718 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/mapred/JobInProgress.java src/webapps/job/analysejobhistory.jsp

Author: cutting
Date: Mon Oct 16 16:15:00 2006
New Revision: 464718

URL: http://svn.apache.org/viewvc?view=rev&rev=464718
Log:
HADOOP-588.  Fix logging and accounting of failed tasks.  Contributed by Sanjay.

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
    lucene/hadoop/trunk/src/webapps/job/analysejobhistory.jsp

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=464718&r1=464717&r2=464718
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Mon Oct 16 16:15:00 2006
@@ -20,6 +20,9 @@
  5. HADOOP-514.  Make DFS heartbeat interval configurable.
     (Milind Bhandarkar via cutting)
 
+ 6. HADOOP-588.  Fix logging and accounting of failed tasks.
+    (Sanjay Dahiya via cutting)
+
 
 Release 0.7.1 - 2006-10-11
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?view=diff&rev=464718&r1=464717&r2=464718
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Mon Oct 16 16:15:00 2006
@@ -563,9 +563,8 @@
             for (int i = 0; i < reduces.length; i++) {
                 reduces[i].kill();
             }
-            JobHistory.JobInfo.logFinished(this.status.getJobId(), finishTime, 
-                this.finishedMapTasks, this.finishedReduceTasks, failedMapTasks, 
-                failedReduceTasks);
+            JobHistory.JobInfo.logFailed(this.status.getJobId(), finishTime, 
+                this.finishedMapTasks, this.finishedReduceTasks);
             garbageCollect();
         }
     }
@@ -638,15 +637,15 @@
         //
         if (tip.isFailed()) {
             LOG.info("Aborting job " + profile.getJobId());
+            JobHistory.Task.logFailed(profile.getJobId(), tip.getTIPId(), 
+                tip.isMapTask() ? Values.MAP.name():Values.REDUCE.name(),  
+                System.currentTimeMillis(), status.getDiagnosticInfo());
             JobHistory.JobInfo.logFailed(profile.getJobId(), 
                 System.currentTimeMillis(), this.finishedMapTasks, this.finishedReduceTasks);
             kill();
         }
 
         jobtracker.removeTaskEntry(taskid);
-        JobHistory.Task.logFailed(profile.getJobId(), tip.getTIPId(), 
-            tip.isMapTask() ? Values.MAP.name():Values.REDUCE.name(),  
-            System.currentTimeMillis(), status.getDiagnosticInfo());
  }
 
     /**
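
Taken together, the two JobInProgress.java hunks make the task-level failure record (including its diagnostic info) get written before the job itself is logged as failed and killed, and a job that is killed after a task failure is now recorded via JobHistory.JobInfo.logFailed rather than logFinished. Below is a minimal standalone sketch of that ordering; the method names are illustrative stand-ins for Hadoop's JobHistory calls, not the actual Hadoop classes.

// Sketch only: stand-in logging methods that mirror the ordering of the fixed failure path.
public class FailedTaskLoggingSketch {

    // Stand-in for JobHistory.Task.logFailed(jobId, tipId, taskType, timestamp, diagnostics)
    static void logTaskFailed(String jobId, String tipId, String taskType,
                              long timestamp, String diagnostics) {
        System.out.println("TASK FAILED " + tipId + " (" + taskType + "): " + diagnostics);
    }

    // Stand-in for JobHistory.JobInfo.logFailed(jobId, finishTime, finishedMaps, finishedReduces)
    static void logJobFailed(String jobId, long finishTime,
                             int finishedMaps, int finishedReduces) {
        System.out.println("JOB FAILED " + jobId + " after " + finishedMaps
                + " finished maps and " + finishedReduces + " finished reduces");
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        // Fixed ordering: record the failed task and its diagnostics first,
        // then record the job failure; only after that would the job be killed.
        logTaskFailed("job_0001", "tip_0001_m_000003", "MAP", now, "example diagnostic message");
        logJobFailed("job_0001", now, 2, 0);
    }
}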

Modified: lucene/hadoop/trunk/src/webapps/job/analysejobhistory.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/job/analysejobhistory.jsp?view=diff&rev=464718&r1=464717&r2=464718
==============================================================================
--- lucene/hadoop/trunk/src/webapps/job/analysejobhistory.jsp (original)
+++ lucene/hadoop/trunk/src/webapps/job/analysejobhistory.jsp Mon Oct 16 16:15:00 2006
@@ -38,14 +38,13 @@
 <hr/>
 <center>
 <%
-	Map<String, JobHistory.Task> tasks = job.getAllTasks();
-	int finishedMaps = job.getInt(Keys.FINISHED_MAPS)  ;
-	int finishedReduces = job.getInt(Keys.FINISHED_REDUCES) ;
-	if( finishedMaps == 0 || finishedReduces == 0 ){
+	if( ! Values.SUCCESS.name().equals(job.get(Keys.JOB_STATUS)) ){
 	  out.print("<h3>No Analysis available as job did not finish</h3>");
 	  return ;
 	}
-	
+	Map<String, JobHistory.Task> tasks = job.getAllTasks();
+	int finishedMaps = job.getInt(Keys.FINISHED_MAPS)  ;
+	int finishedReduces = job.getInt(Keys.FINISHED_REDUCES) ;
 	JobHistory.Task [] mapTasks = new JobHistory.Task[finishedMaps]; 
 	JobHistory.Task [] reduceTasks = new JobHistory.Task[finishedReduces]; 
 	int mapIndex = 0 , reduceIndex=0;
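
The analysejobhistory.jsp hunk replaces the proxy check on finished map/reduce counts with an explicit check of the recorded job status. The following is a minimal standalone illustration of that comparison, with a local enum standing in for Hadoop's JobHistory.Values (all names here are for illustration only).

// Sketch only: illustrates the SUCCESS-status guard the JSP now performs.
public class JobStatusCheckSketch {

    // Stand-in for JobHistory.Values; the real enum lives inside Hadoop's JobHistory class.
    enum Values { SUCCESS, FAILED }

    static boolean analysisAvailable(String jobStatus) {
        // Matches the JSP guard: analysis is shown only for jobs recorded as SUCCESS.
        return Values.SUCCESS.name().equals(jobStatus);
    }

    public static void main(String[] args) {
        System.out.println(analysisAvailable("SUCCESS")); // true
        System.out.println(analysisAvailable("FAILED"));  // false
        System.out.println(analysisAvailable(null));      // false; equals() on the constant is null-safe
    }
}

Writing the comparison as Values.SUCCESS.name().equals(jobStatus) keeps the guard null-safe if the status key is absent from the job history record.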