You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by om...@apache.org on 2011/03/08 06:57:36 UTC

svn commit: r1079222 - in /hadoop/mapreduce/branches/yahoo-merge/src: java/org/apache/hadoop/mapred/ java/org/apache/hadoop/mapreduce/ test/mapred/org/apache/hadoop/mapreduce/

Author: omalley
Date: Tue Mar  8 05:57:36 2011
New Revision: 1079222

URL: http://svn.apache.org/viewvc?rev=1079222&view=rev
Log:
commit 3dc926247fa21934cb0176560535b1ffa876bf84
Author: Greg Roelofs <ro...@yahoo-inc.com>
Date:   Fri Dec 17 16:55:14 2010 -0800

    _All_ changes related to timeout issue (phase != CLEANUP), including vast
    quantities of debug noise (which will remove again in next checkin, leaving
    only "pristine" code).

Modified:
    hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/DefaultTaskController.java
    hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/JobInProgress.java
    hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/JobTracker.java
    hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/Task.java
    hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskInProgress.java
    hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskStatus.java
    hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskTracker.java
    hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/UberTask.java
    hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapreduce/Job.java
    hadoop/mapreduce/branches/yahoo-merge/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java

Modified: hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/DefaultTaskController.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/DefaultTaskController.java?rev=1079222&r1=1079221&r2=1079222&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/DefaultTaskController.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/DefaultTaskController.java Tue Mar  8 05:57:36 2011
@@ -52,6 +52,48 @@ public class DefaultTaskController exten
   private static final Log LOG = 
       LogFactory.getLog(DefaultTaskController.class);
   private FileSystem fs;
+  /**
+   * Launch a new JVM for the task.
+   * 
+   * This method launches the new JVM for the task by executing the
+   * the JVM command using the {@link Shell.ShellCommandExecutor}
+   */
+  void launchTaskJVM(TaskController.TaskControllerContext context) 
+                                      throws IOException {
+    initializeTask(context);
+
+    JvmEnv env = context.env;
+    List<String> wrappedCommand = 
+      TaskLog.captureOutAndError(env.setup, env.vargs, env.stdout, env.stderr,
+          env.logSize, true);
+    ShellCommandExecutor shexec = 
+        new ShellCommandExecutor(wrappedCommand.toArray(new String[0]), 
+                                  env.workDir, env.env);
+    // set the ShellCommandExecutor for later use.
+    context.shExec = shexec;
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  DefaultTaskController ctor: about to call ShellCommandExecutor's execute() method");
+    shexec.execute();
+  }
+    
+  /**
+   * Initialize the task environment.
+   * 
+   * Since tasks are launched as the tasktracker user itself, this
+   * method has no action to perform.
+   */
+  void initializeTask(TaskController.TaskControllerContext context) {
+    // The default task controller does not need to set up
+    // any permissions for proper execution.
+    // So this is a dummy method.
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  DefaultTaskController initializeTask(): nothing to do here");
+    return;
+  }
+
+  /*
+   * No need to do anything as we don't need to do as we dont need anything
+   * extra from what TaskTracker has done.
+   */
+
   @Override
   public void setConf(Configuration conf) {
     super.setConf(conf);

Modified: hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=1079222&r1=1079221&r2=1079222&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/JobInProgress.java Tue Mar  8 05:57:36 2011
@@ -1163,6 +1163,7 @@ public class JobInProgress {
   public synchronized void updateTaskStatus(TaskInProgress tip, 
                                             TaskStatus status) {
 
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress updateTaskStatus() starting with TIP " + tip.getTIPId() + " and TaskStatus " + status + " (phase = " + status.getPhase() + ", state = " + status.getStateString() + ", progress = " + status.getProgress() + ", runState = " + status.getRunState() + ", TT = " + status.getTaskTracker() + ", diag = " + status.getDiagnosticInfo() + ")");
     double oldProgress = tip.getProgress();   // save old progress
     boolean wasRunning = tip.isRunning();
     boolean wasComplete = tip.isComplete();
@@ -1197,6 +1198,7 @@ public class JobInProgress {
     }
     
     boolean change = tip.updateStatus(status);
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress updateTaskStatus(): just called TIP updateStatus() with status " + status + " (phase = " + status.getPhase() + "); returned change = " + change);
     if (change) {
       TaskStatus.State state = status.getRunState();
       // get the TaskTrackerStatus where the task ran 
@@ -1241,6 +1243,7 @@ public class JobInProgress {
         return;
       } else if (state == TaskStatus.State.FAILED_UNCLEAN ||
                  state == TaskStatus.State.KILLED_UNCLEAN) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress updateTaskStatus(): task state = *_UNCLEAN; calling TIP incompleteSubTask() and adding cleanup task " + taskid);
         tip.incompleteSubTask(taskid, this.status);
         // add this task, to be rescheduled as cleanup attempt
         if (tip.isMapTask()) {
@@ -1254,6 +1257,7 @@ public class JobInProgress {
       //For a failed task update the JT datastructures. 
       else if (state == TaskStatus.State.FAILED ||
                state == TaskStatus.State.KILLED) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress updateTaskStatus(): task state = plain FAILED/KILLED; about to call failedTask() and make new TaskCompletionEvent");
         // Get the event number for the (possibly) previously successful
         // task. If there exists one, then set that status to OBSOLETE 
         int eventNumber;
@@ -2727,6 +2731,7 @@ public class JobInProgress {
   public synchronized boolean completedTask(TaskInProgress tip, 
                                             TaskStatus status)
   {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress completedTask() starting with TIP " + tip.getTIPId() + " and TaskStatus " + status);
     TaskAttemptID taskid = status.getTaskID();
     final JobTrackerInstrumentation metrics = jobtracker.getInstrumentation();
         
@@ -2805,6 +2810,7 @@ public class JobInProgress {
       killSetupTip(!tip.isMapTask());
       setupComplete();
     } else if (tip.isJobCleanupTask()) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress completedTask(): TIP " + tip.getTIPId() + " is a cleanup TIP");
       // cleanup task has finished. Kill the extra cleanup tip
       if (tip.isMapTask()) {
         // kill the reduce tip
@@ -2816,13 +2822,16 @@ public class JobInProgress {
       // The Job is done
       // if the job is failed, then mark the job failed.
       if (jobFailed) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress completedTask(): calling terminateJob() with JobStatus.FAILED");
         terminateJob(JobStatus.FAILED);
       }
       // if the job is killed, then mark the job killed.
       if (jobKilled) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress completedTask(): calling terminateJob() with JobStatus.KILLED");
         terminateJob(JobStatus.KILLED);
       }
       else {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress completedTask(): calling jobComplete() (HUH??)");
         jobComplete();
       }
       // The job has been killed/failed/successful
@@ -2864,6 +2873,7 @@ public class JobInProgress {
     if (!jobSetupCleanupNeeded && canLaunchJobCleanupTask()) {
       jobComplete();
     }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress completedTask(): done");
     return true;
   }
 
@@ -3000,9 +3010,11 @@ public class JobInProgress {
   }
   
   private synchronized void terminateJob(int jobTerminationState) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress terminateJob() called with jobTerminationState = " + jobTerminationState);
     if ((status.getRunState() == JobStatus.RUNNING) ||
         (status.getRunState() == JobStatus.PREP)) {
 
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress terminateJob(): runState = " + status.getRunState());
       this.finishTime = JobTracker.getClock().getTime();
       this.status.setMapProgress(1.0f);
       this.status.setReduceProgress(1.0f);
@@ -3039,6 +3051,7 @@ public class JobInProgress {
             this.conf, this.status.getJobID());
       }
     }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress terminateJob() done");
   }
 
   /**
@@ -3122,11 +3135,13 @@ public class JobInProgress {
     while (!mapCleanupTasks.isEmpty()) {
       taskid = mapCleanupTasks.remove(0);
       tip = maps[taskid.getTaskID().getId()];
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress clearUncleanTasks(): calling updateTaskStatus() on map cleanup task " + taskid);
       updateTaskStatus(tip, tip.getTaskStatus(taskid));
     }
     while (!reduceCleanupTasks.isEmpty()) {
       taskid = reduceCleanupTasks.remove(0);
       tip = reduces[taskid.getTaskID().getId()];
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress clearUncleanTasks(): calling updateTaskStatus() on reduce cleanup task " + taskid);
       updateTaskStatus(tip, tip.getTaskStatus(taskid));
     }
   }
@@ -3410,6 +3425,7 @@ public class JobInProgress {
   public synchronized void failedTask(TaskInProgress tip, TaskAttemptID taskid,
       String reason, TaskStatus.Phase phase, TaskStatus.State state, 
                          String trackerName) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress failedTask(): called with phase = " + phase + " (creating new TaskStatus)");
     TaskStatus status = TaskStatus.createTaskStatus(tip.isMapTask(), 
                                                     taskid,
                                                     0.0f,
@@ -3429,6 +3445,7 @@ public class JobInProgress {
     status.setStartTime(startTime);
     status.setFinishTime(JobTracker.getClock().getTime());
     boolean wasComplete = tip.isComplete();
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobInProgress failedTask(): calling updateTaskStatus()");
     updateTaskStatus(tip, status);
     boolean isComplete = tip.isComplete();
     if (wasComplete && !isComplete) { // mark a successful tip as failed

Modified: hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/JobTracker.java?rev=1079222&r1=1079221&r2=1079222&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/JobTracker.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/JobTracker.java Tue Mar  8 05:57:36 2011
@@ -2480,12 +2480,14 @@ public class JobTracker implements MRCon
     // Check for tasks to be killed
     List<TaskTrackerAction> killTasksList = getTasksToKill(trackerName);
     if (killTasksList != null) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker heartbeat(): found non-empty killTasksList");
       actions.addAll(killTasksList);
     }
      
     // Check for jobs to be killed/cleanedup
     List<TaskTrackerAction> killJobsList = getJobsForCleanup(trackerName);
     if (killJobsList != null) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker heartbeat(): found non-empty killJobsList");
       actions.addAll(killJobsList);
     }
 
@@ -2558,6 +2560,7 @@ public class JobTracker implements MRCon
    */
   boolean updateTaskTrackerStatus(String trackerName,
                                           TaskTrackerStatus status) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker updateTaskTrackerStatus(): starting");
     TaskTracker tt = getTaskTracker(trackerName);
     TaskTrackerStatus oldStatus = (tt == null) ? null : tt.getStatus();
     if (oldStatus != null) {
@@ -2616,7 +2619,7 @@ public class JobTracker implements MRCon
       taskTracker.setStatus(status);
       taskTrackers.put(trackerName, taskTracker);
       
-      if (LOG.isDebugEnabled()) {
+      if (true/* GRR DEBUG LOG.isDebugEnabled()*/) {
         int runningMaps = 0, runningReduces = 0;
         int commitPendingMaps = 0, commitPendingReduces = 0;
         int unassignedMaps = 0, unassignedReduces = 0;
@@ -2640,7 +2643,7 @@ public class JobTracker implements MRCon
             else { ++miscReduces; } 
           }
         }
-        LOG.debug(trackerName + ": Status -" +
+        LOG.info/* GRR DEBUG .debug */(trackerName + ": Status -" +
                   " running(m) = " + runningMaps + 
                   " unassigned(m) = " + unassignedMaps + 
                   " commit_pending(m) = " + commitPendingMaps +
@@ -3952,9 +3955,11 @@ public class JobTracker implements MRCon
    */
   void updateTaskStatuses(TaskTrackerStatus status) {
     String trackerName = status.getTrackerName();
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker updateTaskStatuses(): starting (TT = " + trackerName + ")");
     for (TaskStatus report : status.getTaskReports()) {
       report.setTaskTracker(trackerName);
       TaskAttemptID taskId = report.getTaskID();
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker updateTaskStatuses(): processing TaskStatus for task ID " + taskId + " with phase (from TT) = " + report.getPhase());
       
       // expire it
       expireLaunchingTasks.removeTask(taskId);
@@ -3968,12 +3973,14 @@ public class JobTracker implements MRCon
             jobs = new HashSet<JobID>();
             trackerToJobsToCleanup.put(trackerName, jobs);
           }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker updateTaskStatuses(): adding job ID " + taskId.getJobID() + " to job-cleanup list");
           jobs.add(taskId.getJobID());
         }
         continue;
       }
       
       if (!job.inited()) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker updateTaskStatuses(): job " + job + " not initialized; killing attempt");
         // if job is not yet initialized ... kill the attempt
         synchronized (trackerToTasksToCleanup) {
           Set<TaskAttemptID> tasks = trackerToTasksToCleanup.get(trackerName);
@@ -3981,6 +3988,7 @@ public class JobTracker implements MRCon
             tasks = new HashSet<TaskAttemptID>();
             trackerToTasksToCleanup.put(trackerName, tasks);
           }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker updateTaskStatuses(): adding task ID " + taskId + " to task-cleanup list");
           tasks.add(taskId);
         }
         continue;
@@ -3995,11 +4003,13 @@ public class JobTracker implements MRCon
         // or TaskInProgress can modify this object and
         // the changes should not get reflected in TaskTrackerStatus.
         // An old TaskTrackerStatus is used later in countMapTasks, etc.
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker updateTaskStatuses(): found TIP; calling updateTaskStatus() on " + taskId);
         job.updateTaskStatus(tip, (TaskStatus)report.clone());
         JobStatus newStatus = (JobStatus)job.getStatus().clone();
         
         // Update the listeners if an incomplete job completes
         if (prevStatus.getRunState() != newStatus.getRunState()) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker updateTaskStatuses(): creating JobStatusChangeEvent");
           JobStatusChangeEvent event = 
             new JobStatusChangeEvent(job, EventType.RUN_STATE_CHANGED, 
                                      prevStatus, newStatus);
@@ -4017,6 +4027,7 @@ public class JobTracker implements MRCon
           TaskInProgress failedFetchMap = taskidToTIPMap.get(mapTaskId);
           
           if (failedFetchMap != null) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  JobTracker updateTaskStatuses(): doing 'failed fetch' stuff");
             // Gather information about the map which has to be failed, if need be
             String failedFetchTrackerName = getAssignedTracker(mapTaskId);
             if (failedFetchTrackerName == null) {

Modified: hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/Task.java?rev=1079222&r1=1079221&r2=1079222&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/Task.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/Task.java Tue Mar  8 05:57:36 2011
@@ -319,6 +319,7 @@ abstract public class Task implements Wr
   }
 
   void setTaskCleanupTask() {
+System.out.println("GRR DEBUG:  Task setTaskCleanupTask(): setting taskCleanup = true (=> phase = CLEANUP on deserialization!)");
     taskCleanup = true;
   }
 	   
@@ -428,6 +429,7 @@ abstract public class Task implements Wr
     writeSkipRecs = in.readBoolean();
     taskCleanup = in.readBoolean();
     if (taskCleanup) {
+System.out.println("GRR DEBUG:  Task readFields(): setting phase to CLEANUP");
       setPhase(TaskStatus.Phase.CLEANUP);
     }
     user = Text.readString(in);
@@ -859,11 +861,11 @@ abstract public class Task implements Wr
                    TaskReporter reporter
                    ) throws IOException, InterruptedException {
     if (isUberTask()) {
-      LOG.info("UberTask:" + taskIdForUmbilical + " subtask:" + taskId
-               + "is done and is in the process of committing.");
+      LOG.info("UberTask '" + taskIdForUmbilical + "' subtask '" + taskId
+               + "' is done and is in the process of committing.");
     } else {
-      LOG.info("Task:" + taskId
-               + "is done and is in the process of committing.");
+      LOG.info("Task '" + taskId
+               + "' is done and is in the process of committing.");
     }
     updateCounters();
 
@@ -1013,6 +1015,7 @@ abstract public class Task implements Wr
       try {
         while (!umbilical.canCommit(taskIdForUmbilical)) {
           try {
+System.out.println("GRR DEBUG:  Task commitAfterApproval(): TT canCommit() returned false; sleeping 1 sec");
             // FIXME 1:  shouldn't this count down retries, too, in case JT glitched and no longer knows about us?  (else infinite loop)
             Thread.sleep(1000);  // FIXME 2:  shouldn't hardcoded 1-second sleep instead correspond to heartbeat interval for task?
           } catch(InterruptedException ie) {
@@ -1020,8 +1023,10 @@ abstract public class Task implements Wr
           }
           reporter.setProgressFlag();
         }
+System.out.println("GRR DEBUG:  Task commitAfterApproval(): TT canCommit() returned true");
         break;
       } catch (IOException ie) {
+System.out.println("GRR DEBUG:  Task commitAfterApproval(): TT canCommit() threw exception");
         LOG.warn("Failure asking whether task can commit: " +
             StringUtils.stringifyException(ie));
         if (--retries == 0) {
@@ -1039,6 +1044,7 @@ abstract public class Task implements Wr
   // this is protected (rather than private) solely for UberTask map-only case
   protected void commit(TaskUmbilicalProtocol umbilical,
                         TaskReporter reporter) throws IOException {
+System.out.println("GRR DEBUG:  Task commit(): about to call commitTask()");
     try {
       LOG.info("Task " + taskId + " is allowed to commit now");
       committer.commitTask(taskContext);
@@ -1072,6 +1078,7 @@ abstract public class Task implements Wr
   void taskCleanup(TaskUmbilicalProtocol umbilical) 
   throws IOException {
     // set phase for this task
+System.out.println("GRR DEBUG:  Task taskCleanup(): setting phase to CLEANUP");
     setPhase(TaskStatus.Phase.CLEANUP);
     getProgress().setStatus("cleanup");
     statusUpdate(umbilical);
@@ -1084,6 +1091,7 @@ abstract public class Task implements Wr
                                TaskReporter reporter
                               ) throws IOException, InterruptedException {
     // set phase for this task
+System.out.println("GRR DEBUG:  Task runJobCleanupTask(): setting phase to CLEANUP");
     setPhase(TaskStatus.Phase.CLEANUP);
     getProgress().setStatus("cleanup");
     statusUpdate(umbilical);

Modified: hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskInProgress.java?rev=1079222&r1=1079221&r2=1079222&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskInProgress.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskInProgress.java Tue Mar  8 05:57:36 2011
@@ -718,7 +718,7 @@ class TaskInProgress {
         TaskStatus.State oldState = oldStatus.getRunState();
         TaskStatus.State newState = status.getRunState();
           
-        // We should never recieve a duplicate success/failure/killed
+        // We should never receive a duplicate success/failure/killed
         // status update for the same taskid! This is a safety check, 
         // and is addressed better at the TaskTracker to ensure this.
         // @see {@link TaskTracker.transmitHeartbeat()}
@@ -728,7 +728,7 @@ class TaskInProgress {
              newState != TaskStatus.State.KILLED_UNCLEAN && 
              newState != TaskStatus.State.UNASSIGNED) && 
             (oldState == newState)) {
-          LOG.warn("Recieved duplicate status update of '" + newState + 
+          LOG.warn("Received duplicate status update of '" + newState + 
                    "' for '" + taskid + "' of TIP '" + getTIPId() + "'" +
                    "oldTT=" + oldStatus.getTaskTracker() + 
                    " while newTT=" + status.getTaskTracker());
@@ -781,6 +781,7 @@ class TaskInProgress {
           setProgressRate(currProgRate);
         }
       } else {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskInProgress updateStatus(): calling TaskStatus statusUpdate() for " + taskid + " with phase " + status.getPhase());
         taskStatuses.get(taskid).statusUpdate(status.getRunState(),
                                               status.getProgress(), status.getStateString(), status.getPhase(),
                                               status.getFinishTime());

Modified: hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskStatus.java?rev=1079222&r1=1079221&r2=1079222&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskStatus.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskStatus.java Tue Mar  8 05:57:36 2011
@@ -95,7 +95,9 @@ public abstract class TaskStatus impleme
   public TaskStatus(TaskAttemptID taskid, float progress, int numSlots,
                     State runState, String diagnosticInfo,
                     String stateString, String taskTracker,
-                    Phase phase, Counters counters) {
+                    Phase phase, Counters counters)
+//throws IOException
+ {
     this.taskid = taskid;
     this.progress = progress;
     this.numSlots = numSlots;
@@ -103,9 +105,11 @@ public abstract class TaskStatus impleme
     setDiagnosticInfo(diagnosticInfo);
     setStateString(stateString);
     this.taskTracker = taskTracker;
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskStatus ctor: creating with runState = " + runState + " and phase = " + phase);
     this.phase = phase;
     this.counters = counters;
     this.includeAllCounters = true;
+//if (runState == TaskStatus.State.FAILED_UNCLEAN && phase == TaskStatus.Phase.MAP) { throw new IOException("GRR DEBUG:  TaskStatus ctor:  WHO'S THE LAMER THAT CALLED US???"); }
   }
   
   public TaskAttemptID getTaskID() { return taskid; }
@@ -307,6 +311,7 @@ public abstract class TaskStatus impleme
       }else if (phase == TaskStatus.Phase.REDUCE){
         setSortFinishTime(System.currentTimeMillis());
       }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskStatus setPhase(): changing phase from " + getPhase() + " to " + phase);
       this.phase = phase;
     }
   }
@@ -408,6 +413,7 @@ public abstract class TaskStatus impleme
       this.setFinishTime(status.getFinishTime()); 
     }
     
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskStatus statusUpdate() #2: changing phase from " + getPhase() + " to " + status.getPhase());
     this.phase = status.getPhase();
     this.counters = status.getCounters();
     this.outputSize = status.outputSize;
@@ -433,6 +439,7 @@ public abstract class TaskStatus impleme
     setRunState(runState);
     setProgress(progress);
     setStateString(state);
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskStatus statusUpdate() #3: changing phase from " + getPhase() + " to " + phase);
     setPhase(phase);
     if (finishTime > 0) {
       setFinishTime(finishTime); 

Modified: hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskTracker.java?rev=1079222&r1=1079221&r2=1079222&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/TaskTracker.java Tue Mar  8 05:57:36 2011
@@ -424,10 +424,12 @@ public class TaskTracker 
       if (!runningJobs.containsKey(jobId)) {
         rJob = new RunningJob(jobId);
         rJob.tasks = new HashSet<TaskInProgress>();
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker addTaskToJob(): adding running job " + jobId + " to runningJobs");
         runningJobs.put(jobId, rJob);
       } else {
         rJob = runningJobs.get(jobId);
       }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker addTaskToJob(): adding Task/TIP " + tip.getTask().getTaskID() + " to runningJob " + jobId);
       synchronized (rJob) {
         rJob.tasks.add(tip);
       }
@@ -441,6 +443,7 @@ public class TaskTracker 
       if (rjob == null) {
         LOG.warn("Unknown job " + jobId + " being deleted.");
       } else {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker removeTaskFromJob(): removing Task/TIP " + tip.getTask().getTaskID() + " from runningJob " + jobId);
         synchronized (rjob) {
           rjob.tasks.remove(tip);
         }
@@ -1670,6 +1673,7 @@ public class TaskTracker 
     //
     if (status == null) {
       synchronized (this) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker transmitHeartBeat(): regenerating TaskTrackerStatus");
         status = new TaskTrackerStatus(taskTrackerName, localHostname, 
                                        httpPort, 
                                        cloneAndResetRunningTaskStatuses(
@@ -1737,6 +1741,7 @@ public class TaskTracker 
     //
     // Xmit the heartbeat
     //
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker transmitHeartBeat(): sending heartbeat");
     HeartbeatResponse heartbeatResponse = jobClient.heartbeat(status, 
                                                               justStarted,
                                                               justInited,
@@ -1750,6 +1755,7 @@ public class TaskTracker 
       
     synchronized (this) {
       for (TaskStatus taskStatus : status.getTaskReports()) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker transmitHeartBeat(): looping over TaskStatuses (task reports): Task/TIP " + taskStatus.getTaskID() + " runState = " + taskStatus.getRunState() + ", phase = " + taskStatus.getPhase());
         if (taskStatus.getRunState() != TaskStatus.State.RUNNING &&
             taskStatus.getRunState() != TaskStatus.State.UNASSIGNED &&
             taskStatus.getRunState() != TaskStatus.State.COMMIT_PENDING &&
@@ -1764,6 +1770,7 @@ public class TaskTracker 
           } catch (Exception e) {
             LOG.warn("Caught: " + StringUtils.stringifyException(e));
           }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker transmitHeartBeat(): removing Task/TIP " + taskStatus.getTaskID() + " from runningTasks (runState = " + taskStatus.getRunState() + ")");
           runningTasks.remove(taskStatus.getTaskID());
         }
       }
@@ -1989,6 +1996,7 @@ public class TaskTracker 
       }
     }
 
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker addTaskToJob(): removing running job " + jobId + " from runningJobs");
     synchronized(runningJobs) {
       runningJobs.remove(jobId);
     }
@@ -2112,6 +2120,7 @@ public class TaskTracker 
         }
       }
     }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker findTaskToKill(): returning TIP " + killMe.getTask().getTaskID());
     return killMe;
   }
 
@@ -2305,6 +2314,7 @@ public class TaskTracker 
     LOG.info("LaunchTaskAction (registerTask): " + t.getTaskID() +
              " task's state:" + t.getState());
     TaskInProgress tip = new TaskInProgress(t, this.fConf, launcher);
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker registerTask(): adding Task/TIP " + t.getTaskID() + " to runningTasks");
     synchronized (this) {
       tasks.put(t.getTaskID(), tip);
       runningTasks.put(t.getTaskID(), tip);
@@ -2500,7 +2510,10 @@ public class TaskTracker 
             task.getTaskID(), 0.0f,
             task.getNumSlotsRequired(), task.getState(),
             diagnosticInfo.toString(), "initializing", getName(),
-            TaskStatus.Phase.MAP, task.getCounters());
+            task.isTaskCleanupTask()
+              ? TaskStatus.Phase.CLEANUP  // JT reruns failed task as cleanup
+              : TaskStatus.Phase.MAP,
+            task.getCounters());
       } else {
         taskStatus = TaskStatus.createTaskStatus(
             task.isMapTask(), task.getTaskID(), 0.0f,
@@ -2633,6 +2646,7 @@ public class TaskTracker 
         return;
       }
       
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker reportProgress(): calling TaskStatus statusUpdate() for " + task.getTaskID() + " with phase " + taskStatus.getPhase());
       this.taskStatus.statusUpdate(taskStatus);
       this.lastProgressReport = System.currentTimeMillis();
     }
@@ -3033,6 +3047,7 @@ public class TaskTracker 
         taskStatus.setProgress(0.0f);
         reportDiagnosticInfo("Map output lost, rescheduling: " + 
                              failure);
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker mapOutputLost(): adding Task/TIP " + task.getTaskID() + " to runningTasks");
         runningTasks.put(task.getTaskID(), this);
         mapTotal++;
         myInstrumentation.statusUpdate(task, taskStatus);
@@ -3410,6 +3425,7 @@ public class TaskTracker 
       }
       result.add((TaskStatus)status.clone());
       status.clearStatus();
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TaskTracker cloneAndResetRunningTaskStatuses(): Task/TIP " + status.getTaskID() + " runState = " + status.getRunState() + ", phase = " + status.getPhase());
     }
     return result;
   }

Modified: hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/UberTask.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/UberTask.java?rev=1079222&r1=1079221&r2=1079222&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/UberTask.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapred/UberTask.java Tue Mar  8 05:57:36 2011
@@ -59,6 +59,7 @@ class UberTask extends Task {
   public UberTask() {
     super();
     this.taskStatus = new UberTaskStatus();
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  UberTask plain ctor finished");
   }
 
   public UberTask(String jobFile, TaskAttemptID taskId, int partition,
@@ -74,9 +75,10 @@ class UberTask extends Task {
                                          "", "", "", TaskStatus.Phase.MAP,
                                          getCounters());
     if (LOG.isDebugEnabled()) {
-      LOG.debug("UberTask " + taskId + " constructed with " + numMapTasks
+      LOG.debug("UberTask " + getTaskID() + " constructed with " + numMapTasks
                 + " sub-maps and " + numReduceTasks + " sub-reduces");
     }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  UberTask normal ctor finished (" + getTaskID() + " constructed with " + numMapTasks + " sub-maps and " + numReduceTasks + " sub-reduces)");
   }
 
   @Override
@@ -106,6 +108,8 @@ class UberTask extends Task {
   throws IOException, ClassNotFoundException, InterruptedException {
     this.umbilical = umbilical;
 
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  UberTask run(): starting");
+
     // set up two-level Progress/phase tree:  getProgress() is root ("uber"),
     // and subtasks' "root node" Progress is second level (will override
     // native one when construct each subtask)
@@ -187,16 +191,20 @@ class UberTask extends Task {
     // clean up the job (switch phase to "cleanup" and delete staging dir, but
     // do NOT delete temp dir yet)
     if (jobSetupCleanupNeeded) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  UberTask run(): about to call runCommitAbortJob() (=> switch phase to 'cleanup' and delete staging dir but NOT temp dir)");
       runCommitAbortJob(reporter);
     }
 
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  UberTask run(): about to call done() (=> commitTask() or abortTask())");
     // this is where commitTask() (or abortTask()) is called
     done(umbilical, reporter);
 
     // now finish cleaning up the job (delete temp dir:  results are committed)
     if (jobSetupCleanupNeeded) {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  UberTask run(): about to call commitJob() (=> delete temp dir)");
       commitJob();
     }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  UberTask run(): totally done");
   }
 
   private TaskAttemptID[] createMapIds() {
@@ -431,6 +439,7 @@ class UberTask extends Task {
         splits[j] = splitIndex;
       }
     }
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  UberTask readFields() finished (" + getTaskID() + " restored with " + numMapTasks + " sub-maps and " + numReduceTasks + " sub-reduces; phase = " + getPhase() + ")");
   }
 
   /**

Modified: hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapreduce/Job.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapreduce/Job.java?rev=1079222&r1=1079221&r2=1079222&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapreduce/Job.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/java/org/apache/hadoop/mapreduce/Job.java Tue Mar  8 05:57:36 2011
@@ -1118,7 +1118,11 @@ System.out.println("GRR DEBUG:  Job getC
     IntegerRanges reduceRanges = getProfileTaskRange(false);
     int progMonitorPollIntervalMillis = 
       Job.getProgressPollInterval(clientConf);
+//GRR DEBUG ONLY!
+long numIters_GRR_DEBUG = 0;
     while (!isComplete()) {
+++numIters_GRR_DEBUG;
+if (numIters_GRR_DEBUG < 10 || (numIters_GRR_DEBUG % 10) == 0) { System.out.println("GRR DEBUG:  Job monitorAndPrintJob(): while-loop iteration #" + numIters_GRR_DEBUG); }
       Thread.sleep(progMonitorPollIntervalMillis);
       String report = 
         (" map " + StringUtils.formatPercent(mapProgress(), 0)+
@@ -1133,6 +1137,7 @@ System.out.println("GRR DEBUG:  Job getC
         getTaskCompletionEvents(eventCounter, 10); 
       eventCounter += events.length;
       printTaskEvents(events, filter, profiling, mapRanges, reduceRanges);
+if (numIters_GRR_DEBUG > 40) { throw new InterruptedException(); } // then check TEST*.txt file and/or build/test/logs/.../* for additional logged clues
     }
     LOG.info("Job complete: " + jobId);
     Counters counters = getCounters();

Modified: hadoop/mapreduce/branches/yahoo-merge/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java?rev=1079222&r1=1079221&r2=1079222&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java Tue Mar  8 05:57:36 2011
@@ -64,8 +64,10 @@ public class TestMRJobClient extends Clu
     }
   }
 
+/* GRR TEMP HACK
   @Test
   public void testJobClient() throws Exception {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TestMRJobClient testJobClient(): starting");
     Configuration conf = createJobConf();
     Job job = runJob(conf);
     String jobId = job.getJobID().toString();
@@ -77,6 +79,7 @@ public class TestMRJobClient extends Clu
   @Test
   public void testGetCounter(String jobId,
       Configuration conf) throws Exception {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TestMRJobClient testGetCounter(): starting");
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     int exitCode = runTool(conf, createJobClient(),
         new String[] { "-counter", jobId,
@@ -89,6 +92,7 @@ public class TestMRJobClient extends Clu
   @Test
   public void testJobList(String jobId,
       Configuration conf) throws Exception {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TestMRJobClient testJobList(): starting");
     verifyJobPriority(jobId, "HIGH", conf, createJobClient());
   }
 
@@ -116,22 +120,26 @@ public class TestMRJobClient extends Clu
   @Test
   public void testChangingJobPriority(String jobId, Configuration conf)
       throws Exception {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TestMRJobClient testChangingJobPriority(): starting");
     int exitCode = runTool(conf, createJobClient(),
         new String[] { "-set-priority", jobId, "VERY_LOW" },
         new ByteArrayOutputStream());
     assertEquals("Exit code", 0, exitCode);
     verifyJobPriority(jobId, "VERY_LOW", conf, createJobClient());
   }
+END GRR TEMP HACK */
 
   @Test
   public void testMissingProfileOutput() throws Exception {
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TestMRJobClient testMissingProfileOutput(): starting");
     Configuration conf = createJobConf();
     final String input = "hello1\n";
 
     // Set a job to be profiled with an empty agentlib parameter.
-    // This will fail to create profile.out files for tasks.
-    // This will succeed by skipping the HTTP fetch of the
-    // profiler output.
+    // This will fail to create profile.out files for tasks (because
+    // child JVM will refuse to launch).
+    // This subtest will nevertheless succeed by skipping the HTTP
+    // fetch of the profiler output.
     Job job = MapReduceTestUtil.createJob(conf,
         getInputDir(), getOutputDir(), 1, 1, input);
     job.setJobName("disable-profile-fetch");
@@ -141,9 +149,11 @@ public class TestMRJobClient extends Clu
     job.setMaxReduceAttempts(1);
     job.setJobSetupCleanupNeeded(false);
     job.waitForCompletion(true);
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TestMRJobClient testMissingProfileOutput(): done with job 1; BAILING (TEMP HACK)"); if (job != null) return;
 
-    // Run another job with an hprof agentlib param; verify
-    // that the HTTP fetch works here.
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TestMRJobClient testMissingProfileOutput(): done with job 1; about to start job 2");
+    // Run another job with a (valid) hprof agentlib param; verify
+    // that the HTTP fetch (of task reports) works here.
     Job job2 = MapReduceTestUtil.createJob(conf,
         getInputDir(), getOutputDir(), 1, 1, input);
     job2.setJobName("enable-profile-fetch");
@@ -158,6 +168,7 @@ public class TestMRJobClient extends Clu
     job2.setJobSetupCleanupNeeded(false);
     job2.waitForCompletion(true);
 
+System.out.println("GRR DEBUG (" + String.format("%1$tF %1$tT,%1$tL", System.currentTimeMillis()) + "):  TestMRJobClient testMissingProfileOutput(): done with job 2; getting final task reports, etc.");
     // Find the first map task, verify that we got its profile output file.
     TaskReport [] reports = job2.getTaskReports(TaskType.MAP);
     assertTrue("No task reports found!", reports.length > 0);