Posted to mapreduce-commits@hadoop.apache.org by ac...@apache.org on 2011/03/17 21:21:54 UTC

svn commit: r1082677 [14/38] - in /hadoop/mapreduce/branches/MR-279: ./ assembly/ ivy/ mr-client/ mr-client/hadoop-mapreduce-client-app/ mr-client/hadoop-mapreduce-client-app/src/ mr-client/hadoop-mapreduce-client-app/src/main/ mr-client/hadoop-mapredu...

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/package-info.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/package-info.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/package-info.java:776175-785643

Copied: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcher.java (from r1082666, hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcher.java)
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcher.java?p2=hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcher.java&p1=hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcher.java&r1=1082666&r2=1082677&rev=1082677&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcher.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcher.java Thu Mar 17 20:21:13 2011
@@ -24,7 +24,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.MapTaskCompletionEventsUpdate;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
-import org.apache.hadoop.mapred.TaskTracker;
 import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcher.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcher.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/EventFetcher.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ExceptionReporter.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/ExceptionReporter.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/ExceptionReporter.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryWriter.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryWriter.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/InMemoryWriter.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapHost.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/MapHost.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/MapHost.java:776175-785643

Copied: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java (from r1082666, hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java)
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java?p2=hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java&p1=hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java&r1=1082666&r2=1082677&rev=1082677&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java Thu Mar 17 20:21:13 2011
@@ -30,7 +30,8 @@ import org.apache.hadoop.fs.LocalDirAllo
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.BoundedByteArrayOutputStream;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.TaskTracker;
+import org.apache.hadoop.mapred.MapOutputFile;
+import org.apache.hadoop.mapred.Task;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 
 class MapOutput<K,V> {
@@ -63,7 +64,7 @@ class MapOutput<K,V> {
   private final boolean primaryMapOutput;
   
   MapOutput(TaskAttemptID mapId, MergeManager<K,V> merger, long size, 
-            JobConf conf, LocalDirAllocator localDirAllocator,
+            JobConf conf, MapOutputFile mapOutputFile,
             int fetcher, boolean primaryMapOutput)  throws IOException {
     this.id = ID.incrementAndGet();
     this.mapId = mapId;
@@ -77,18 +78,23 @@ class MapOutput<K,V> {
     this.size = size;
     
     this.localFS = FileSystem.getLocal(conf);
-    String filename = "map_" + mapId.getTaskID().getId() + ".out";
-    String tmpOutput = Path.SEPARATOR +
-                            TaskTracker.getJobCacheSubdir(conf.getUser()) +
-                       Path.SEPARATOR + mapId.getJobID() +
-                       Path.SEPARATOR + merger.getReduceId() +
-                       Path.SEPARATOR + "output" + 
-                       Path.SEPARATOR + filename + 
-                       "." + fetcher; 
-
-    tmpOutputPath = 
-      localDirAllocator.getLocalPathForWrite(tmpOutput, size, conf);
-    outputPath = new Path(tmpOutputPath.getParent(), filename);
+//    String filename = "map_" + mapId.getTaskID().getId() + ".out";
+//    String tmpOutput = Path.SEPARATOR +
+//                            TaskTracker.getJobCacheSubdir(conf.getUser()) +
+//                       Path.SEPARATOR + mapId.getJobID() +
+//                       Path.SEPARATOR + merger.getReduceId() +
+//                       Path.SEPARATOR + "output" + 
+//                       Path.SEPARATOR + filename + 
+//                       "." + fetcher; 
+//
+//    tmpOutputPath = 
+//      localDirAllocator.getLocalPathForWrite(tmpOutput, size, conf);
+//    outputPath = new Path(tmpOutputPath.getParent(), filename);
+    
+    outputPath =
+        mapOutputFile.getInputFileForWrite(mapId.getTaskID(),size);
+    tmpOutputPath = outputPath.suffix(String.valueOf(fetcher));
+    
     disk = localFS.create(tmpOutputPath);
     
     this.primaryMapOutput = primaryMapOutput;

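The hunk above replaces the hand-built "jobcache" path with MapOutputFile. A minimal sketch of the new derivation, assuming only a configured MapOutputFile instance and the constructor arguments shown in the diff:

    import java.io.IOException;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.MapOutputFile;
    import org.apache.hadoop.mapreduce.TaskAttemptID;

    class MapOutputPathSketch {
      // Mirrors the replacement code above: the final on-disk location comes
      // from MapOutputFile, and the temporary file reuses that name with the
      // fetcher id appended (Path.suffix appends to the file name itself).
      static Path[] derive(MapOutputFile mapOutputFile, TaskAttemptID mapId,
                           long size, int fetcher) throws IOException {
        Path outputPath = mapOutputFile.getInputFileForWrite(mapId.getTaskID(), size);
        Path tmpOutputPath = outputPath.suffix(String.valueOf(fetcher));
        return new Path[] { outputPath, tmpOutputPath };
      }
    }
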
Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java:776175-785643

Copied: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java (from r1082666, hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java)
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java?p2=hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java&p1=hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java&r1=1082666&r2=1082677&rev=1082677&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java Thu Mar 17 20:21:13 2011
@@ -225,7 +225,7 @@ public class MergeManager<K, V> {
                " is greater than maxSingleShuffleLimit (" + 
                maxSingleShuffleLimit + ")");
       return new MapOutput<K,V>(mapId, this, requestedSize, jobConf, 
-                                localDirAllocator, fetcher, true);
+                                mapOutputFile, fetcher, true);
     }
     
     // Stall shuffle if we are above the memory limit

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeThread.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/MergeThread.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/MergeThread.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleClientMetrics.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleClientMetrics.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/ShuffleClientMetrics.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleHeader.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleHeader.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/ShuffleHeader.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleScheduler.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleScheduler.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/ShuffleScheduler.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/package-info.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/task/reduce/package-info.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/task/reduce/package-info.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/tools/CLI.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/tools/CLI.java:776175-785643

Copied: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java (from r1082666, hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java)
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java?p2=hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java&p1=hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java&r1=1082666&r2=1082677&rev=1082677&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java Thu Mar 17 20:21:13 2011
@@ -23,8 +23,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 
 /**
  * Place holder for deprecated keys in the framework 
@@ -57,123 +55,6 @@ public class ConfigUtil {
     Configuration.addDeprecation("mapred.acls.enabled", 
         new String[] {MRConfig.MR_ACLS_ENABLED});
 
-    Configuration.addDeprecation("mapred.cluster.max.map.memory.mb", 
-      new String[] {JTConfig.JT_MAX_MAPMEMORY_MB});
-    Configuration.addDeprecation("mapred.cluster.max.reduce.memory.mb", 
-      new String[] {JTConfig.JT_MAX_REDUCEMEMORY_MB});
-
-    Configuration.addDeprecation("mapred.cluster.average.blacklist.threshold", 
-      new String[] {JTConfig.JT_AVG_BLACKLIST_THRESHOLD});
-    Configuration.addDeprecation("hadoop.job.history.location", 
-      new String[] {JTConfig.JT_JOBHISTORY_LOCATION});
-    Configuration.addDeprecation(
-      "mapred.job.tracker.history.completed.location", 
-      new String[] {JTConfig.JT_JOBHISTORY_COMPLETED_LOCATION});
-    Configuration.addDeprecation("mapred.jobtracker.job.history.block.size", 
-      new String[] {JTConfig.JT_JOBHISTORY_BLOCK_SIZE});
-    Configuration.addDeprecation("mapred.job.tracker.jobhistory.lru.cache.size", 
-      new String[] {JTConfig.JT_JOBHISTORY_CACHE_SIZE});
-    Configuration.addDeprecation("mapred.hosts", 
-      new String[] {JTConfig.JT_HOSTS_FILENAME});
-    Configuration.addDeprecation("mapred.hosts.exclude", 
-      new String[] {JTConfig.JT_HOSTS_EXCLUDE_FILENAME});
-    Configuration.addDeprecation("mapred.system.dir", 
-      new String[] {JTConfig.JT_SYSTEM_DIR});
-    Configuration.addDeprecation("mapred.max.tracker.blacklists", 
-      new String[] {JTConfig.JT_MAX_TRACKER_BLACKLISTS});
-    Configuration.addDeprecation("mapred.job.tracker", 
-      new String[] {JTConfig.JT_IPC_ADDRESS});
-    Configuration.addDeprecation("mapred.job.tracker.http.address", 
-      new String[] {JTConfig.JT_HTTP_ADDRESS});
-    Configuration.addDeprecation("mapred.job.tracker.handler.count", 
-      new String[] {JTConfig.JT_IPC_HANDLER_COUNT});
-    Configuration.addDeprecation("mapred.jobtracker.restart.recover", 
-      new String[] {JTConfig.JT_RESTART_ENABLED});
-    Configuration.addDeprecation("mapred.jobtracker.taskScheduler", 
-      new String[] {JTConfig.JT_TASK_SCHEDULER});
-    Configuration.addDeprecation(
-      "mapred.jobtracker.taskScheduler.maxRunningTasksPerJob", 
-      new String[] {JTConfig.JT_RUNNINGTASKS_PER_JOB});
-    Configuration.addDeprecation("mapred.jobtracker.instrumentation", 
-      new String[] {JTConfig.JT_INSTRUMENTATION});
-    Configuration.addDeprecation("mapred.jobtracker.maxtasks.per.job", 
-      new String[] {JTConfig.JT_TASKS_PER_JOB});
-    Configuration.addDeprecation("mapred.heartbeats.in.second", 
-      new String[] {JTConfig.JT_HEARTBEATS_IN_SECOND});
-    Configuration.addDeprecation("mapred.job.tracker.persist.jobstatus.active", 
-      new String[] {JTConfig.JT_PERSIST_JOBSTATUS});
-    Configuration.addDeprecation("mapred.job.tracker.persist.jobstatus.hours", 
-      new String[] {JTConfig.JT_PERSIST_JOBSTATUS_HOURS});
-    Configuration.addDeprecation("mapred.job.tracker.persist.jobstatus.dir", 
-      new String[] {JTConfig.JT_PERSIST_JOBSTATUS_DIR});
-    Configuration.addDeprecation("mapred.permissions.supergroup", 
-      new String[] {MRConfig.MR_SUPERGROUP});
-    Configuration.addDeprecation("mapreduce.jobtracker.permissions.supergroup",
-        new String[] {MRConfig.MR_SUPERGROUP});
-    Configuration.addDeprecation("mapred.task.cache.levels", 
-      new String[] {JTConfig.JT_TASKCACHE_LEVELS});
-    Configuration.addDeprecation("mapred.jobtracker.taskalloc.capacitypad", 
-      new String[] {JTConfig.JT_TASK_ALLOC_PAD_FRACTION});
-    Configuration.addDeprecation("mapred.jobinit.threads", 
-      new String[] {JTConfig.JT_JOBINIT_THREADS});
-    Configuration.addDeprecation("mapred.tasktracker.expiry.interval", 
-      new String[] {JTConfig.JT_TRACKER_EXPIRY_INTERVAL});
-    Configuration.addDeprecation("mapred.job.tracker.retiredjobs.cache.size", 
-      new String[] {JTConfig.JT_RETIREJOB_CACHE_SIZE});
-    Configuration.addDeprecation("mapred.job.tracker.retire.jobs", 
-      new String[] {JTConfig.JT_RETIREJOBS});
-    Configuration.addDeprecation("mapred.healthChecker.interval", 
-      new String[] {TTConfig.TT_HEALTH_CHECKER_INTERVAL});
-    Configuration.addDeprecation("mapred.healthChecker.script.args", 
-      new String[] {TTConfig.TT_HEALTH_CHECKER_SCRIPT_ARGS});
-    Configuration.addDeprecation("mapred.healthChecker.script.path", 
-      new String[] {TTConfig.TT_HEALTH_CHECKER_SCRIPT_PATH});
-    Configuration.addDeprecation("mapred.healthChecker.script.timeout", 
-      new String[] {TTConfig.TT_HEALTH_CHECKER_SCRIPT_TIMEOUT});
-    Configuration.addDeprecation("mapred.local.dir.minspacekill", 
-      new String[] {TTConfig.TT_LOCAL_DIR_MINSPACE_KILL});
-    Configuration.addDeprecation("mapred.local.dir.minspacestart", 
-      new String[] {TTConfig.TT_LOCAL_DIR_MINSPACE_START});
-    Configuration.addDeprecation("mapred.task.tracker.http.address", 
-      new String[] {TTConfig.TT_HTTP_ADDRESS});
-    Configuration.addDeprecation("mapred.task.tracker.report.address", 
-      new String[] {TTConfig.TT_REPORT_ADDRESS});
-    Configuration.addDeprecation("mapred.task.tracker.task-controller", 
-      new String[] {TTConfig.TT_TASK_CONTROLLER});
-    Configuration.addDeprecation("mapred.tasktracker.dns.interface", 
-      new String[] {TTConfig.TT_DNS_INTERFACE});
-    Configuration.addDeprecation("mapred.tasktracker.dns.nameserver", 
-      new String[] {TTConfig.TT_DNS_NAMESERVER});
-    Configuration.addDeprecation("mapred.tasktracker.events.batchsize", 
-      new String[] {TTConfig.TT_MAX_TASK_COMPLETION_EVENTS_TO_POLL});
-    Configuration.addDeprecation("mapred.tasktracker.indexcache.mb", 
-      new String[] {TTConfig.TT_INDEX_CACHE});
-    Configuration.addDeprecation("mapred.tasktracker.instrumentation", 
-      new String[] {TTConfig.TT_INSTRUMENTATION});
-    Configuration.addDeprecation("mapred.tasktracker.map.tasks.maximum", 
-      new String[] {TTConfig.TT_MAP_SLOTS});
-    Configuration.addDeprecation("mapred.tasktracker.memory_calculator_plugin", 
-      new String[] {TTConfig.TT_RESOURCE_CALCULATOR_PLUGIN});
-    Configuration.addDeprecation("mapred.tasktracker.memorycalculatorplugin", 
-      new String[] {TTConfig.TT_RESOURCE_CALCULATOR_PLUGIN});
-    Configuration.addDeprecation("mapred.tasktracker.reduce.tasks.maximum", 
-      new String[] {TTConfig.TT_REDUCE_SLOTS});
-    Configuration.addDeprecation(
-      "mapred.tasktracker.taskmemorymanager.monitoring-interval", 
-      new String[] {TTConfig.TT_MEMORY_MANAGER_MONITORING_INTERVAL});
-    Configuration.addDeprecation(
-      "mapred.tasktracker.tasks.sleeptime-before-sigkill", 
-      new String[] {TTConfig.TT_SLEEP_TIME_BEFORE_SIG_KILL});
-    Configuration.addDeprecation("slave.host.name", 
-      new String[] {TTConfig.TT_HOST_NAME});
-    Configuration.addDeprecation("tasktracker.http.threads", 
-      new String[] {TTConfig.TT_HTTP_THREADS});
-    Configuration.addDeprecation("hadoop.net.static.resolutions", 
-      new String[] {TTConfig.TT_STATIC_RESOLUTIONS});
-    Configuration.addDeprecation("local.cache.size", 
-      new String[] {TTConfig.TT_LOCAL_CACHE_SIZE});
-    Configuration.addDeprecation("tasktracker.contention.tracking", 
-      new String[] {TTConfig.TT_CONTENTION_TRACKING});
     Configuration.addDeprecation("hadoop.job.history.user.location", 
       new String[] {MRJobConfig.HISTORY_LOCATION});
     Configuration.addDeprecation("job.end.notification.url", 
@@ -513,6 +394,18 @@ public class ConfigUtil {
       new String[] {org.apache.hadoop.mapred.pipes.Submitter.PARTITIONER});
     Configuration.addDeprecation("mapred.pipes.user.inputformat", 
       new String[] {org.apache.hadoop.mapred.pipes.Submitter.INPUT_FORMAT});
+    
+    Configuration.addDeprecation("mapred.tasktracker.indexcache.mb", 
+        new String[] {MRConfig.JOB_INDEX_CACHE});
+    Configuration.addDeprecation("mapred.tasktracker.memory_calculator_plugin", 
+        new String[] {MRConfig.RESOURCE_CALCULATOR_PLUGIN});
+    Configuration.addDeprecation("mapred.tasktracker.memorycalculatorplugin", 
+        new String[] {MRConfig.RESOURCE_CALCULATOR_PLUGIN});
+    Configuration.addDeprecation("hadoop.net.static.resolutions", 
+        new String[] {MRConfig.STATIC_RESOLUTIONS});
+
+    Configuration.addDeprecation("mapred.job.tracker", 
+        new String[] {MRConfig.MASTER_ADDRESS});
   }
 }
 

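The hunk above retires a set of TaskTracker/JobTracker-era mappings and remaps a few keys onto MRConfig targets. A minimal sketch of how one remapped key resolves once the deprecation table is registered; the loadResources() call and the value "20" are illustrative assumptions:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.MRConfig;
    import org.apache.hadoop.mapreduce.util.ConfigUtil;

    public class DeprecationSketch {
      public static void main(String[] args) {
        ConfigUtil.loadResources();  // registers the deprecation table above
        Configuration conf = new Configuration();
        conf.set("mapred.tasktracker.indexcache.mb", "20");  // old TT-era key
        // addDeprecation makes the value readable through the new key
        System.out.println(conf.get(MRConfig.JOB_INDEX_CACHE));  // prints 20
      }
    }
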
Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/util/ConfigUtil.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/CountersStrings.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/util/CountersStrings.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/util/CountersStrings.java:776175-785643

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/HostUtil.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/HostUtil.java?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/HostUtil.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/HostUtil.java Thu Mar 17 20:21:13 2011
@@ -0,0 +1,33 @@
+package org.apache.hadoop.mapreduce.util;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+
+@Private
+@Unstable
+public class HostUtil {
+
+  /**
+   * Construct the URL of the task log page served by the given TaskTracker.
+   * @param taskTrackerHostName host name of the TaskTracker serving the logs
+   * @param httpPort HTTP port of the TaskTracker's web UI
+   * @param taskAttemptID the task attempt whose logs are requested
+   * @return the taskLogUrl
+   */
+  public static String getTaskLogUrl(String taskTrackerHostName,
+      String httpPort, String taskAttemptID) {
+    return ("http://" + taskTrackerHostName + ":" + httpPort
+        + "/tasklog?attemptid=" + taskAttemptID);
+  }
+
+  public static String convertTrackerNameToHostName(String trackerName) {
+    // Ugly!
+    // Convert the trackerName to its host name
+    int indexOfColon = trackerName.indexOf(":");
+    String trackerHostName = (indexOfColon == -1) ? 
+      trackerName : 
+      trackerName.substring(0, indexOfColon);
+    return trackerHostName.substring("tracker_".length());
+  }
+
+}

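A hedged usage sketch for the new helpers; the host, port, attempt id, and the "tracker_<host>:<ip>:<port>" tracker name are made-up values:

    String url = HostUtil.getTaskLogUrl("node1.example.com", "50060",
        "attempt_201103170001_0001_m_000000_0");
    // -> http://node1.example.com:50060/tasklog?attemptid=attempt_2011...

    String host = HostUtil.convertTrackerNameToHostName(
        "tracker_node1.example.com:127.0.0.1:34567");
    // cut at the first ':' and strip "tracker_" -> "node1.example.com"
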
Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/util/ResourceBundles.java:776175-785643

Copied: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java (from r1082666, hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java)
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java?p2=hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java&p1=hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java&r1=1082666&r2=1082677&rev=1082677&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java Thu Mar 17 20:21:13 2011
@@ -148,16 +148,17 @@ public abstract class ResourceCalculator
       return ReflectionUtils.newInstance(clazz, conf);
     }
 
+    // FIXME
     // No class given, try a os specific class
-    try {
-      String osName = System.getProperty("os.name");
-      if (osName.startsWith("Linux")) {
-        return new LinuxResourceCalculatorPlugin();
-      }
-    } catch (SecurityException se) {
-      // Failed to get Operating System name.
-      return null;
-    }
+//    try {
+//      String osName = System.getProperty("os.name");
+//      if (osName.startsWith("Linux")) {
+//        return new LinuxResourceCalculatorPlugin();
+//      }
+//    } catch (SecurityException se) {
+//      // Failed to get Operating System name.
+//      return null;
+//    }
 
     // Not supported on this system.
     return null;

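With the Linux fallback commented out above, getResourceCalculatorPlugin returns null whenever no class is configured. A minimal caller sketch under that assumption:

    // conf: an org.apache.hadoop.conf.Configuration obtained elsewhere
    ResourceCalculatorPlugin plugin =
        ResourceCalculatorPlugin.getResourceCalculatorPlugin(null, conf);
    if (plugin == null) {
      // no class configured and no OS-specific default any more:
      // treat resource metrics as unavailable instead of failing
    }
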
Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java:776175-785643

Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/package-info.java
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/org/apache/hadoop/mapreduce/util/package-info.java:713112
+/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/util/package-info.java:776175-785643

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/overview.html
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/overview.html?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/overview.html (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/overview.html Thu Mar 17 20:21:13 2011
@@ -0,0 +1,292 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<head>
+   <title>Hadoop</title>
+</head>
+<body>
+
+Hadoop is a distributed computing platform.
+
+<p>Hadoop primarily consists of the <a 
+href="org/apache/hadoop/hdfs/package-summary.html">Hadoop Distributed FileSystem 
+(HDFS)</a> and an 
+implementation of the <a href="org/apache/hadoop/mapred/package-summary.html">
+Map-Reduce</a> programming paradigm.</p>
+
+
+<p>Hadoop is a software framework that lets one easily write and run applications 
+that process vast amounts of data. Here's what makes Hadoop especially useful:</p>
+<ul>
+  <li>
+    <b>Scalable</b>: Hadoop can reliably store and process petabytes.
+  </li>
+  <li>
+    <b>Economical</b>: It distributes the data and processing across clusters 
+    of commonly available computers. These clusters can number into the thousands 
+    of nodes.
+  </li>
+  <li>
+    <b>Efficient</b>: By distributing the data, Hadoop can process it in parallel 
+    on the nodes where the data is located. This makes it extremely fast.
+  </li>
+  <li>
+    <b>Reliable</b>: Hadoop automatically maintains multiple copies of data and 
+    automatically redeploys computing tasks based on failures.
+  </li>
+</ul>  
+
+<h2>Requirements</h2>
+
+<h3>Platforms</h3>
+
+<ul>
+  <li>
+    Hadoop has been demonstrated on GNU/Linux clusters with 2000 nodes.
+  </li>
+  <li>
+    Win32 is supported as a <i>development</i> platform. Distributed operation 
+    has not been well tested on Win32, so this is not a <i>production</i> 
+    platform.
+  </li>  
+</ul>
+  
+<h3>Requisite Software</h3>
+
+<ol>
+  <li>
+    Java 1.6.x, preferably from 
+    <a href="http://java.sun.com/javase/downloads/">Sun</a>. 
+    Set <tt>JAVA_HOME</tt> to the root of your Java installation.
+  </li>
+  <li>
+    ssh must be installed and sshd must be running to use Hadoop's
+    scripts to manage remote Hadoop daemons.
+  </li>
+  <li>
+    rsync may be installed to use Hadoop's scripts to manage remote
+    Hadoop installations.
+  </li>
+</ol>
+
+<h4>Additional requirements for Windows</h4>
+
+<ol>
+  <li>
+    <a href="http://www.cygwin.com/">Cygwin</a> - Required for shell support in 
+    addition to the required software above.
+  </li>
+</ol>
+  
+<h3>Installing Required Software</h3>
+
+<p>If your platform does not have the required software listed above, you
+will have to install it.</p>
+
+<p>For example on Ubuntu Linux:</p>
+<p><blockquote><pre>
+$ sudo apt-get install ssh<br>
+$ sudo apt-get install rsync<br>
+</pre></blockquote></p>
+
+<p>On Windows, if you did not install the required software when you
+installed cygwin, start the cygwin installer and select the packages:</p>
+<ul>
+  <li>openssh - the "Net" category</li>
+  <li>rsync - the "Net" category</li>
+</ul>
+
+<h2>Getting Started</h2>
+
+<p>First, you need to get a copy of the Hadoop code.</p>
+
+<p>Edit the file <tt>conf/hadoop-env.sh</tt> to define at least
+<tt>JAVA_HOME</tt>.</p>
+
+<p>Try the following command:</p>
+<tt>bin/hadoop</tt>
+<p>This will display the documentation for the Hadoop command script.</p>
+
+<h2>Standalone operation</h2>
+
+<p>By default, Hadoop is configured to run things in a non-distributed
+mode, as a single Java process.  This is useful for debugging, and can
+be demonstrated as follows:</p>
+<tt>
+mkdir input<br>
+cp conf/*.xml input<br>
+bin/hadoop jar hadoop-*-examples.jar grep input output 'dfs[a-z.]+'<br>
+cat output/*
+</tt>
+<p>This will display counts for each match of the <a
+href="http://java.sun.com/j2se/1.4.2/docs/api/java/util/regex/Pattern.html">
+regular expression</a>.</p>
+
+<p>Note that input is specified as a <em>directory</em> containing input
+files and that output is also specified as a directory where parts are
+written.</p>
+
+<h2>Distributed operation</h2>
+
+To configure Hadoop for distributed operation you must specify the
+following:
+
+<ol>
+
+<li>The NameNode (Distributed Filesystem master) host.  This is
+specified with the configuration property <tt><a
+ href="../core-default.html#fs.default.name">fs.default.name</a></tt>.
+</li>
+
+<li>The {@link org.apache.hadoop.mapred.JobTracker} (MapReduce master)
+host and port.  This is specified with the configuration property
+<tt><a
+href="../mapred-default.html#mapreduce.jobtracker.address">mapreduce.jobtracker.address</a></tt>.
+</li>
+
+<li>A <em>slaves</em> file that lists the names of all the hosts in
+the cluster.  The default slaves file is <tt>conf/slaves</tt>.</li>
+
+</ol>
+
+<h3>Pseudo-distributed configuration</h3>
+
+You can in fact run everything on a single host.  To run things this
+way, put the following in:
+<br/>
+<br/>
+conf/core-site.xml:
+<xmp><configuration>
+
+  <property>
+    <name>fs.default.name</name>
+    <value>hdfs://localhost/</value>
+  </property>
+
+</configuration></xmp>
+
+conf/hdfs-site.xml:
+<xmp><configuration>
+
+  <property>
+    <name>dfs.replication</name>
+    <value>1</value>
+  </property>
+
+</configuration></xmp>
+
+conf/mapred-site.xml:
+<xmp><configuration>
+
+  <property>
+    <name>mapreduce.jobtracker.address</name>
+    <value>localhost:9001</value>
+  </property>
+
+</configuration></xmp>
+
+<p>(We also set the HDFS replication level to 1 in order to
+reduce warnings when running on a single node.)</p>
+
+<p>Now check that the command <br><tt>ssh localhost</tt><br> does not
+require a password.  If it does, execute the following commands:</p>
+
+<p><tt>ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa<br>
+cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
+</tt></p>
+
+<h3>Bootstrapping</h3>
+
+<p>A new distributed filesystem must be formatted with the following
+command, run on the master node:</p>
+
+<p><tt>bin/hadoop namenode -format</tt></p>
+
+<p>The Hadoop daemons are started with the following command:</p>
+
+<p><tt>bin/start-all.sh</tt></p>
+
+<p>Daemon log output is written to the <tt>logs/</tt> directory.</p>
+
+<p>Input files are copied into the distributed filesystem as follows:</p>
+
+<p><tt>bin/hadoop fs -put input input</tt></p>
+
+<h3>Distributed execution</h3>
+
+<p>Things are run as before, but output must be copied locally to
+examine it:</p>
+
+<tt>
+bin/hadoop jar hadoop-*-examples.jar grep input output 'dfs[a-z.]+'<br>
+bin/hadoop fs -get output output<br>
+cat output/*
+</tt>
+
+<p>When you're done, stop the daemons with:</p>
+
+<p><tt>bin/stop-all.sh</tt></p>
+
+<h3>Fully-distributed operation</h3>
+
+<p>Fully distributed operation is just like the pseudo-distributed operation
+described above, except that you must also specify:</p>
+
+<ol>
+
+<li>The hostname or IP address of your master server in the value
+for <tt><a
+href="../core-default.html#fs.default.name">fs.default.name</a></tt>,
+  as <tt><em>hdfs://master.example.com/</em></tt> in <tt>conf/core-site.xml</tt>.</li>
+
+<li>The host and port of your master server in the value
+of <tt><a href="../mapred-default.html#mapreduce.jobtracker.address">mapreduce.jobtracker.address</a></tt>
+as <tt><em>master.example.com</em>:<em>port</em></tt> in <tt>conf/mapred-site.xml</tt>.</li>
+
+<li>Directories for <tt><a
+href="../hdfs-default.html#dfs.name.dir">dfs.name.dir</a></tt> and
+<tt><a href="../hdfs-default.html#dfs.data.dir">dfs.data.dir</a></tt>
+in <tt>conf/hdfs-site.xml</tt>.
+These are local directories used to hold distributed filesystem
+data on the master node and slave nodes respectively.  Note
+that <tt>dfs.data.dir</tt> may contain a space- or comma-separated
+list of directory names, so that data may be stored on multiple local
+devices.</li>
+
+<li><tt><a href="../mapred-default.html#mapreduce.cluster.local.dir">mapreduce.cluster.local.dir</a></tt>
+  in <tt>conf/mapred-site.xml</tt>, the local directory where temporary 
+  MapReduce data is stored.  It also may be a list of directories.</li>
+
+<li><tt><a
+href="../mapred-default.html#mapreduce.job.maps">mapreduce.job.maps</a></tt>
+and <tt><a
+href="../mapred-default.html#mapreduce.job.reduces">mapreduce.job.reduces</a></tt> 
+in <tt>conf/mapred-site.xml</tt>.
+As a rule of thumb, use 10x the
+number of slave processors for <tt>mapreduce.job.maps</tt>, and 2x the
+number of slave processors for <tt>mapreduce.job.reduces</tt>.</li>
+
+</ol>
+
+<p>Finally, list all slave hostnames or IP addresses in your
+<tt>conf/slaves</tt> file, one per line.  Then format your filesystem
+and start your cluster on your master node, as above.</p>
+
+</body>
+</html>
+

Copied: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml (from r1082666, hadoop/mapreduce/branches/MR-279/src/java/mapred-default.xml)
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml?p2=hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml&p1=hadoop/mapreduce/branches/MR-279/src/java/mapred-default.xml&r1=1082666&r2=1082677&rev=1082677&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/mapred-default.xml (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml Thu Mar 17 20:21:13 2011
@@ -108,6 +108,13 @@
 </property>
 
 <property>
+  <name>mapreduce.local.clientfactory.class.name</name>
+  <value>org.apache.hadoop.mapred.LocalClientFactory</value>
+  <description>This is the client factory responsible for
+  creating the local job runner client.</description>
+</property>
+
+<property>
   <name>mapreduce.jobtracker.http.address</name>
   <value>0.0.0.0:50030</value>
   <description>

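A minimal sketch of reading the new property; the key and default value come from the hunk above, the surrounding code is an illustrative assumption:

    Configuration conf = new Configuration();
    String factoryClass = conf.get(
        "mapreduce.local.clientfactory.class.name",
        "org.apache.hadoop.mapred.LocalClientFactory");
    // the framework would instantiate this class reflectively to obtain
    // the local job runner client factory
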
Propchange: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Thu Mar 17 20:21:13 2011
@@ -0,0 +1,2 @@
+/hadoop/core/branches/branch-0.19/mapred/src/java/mapred-default.xml:713112
+/hadoop/core/trunk/src/mapred/mapred-default.xml:776175-785643

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/pom.xml?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/pom.xml (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/pom.xml Thu Mar 17 20:21:13 2011
@@ -0,0 +1,47 @@
+<?xml version="1.0"?>
+<project>
+  <parent>
+    <artifactId>hadoop-mapreduce-client</artifactId>
+    <groupId>org.apache.hadoop</groupId>
+    <version>${yarn.version}</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-mapreduce-client-hs</artifactId>
+  <name>hadoop-mapreduce-client-hs</name>
+  <version>${yarn.version}</version>
+  <url>http://maven.apache.org</url>
+
+  <dependencies>
+    <!-- begin MNG-4223 workaround -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>yarn-api</artifactId>
+      <version>${yarn.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>yarn-common</artifactId>
+      <version>${yarn.version}</version>
+    </dependency>
+    <!-- end MNG-4223 workaround -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${yarn.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-app</artifactId>
+      <version>${yarn.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-app</artifactId>
+      <version>${yarn.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+</project>

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java Thu Mar 17 20:21:13 2011
@@ -0,0 +1,228 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.hs;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.lib.TypeConverter;
+import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.mapreduce.v2.api.Counters;
+import org.apache.hadoop.mapreduce.v2.api.JobID;
+import org.apache.hadoop.mapreduce.v2.api.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.JobState;
+import org.apache.hadoop.mapreduce.v2.api.TaskAttemptCompletionEvent;
+import org.apache.hadoop.mapreduce.v2.api.TaskID;
+import org.apache.hadoop.mapreduce.v2.api.TaskType;
+
+/**
+ * Loads the basic job level data upfront. Task-level data from the job
+ * history file is meant to be loaded lazily (for now it is loaded upfront).
+ */
+public class CompletedJob implements org.apache.hadoop.mapreduce.v2.app.job.Job {
+  
+  static final Log LOG = LogFactory.getLog(CompletedJob.class);
+  private final Counters counters;
+  private final Configuration conf;
+  private final JobID jobID;
+  private final List<String> diagnostics = new ArrayList<String>();
+  private final JobReport report;
+  private final Map<TaskID, Task> tasks = new HashMap<TaskID, Task>();
+  private final Map<TaskID, Task> mapTasks = new HashMap<TaskID, Task>();
+  private final Map<TaskID, Task> reduceTasks = new HashMap<TaskID, Task>();
+  
+  private TaskAttemptCompletionEvent[] completionEvents;
+  private JobInfo jobInfo;
+
+
+  public CompletedJob(Configuration conf, JobID jobID) throws IOException {
+    this.conf = conf;
+    this.jobID = jobID;
+    //TODO fix
+    /*
+    String  doneLocation =
+      conf.get(JTConfig.JT_JOBHISTORY_COMPLETED_LOCATION,
+      "file:///tmp/yarn/done/status");
+    String user =
+      conf.get(MRJobConfig.USER_NAME, System.getProperty("user.name"));
+    String statusstoredir =
+      doneLocation + "/" + user + "/" + TypeConverter.fromYarn(jobID).toString();
+    Path statusFile = new Path(statusstoredir, "jobstats");
+    try {
+      FileContext fc = FileContext.getFileContext(statusFile.toUri(), conf);
+      FSDataInputStream in = fc.open(statusFile);
+      JobHistoryParser parser = new JobHistoryParser(in);
+      jobStats = parser.parse();
+    } catch (IOException e) {
+      LOG.info("Could not open job status store file from dfs " +
+        TypeConverter.fromYarn(jobID).toString());
+      throw new IOException(e);
+    }
+    */
+    
+    //TODO: load the data lazily. for now load the full data upfront
+    loadFullHistoryData();
+
+    counters = TypeConverter.toYarn(jobInfo.getTotalCounters());
+    diagnostics.add(jobInfo.getErrorInfo());
+    report = new JobReport();
+    report.id = jobID;
+    report.state = JobState.valueOf(jobInfo.getJobStatus());
+    report.startTime = jobInfo.getLaunchTime();
+    report.finishTime = jobInfo.getFinishTime();
+  }
+
+  @Override
+  public int getCompletedMaps() {
+    return jobInfo.getFinishedMaps();
+  }
+
+  @Override
+  public int getCompletedReduces() {
+    return jobInfo.getFinishedReduces();
+  }
+
+  @Override
+  public Counters getCounters() {
+    return counters;
+  }
+
+  @Override
+  public JobID getID() {
+    return jobID;
+  }
+
+  @Override
+  public JobReport getReport() {
+    return report;
+  }
+
+  @Override
+  public JobState getState() {
+    return report.state;
+  }
+
+  @Override
+  public Task getTask(TaskID taskID) {
+    return tasks.get(taskID);
+  }
+
+  @Override
+  public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
+      int fromEventId, int maxEvents) {
+    return completionEvents;
+  }
+
+  @Override
+  public Map<TaskID, Task> getTasks() {
+    return tasks;
+  }
+
+  //History data is lazily loaded when task level data is requested
+  private synchronized void loadFullHistoryData() {
+    if (jobInfo != null) {
+      return; //data already loaded
+    }
+    String user = conf.get(MRJobConfig.USER_NAME);
+    if (user == null) {
+      LOG.error("null user is not allowed");
+    }
+    String jobName = TypeConverter.fromYarn(jobID).toString();
+    String jobhistoryDir = conf.get("yarn.server.nodemanager.jobhistory",
+        "file:///tmp/yarn/done")
+        + "/" + user;
+    FSDataInputStream in = null;
+    String jobhistoryFileName = jobName; // TODO use existing hadoop
+                                         // directory structure
+    Path historyFilePath = new Path(jobhistoryDir, jobhistoryFileName);
+
+    try {
+      FileContext fc = FileContext.getFileContext(historyFilePath.toUri());
+      in = fc.open(historyFilePath);
+      JobHistoryParser parser = new JobHistoryParser(in);
+      jobInfo = parser.parse();
+      LOG.info("jobInfo loaded");
+    } catch (IOException e) {
+      throw new YarnException("Could not load history file " + historyFilePath,
+          e);
+    }
+    
+    // populate the tasks
+    for (Map.Entry<org.apache.hadoop.mapreduce.TaskID, TaskInfo> entry : jobInfo
+        .getAllTasks().entrySet()) {
+      TaskID yarnTaskID = TypeConverter.toYarn(entry.getKey());
+      TaskInfo taskInfo = entry.getValue();
+      Task task = new CompletedTask(yarnTaskID, taskInfo);
+      tasks.put(yarnTaskID, task);
+      if (task.getType() == TaskType.MAP) {
+        mapTasks.put(task.getID(), task);
+      } else if (task.getType() == TaskType.REDUCE) {
+        reduceTasks.put(task.getID(), task);
+      }
+    }
+    
+    // TODO: populate the TaskAttemptCompletionEvent
+    completionEvents = new TaskAttemptCompletionEvent[0];
+    
+    
+  }
+
+  @Override
+  public List<String> getDiagnostics() {
+    return diagnostics;
+  }
+
+  @Override
+  public CharSequence getName() {
+    return jobInfo.getJobname();
+  }
+
+  @Override
+  public int getTotalMaps() {
+    return jobInfo.getTotalMaps();
+  }
+
+  @Override
+  public int getTotalReduces() {
+    return jobInfo.getTotalReduces();
+  }
+
+  @Override
+  public Map<TaskID, Task> getTasks(TaskType taskType) {
+    if (TaskType.MAP.equals(taskType)) {
+      return mapTasks;
+    } else { //we have only two types of tasks
+      return reduceTasks;
+    }
+  }
+}

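A hedged usage sketch for CompletedJob; the user name is made up, and jobID stands for an org.apache.hadoop.mapreduce.v2.api.JobID obtained elsewhere:

    Configuration conf = new Configuration();
    conf.set(MRJobConfig.USER_NAME, "alice");  // history location is per-user
    CompletedJob job = new CompletedJob(conf, jobID);  // throws IOException
    System.out.println(job.getName() + ": " + job.getCompletedMaps()
        + "/" + job.getTotalMaps() + " maps done");
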
Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java Thu Mar 17 20:21:13 2011
@@ -0,0 +1,131 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.hs;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.lib.TypeConverter;
+import org.apache.hadoop.mapreduce.v2.api.Counters;
+import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
+import org.apache.hadoop.mapreduce.v2.api.TaskID;
+import org.apache.hadoop.mapreduce.v2.api.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.TaskState;
+import org.apache.hadoop.mapreduce.v2.api.TaskType;
+
+public class CompletedTask implements Task {
+
+
+  private final TaskType type;
+  private final Counters counters;
+  private final long startTime;
+  private final long finishTime;
+  private final TaskState state;
+  private final TaskID taskID;
+  private final TaskReport report;
+  private final Map<TaskAttemptID, TaskAttempt> attempts =
+    new LinkedHashMap<TaskAttemptID, TaskAttempt>();
+  
+  private static final Log LOG = LogFactory.getLog(CompletedTask.class);
+
+  CompletedTask(TaskID taskID, TaskInfo taskinfo) {
+    this.taskID = taskID;
+    this.startTime = taskinfo.getStartTime();
+    this.finishTime = taskinfo.getFinishTime();
+    this.type = TypeConverter.toYarn(taskinfo.getTaskType());
+    this.counters = TypeConverter.toYarn(
+        new org.apache.hadoop.mapred.Counters(taskinfo.getCounters()));
+    this.state = TaskState.valueOf(taskinfo.getTaskStatus());
+    for (TaskAttemptInfo attemptHistory : 
+                taskinfo.getAllTaskAttempts().values()) {
+      CompletedTaskAttempt attempt = new CompletedTaskAttempt(taskID, 
+          attemptHistory);
+      attempts.put(attempt.getID(), attempt);
+    }
+    
+    report = new TaskReport();
+    report.id = taskID;
+    report.startTime = startTime;
+    report.finishTime = finishTime;
+    report.state = state;
+    report.progress = getProgress();
+    report.counters = getCounters();
+    report.runningAttempts = new ArrayList<TaskAttemptID>();
+    report.runningAttempts.addAll(attempts.keySet());
+    //report.successfulAttempt = taskHistory.; //TODO
+  }
+
+  @Override
+  public boolean canCommit(TaskAttemptID taskAttemptID) {
+    return false;
+  }
+
+  @Override
+  public TaskAttempt getAttempt(TaskAttemptID attemptID) {
+    return attempts.get(attemptID);
+  }
+
+  @Override
+  public Map<TaskAttemptID, TaskAttempt> getAttempts() {
+    return attempts;
+  }
+
+  @Override
+  public Counters getCounters() {
+    return counters;
+  }
+
+  @Override
+  public TaskID getID() {
+    return taskID;
+  }
+
+  @Override
+  public float getProgress() {
+    return 1.0f;
+  }
+
+  @Override
+  public TaskReport getReport() {
+    return report;
+  }
+
+  @Override
+  public TaskType getType() {
+    return type;
+  }
+
+  @Override
+  public boolean isFinished() {
+    return true;
+  }
+
+  @Override
+  public TaskState getState() {
+    return state;
+  }
+
+}
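
A minimal sketch of how a history reader might consume a CompletedTask, assuming a TaskID and a parsed TaskInfo are already in hand (taskID and taskInfo are placeholders; the report fields are the public fields set in the constructor above):

    CompletedTask task = new CompletedTask(taskID, taskInfo);
    TaskReport report = task.getReport();
    System.out.println(report.id + " -> " + report.state + ", ran "
        + (report.finishTime - report.startTime) + " ms over "
        + task.getAttempts().size() + " attempt(s)");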

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java Thu Mar 17 20:21:13 2011
@@ -0,0 +1,121 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.hs;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.lib.TypeConverter;
+import org.apache.hadoop.yarn.ContainerID;
+import org.apache.hadoop.mapreduce.v2.api.Counters;
+import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
+import org.apache.hadoop.mapreduce.v2.api.TaskAttemptReport;
+import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.TaskID;
+
+public class CompletedTaskAttempt implements TaskAttempt {
+
+  private final TaskAttemptInfo attemptInfo;
+  private final TaskAttemptID attemptId;
+  private final Counters counters;
+  private final TaskAttemptState state;
+  private final TaskAttemptReport report;
+  private final List<CharSequence> diagnostics = new ArrayList<CharSequence>();
+
+  CompletedTaskAttempt(TaskID taskID, TaskAttemptInfo attemptInfo) {
+    this.attemptInfo = attemptInfo;
+    this.attemptId = TypeConverter.toYarn(attemptInfo.getAttemptId());
+    this.counters = TypeConverter.toYarn(
+        new org.apache.hadoop.mapred.Counters(attemptInfo.getCounters()));
+    this.state = TaskAttemptState.valueOf(attemptInfo.getState());
+    
+    if (attemptInfo.getError() != null) {
+      diagnostics.add(attemptInfo.getError());
+    }
+    
+    report = new TaskAttemptReport();
+    report.id = attemptId;
+    report.state = state;
+    report.progress = getProgress();
+    report.startTime = attemptInfo.getStartTime();
+    report.finishTime = attemptInfo.getFinishTime();
+    report.diagnosticInfo = attemptInfo.getError();
+    //result.phase = attemptInfo.get;//TODO
+    report.stateString = state.toString();
+    report.counters = getCounters();
+  }
+
+  @Override
+  public ContainerID getAssignedContainerID() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  @Override
+  public String getAssignedContainerMgrAddress() {
+    return attemptInfo.getHostname();
+  }
+
+  @Override
+  public Counters getCounters() {
+    return counters;
+  }
+
+  @Override
+  public TaskAttemptID getID() {
+    return attemptId;
+  }
+
+  @Override
+  public float getProgress() {
+    return 1.0f;
+  }
+
+  @Override
+  public TaskAttemptReport getReport() {
+    return report;
+  }
+
+  @Override
+  public TaskAttemptState getState() {
+    return state;
+  }
+
+  @Override
+  public boolean isFinished() {
+    return true;
+  }
+
+  @Override
+  public List<CharSequence> getDiagnostics() {
+    return diagnostics;
+  }
+
+  @Override
+  public long getLaunchTime() {
+    return report.startTime;
+  }
+
+  @Override
+  public long getFinishTime() {
+    return report.finishTime;
+  }
+}
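
As a sketch, the diagnostics list built above holds at most the single error string recorded in history; a caller such as a history UI might drain it like this (attempt is a placeholder):

    for (CharSequence diag : attempt.getDiagnostics()) {
      System.err.println("Attempt " + attempt.getID() + ": " + diag);
    }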

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java Thu Mar 17 20:21:13 2011
@@ -0,0 +1,194 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.hs;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.ipc.Server;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.ipc.RPCUtil;
+import org.apache.hadoop.yarn.ipc.YarnRPC;
+import org.apache.hadoop.yarn.service.AbstractService;
+import org.apache.hadoop.yarn.webapp.WebApp;
+import org.apache.hadoop.mapreduce.v2.api.Counters;
+import org.apache.hadoop.mapreduce.v2.api.JobID;
+import org.apache.hadoop.mapreduce.v2.api.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
+import org.apache.hadoop.mapreduce.v2.api.TaskAttemptCompletionEvent;
+import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
+import org.apache.hadoop.mapreduce.v2.api.TaskAttemptReport;
+import org.apache.hadoop.mapreduce.v2.api.TaskID;
+import org.apache.hadoop.mapreduce.v2.api.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.TaskType;
+
+/**
+ * This service is responsible for talking to the
+ * user-facing JobClient.
+ *
+ */
+public class HistoryClientService extends AbstractService {
+
+  private static final Log LOG = LogFactory.getLog(HistoryClientService.class);
+  
+  private MRClientProtocol protocolHandler;
+  private Server server;
+  private WebApp webApp;
+  private InetSocketAddress bindAddress;
+  private HistoryContext history;
+
+  public HistoryClientService(HistoryContext history) {
+    super("HistoryClientService");
+    this.history = history;
+    this.protocolHandler = new MRClientProtocolHandler();
+  }
+
+  public void start() {
+    Configuration conf = new Configuration(getConfig());
+    YarnRPC rpc = YarnRPC.create(conf);
+    String serviceAddr = conf.get("jobhistory.server.hostname") + ":"
+        + conf.get("jobhistory.server.port");
+    InetSocketAddress address = NetUtils.createSocketAddr(serviceAddr);
+    InetAddress hostNameResolved = null;
+    try {
+      hostNameResolved = InetAddress.getLocalHost();
+    } catch (UnknownHostException e) {
+      throw new YarnException(e);
+    }
+
+    //TODO: security
+    server =
+        rpc.getServer(MRClientProtocol.class, protocolHandler, address,
+            conf, null);
+    server.start();
+    this.bindAddress =
+        NetUtils.createSocketAddr(hostNameResolved.getHostAddress()
+            + ":" + server.getPort());
+    LOG.info("Instantiated MRClientService at " + this.bindAddress);
+    
+    //TODO: start webApp on fixed port ??
+    super.start();
+  }
+
+  public void stop() {
+    server.close();
+    if (webApp != null) {
+      webApp.stop();
+    }
+    super.stop();
+  }
+
+  private class MRClientProtocolHandler implements MRClientProtocol {
+
+    private Job getJob(JobID jobID) throws AvroRemoteException {
+      Job job = history.getJob(jobID);
+      if (job == null) {
+        throw RPCUtil.getRemoteException("Unknown job " + jobID);
+      }
+      return job;
+    }
+
+    @Override
+    public Counters getCounters(JobID jobID) throws AvroRemoteException {
+      Job job = getJob(jobID);
+      return job.getCounters();
+    }
+
+    @Override
+    public JobReport getJobReport(JobID jobID) throws AvroRemoteException {
+      Job job = getJob(jobID);
+      return job.getReport();
+    }
+
+    @Override
+    public TaskAttemptReport getTaskAttemptReport(TaskAttemptID taskAttemptID)
+        throws AvroRemoteException {
+      Job job = getJob(taskAttemptID.taskID.jobID);
+      return job.getTask(taskAttemptID.taskID).
+          getAttempt(taskAttemptID).getReport();
+    }
+
+    @Override
+    public TaskReport getTaskReport(TaskID taskID) throws AvroRemoteException {
+      Job job = getJob(taskID.jobID);
+      return job.getTask(taskID).getReport();
+    }
+
+    @Override
+    public List<TaskAttemptCompletionEvent> getTaskAttemptCompletionEvents(
+        JobID jobID, 
+        int fromEventId, int maxEvents) throws AvroRemoteException {
+      Job job = getJob(jobID);
+      return Arrays.asList(job.getTaskAttemptCompletionEvents(fromEventId, 
+          maxEvents));
+    }
+
+    @Override
+    public Void killJob(JobID jobID) throws AvroRemoteException {
+      throw RPCUtil.getRemoteException("Invalid operation on completed job");
+    }
+
+    @Override
+    public Void killTask(TaskID taskID) throws AvroRemoteException {
+      getJob(taskID.jobID);
+      throw RPCUtil.getRemoteException("Invalid operation on completed job");
+    }
+
+    @Override
+    public Void killTaskAttempt(TaskAttemptID taskAttemptID)
+        throws AvroRemoteException {
+      getJob(taskAttemptID.taskID.jobID);
+      throw RPCUtil.getRemoteException("Invalid operation on completed job");
+    }
+
+    @Override
+    public List<CharSequence> getDiagnostics(TaskAttemptID taskAttemptID)
+        throws AvroRemoteException {
+      Job job = getJob(taskAttemptID.taskID.jobID);
+      return job.getTask(taskAttemptID.taskID).
+                 getAttempt(taskAttemptID).getDiagnostics();
+    }
+
+    @Override
+    public List<TaskReport> getTaskReports(JobID jobID, TaskType taskType)
+        throws AvroRemoteException {
+      Job job = getJob(jobID);
+      List<TaskReport> reports = new ArrayList<TaskReport>();
+      Collection<Task> tasks = job.getTasks(taskType).values();
+      for (Task task : tasks) {
+        reports.add(task.getReport());
+      }
+      return reports;
+    }
+
+  }
+}
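
A rough client-side sketch, assuming YarnRPC can produce an MRClientProtocol proxy for the address this service binds (the host, port, and jobID are placeholders, and the exact proxy-factory signature may differ in this snapshot):

    Configuration conf = new Configuration();
    YarnRPC rpc = YarnRPC.create(conf);
    MRClientProtocol client = (MRClientProtocol) rpc.getProxy(
        MRClientProtocol.class,
        NetUtils.createSocketAddr("history-host:10020"), conf);
    JobReport report = client.getJobReport(jobID); // jobID obtained elsewhere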

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryContext.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryContext.java?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryContext.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryContext.java Thu Mar 17 20:21:13 2011
@@ -0,0 +1,33 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.hs;
+
+import java.util.Map;
+
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.yarn.ApplicationID;
+import org.apache.hadoop.mapreduce.v2.api.JobID;
+
+public interface HistoryContext {
+
+  Job getJob(JobID id);
+
+  Map<JobID, Job> getAllJobs(ApplicationID appID);
+
+}
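
For tests, a trivial in-memory implementation of this interface might look like the following sketch (StubHistoryContext is hypothetical, not part of this commit):

    public class StubHistoryContext implements HistoryContext {
      private final Map<JobID, Job> jobs = new HashMap<JobID, Job>();

      @Override
      public Job getJob(JobID id) { return jobs.get(id); }

      @Override
      public Map<JobID, Job> getAllJobs(ApplicationID appID) { return jobs; }
    }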

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java Thu Mar 17 20:21:13 2011
@@ -0,0 +1,82 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.hs;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.hs.CompletedJob;
+import org.apache.hadoop.mapreduce.v2.lib.TypeConverter;
+import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.ApplicationID;
+import org.apache.hadoop.mapreduce.v2.api.JobID;
+
+/*
+ * Loads and manages the Job history cache.
+ */
+public class JobHistory implements HistoryContext {
+
+  private Map<JobID, Job> completedJobCache =
+    new ConcurrentHashMap<JobID, Job>();
+  private Configuration conf;
+  private final LinkedList<Job> jobQ = new LinkedList<Job>();
+  private static final Log LOG = LogFactory.getLog(JobHistory.class);
+  private final int retiredJobsCacheSize = 1000; //TODO make it configurable
+
+
+  public JobHistory(Configuration conf) {
+    this.conf = conf;
+  }
+  @Override
+  public synchronized Job getJob(JobID jobID) {
+    Job job = completedJobCache.get(jobID);
+    if (job == null) {
+      try {
+        job = new CompletedJob(conf, jobID);
+      } catch (IOException e) {
+        LOG.warn("HistoryContext getJob failed " + e);
+        throw new YarnException(e);
+      }
+      completedJobCache.put(jobID, job);
+      jobQ.add(job);
+      if (jobQ.size() > retiredJobsCacheSize) {
+         Job removed = jobQ.remove();
+         completedJobCache.remove(removed.getID());
+      }
+    }
+    return job;
+  }
+
+  @Override
+  public Map<JobID, Job> getAllJobs(ApplicationID appID) {
+    // currently there is a one-to-one mapping between application and job IDs
+    org.apache.hadoop.mapreduce.JobID oldJobID = TypeConverter.fromYarn(appID);
+    Map<JobID, Job> jobs = new HashMap<JobID, Job>();
+    JobID jobID = TypeConverter.toYarn(oldJobID);
+    jobs.put(jobID, getJob(jobID));
+    return jobs;
+  }
+}
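
The eviction above is FIFO: jobQ remembers insertion order and retires the oldest cached job once retiredJobsCacheSize is exceeded. The same bound could be expressed with a single LinkedHashMap, as in this equivalent sketch (not what the code above does, merely the same policy):

    Map<JobID, Job> cache = Collections.synchronizedMap(
        new LinkedHashMap<JobID, Job>(16, 0.75f, false) {
          @Override
          protected boolean removeEldestEntry(Map.Entry<JobID, Job> eldest) {
            return size() > 1000; // mirrors retiredJobsCacheSize
          }
        });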

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java Thu Mar 17 20:21:13 2011
@@ -0,0 +1,67 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.hs;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.service.CompositeService;
+
+/******************************************************************
+ * {@link JobHistoryServer} is responsible for servicing all
+ * job-history-related requests from the client.
+ *
+ *****************************************************************/
+public class JobHistoryServer extends CompositeService {
+  private static final Log LOG = LogFactory.getLog(JobHistoryServer.class);
+
+  static{
+    Configuration.addDefaultResource("mapred-default.xml");
+    Configuration.addDefaultResource("mapred-site.xml");
+  }
+
+  public JobHistoryServer() {
+    super(JobHistoryServer.class.getName());
+  }
+
+  public synchronized void init(Configuration conf) {
+    Configuration config = new YarnConfiguration(conf);
+    HistoryContext history = new JobHistory(config);
+    addService(new HistoryClientService(history));
+    //TODO: add HistoryCleaner service
+    super.init(config);
+  }
+
+  public static void main(String[] args) {
+    StringUtils.startupShutdownMessage(JobHistoryServer.class, args, LOG);
+    try {
+      JobHistoryServer server = new JobHistoryServer();
+      YarnConfiguration conf = new YarnConfiguration(new JobConf());
+      server.init(conf);
+      server.start();
+    } catch (Throwable e) {
+      LOG.fatal(StringUtils.stringifyException(e));
+      System.exit(-1);
+    }
+  }
+
+}
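
Assuming the jobhistory.server.hostname and jobhistory.server.port keys that HistoryClientService reads, an embedded launch might look like this sketch (the host and port values are placeholders):

    JobConf jobConf = new JobConf();
    jobConf.set("jobhistory.server.hostname", "localhost");
    jobConf.set("jobhistory.server.port", "10020");
    JobHistoryServer server = new JobHistoryServer();
    server.init(new YarnConfiguration(jobConf));
    server.start();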

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java Thu Mar 17 20:21:13 2011
@@ -0,0 +1,119 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.hs;
+
+import java.util.Map;
+
+import junit.framework.Assert;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler;
+import org.apache.hadoop.mapreduce.v2.app.MRApp;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.lib.TypeConverter;
+import org.apache.hadoop.mapreduce.v2.api.JobID;
+import org.apache.hadoop.mapreduce.v2.api.JobState;
+import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
+import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.TaskID;
+import org.apache.hadoop.mapreduce.v2.api.TaskState;
+import org.apache.hadoop.mapreduce.v2.api.TaskType;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.junit.Test;
+
+public class TestJobHistoryEvents {
+  private static final Log LOG = LogFactory.getLog(TestJobHistoryEvents.class);
+
+  @Test
+  public void testHistoryEvents() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set("mapreduce.job.user.name", "test");
+    MRApp app = new HistoryEnabledApp(2, 1, true);
+    app.submit(conf);
+    Job job = app.getContext().getAllJobs().values().iterator().next();
+    JobID jobId = job.getID();
+    LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
+    app.waitForState(job, JobState.SUCCEEDED);
+    /*
+     * Use HistoryContext to read logged events and verify the number of
+     * completed maps
+     */
+    HistoryContext context = new JobHistory(conf);
+    Job parsedJob = context.getJob(jobId);
+    Assert.assertEquals("CompletedMaps not correct", 2,
+        parsedJob.getCompletedMaps());
+    
+    
+    Map<TaskID, Task> tasks = parsedJob.getTasks();
+    Assert.assertEquals("No of tasks not correct", 3, tasks.size());
+    for (Task task : tasks.values()) {
+      verifyTask(task);
+    }
+    
+    Map<TaskID, Task> maps = parsedJob.getTasks(TaskType.MAP);
+    Assert.assertEquals("No of maps not correct", 2, maps.size());
+    
+    Map<TaskID, Task> reduces = parsedJob.getTasks(TaskType.REDUCE);
+    Assert.assertEquals("No of reduces not correct", 1, reduces.size());
+    
+    
+    Assert.assertEquals("CompletedReduce not correct", 1,
+        parsedJob.getCompletedReduces());
+    
+    Assert.assertEquals("Job state not currect", JobState.SUCCEEDED,
+        parsedJob.getState());
+  }
+
+  private void verifyTask(Task task) {
+    Assert.assertEquals("Task state not currect", TaskState.SUCCEEDED,
+        task.getState());
+    Map<TaskAttemptID, TaskAttempt> attempts = task.getAttempts();
+    Assert.assertEquals("No of attempts not correct", 1, attempts.size());
+    for (TaskAttempt attempt : attempts.values()) {
+      verifyAttempt(attempt);
+    }
+  }
+
+  private void verifyAttempt(TaskAttempt attempt) {
+    Assert.assertEquals("TaskAttempt state not currect", 
+        TaskAttemptState.SUCCEEDED, attempt.getState());
+  }
+
+  static class HistoryEnabledApp extends MRApp {
+    public HistoryEnabledApp(int maps, int reduces, boolean autoComplete) {
+      super(maps, reduces, autoComplete);
+    }
+
+    @Override
+    protected EventHandler<JobHistoryEvent> createJobHistoryHandler(
+        Configuration conf) {
+      return new JobHistoryEventHandler(conf);
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    TestJobHistoryEvents t = new TestJobHistoryEvents();
+    t.testHistoryEvents();
+  }
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/pom.xml?rev=1082677&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/pom.xml (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/pom.xml Thu Mar 17 20:21:13 2011
@@ -0,0 +1,95 @@
+<?xml version="1.0"?><project>
+  <parent>
+    <artifactId>hadoop-mapreduce-client</artifactId>
+    <groupId>org.apache.hadoop</groupId>
+    <version>${yarn.version}</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+  <name>hadoop-mapreduce-client-jobclient</name>
+  <version>${yarn.version}</version>
+  <url>http://maven.apache.org</url>
+
+  <dependencies>
+    <!-- begin MNG-4223 workaround -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>yarn-api</artifactId>
+      <version>${yarn.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>yarn-common</artifactId>
+      <version>${yarn.version}</version>
+    </dependency>
+    <!-- end MNG-4223 workaround -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${yarn.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
+      <version>${yarn.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-app</artifactId>
+      <version>${yarn.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-app</artifactId>
+      <version>${yarn.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-hs</artifactId>
+      <version>${yarn.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>yarn-server-nodemanager</artifactId>
+      <version>${yarn.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>yarn-server-resourcemanager</artifactId>
+      <version>${yarn.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>yarn-server-common</artifactId>
+      <version>${yarn.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>yarn-server-tests</artifactId>
+      <version>${yarn.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <!--exclude>**/TestYARNClient.java</exclude-->
+          </excludes>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>