Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2011/04/12 09:56:09 UTC

svn commit: r1091316 [1/3] - in /hadoop/mapreduce/branches/MR-279: ./ mr-client/hadoop-mapreduce-client-core/ mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache...

Author: vinodkv
Date: Tue Apr 12 07:56:07 2011
New Revision: 1091316

URL: http://svn.apache.org/viewvc?rev=1091316&view=rev
Log:
Implementing Containers' memory monitoring. Contributed by Vinod Kumar Vavilapalli.
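
For orientation, the monitoring boils down to walking a container's process tree under /proc and comparing its memory to the container's limit. A minimal sketch of that check, using only the ProcfsBasedProcessTree API added below (the limit value and the kill decision are left to the caller; this is not the committed ContainersMonitorImpl):

import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree;

public class VmemCheckSketch {
  // Returns true if the container's process tree exceeds its virtual memory limit.
  public static boolean isOverVmemLimit(String containerPid, long vmemLimitBytes) {
    ProcfsBasedProcessTree tree = new ProcfsBasedProcessTree(containerPid);
    tree = tree.getProcessTree();              // refresh the snapshot from /proc
    long vmemUsed = tree.getCumulativeVmem();  // virtual memory of the whole tree
    return vmemUsed > vmemLimitBytes;          // caller would kill and record diagnostics
  }
}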

Added:
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/LinuxResourceCalculatorPlugin.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/test/java/org/apache/hadoop/util/
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/test/java/org/apache/hadoop/util/TestProcfsBasedProcessTree.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerDiagnosticsUpdateEvent.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainerStartMonitoringEvent.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainerStopMonitoringEvent.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java
Removed:
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java
    hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java
    hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java
    hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapreduce/util/TestProcfsBasedProcessTree.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerMonitor.java
Modified:
    hadoop/mapreduce/branches/MR-279/ivy.xml
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/pom.xml
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
    hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/JobTracker.java
    hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/NodeHealthStatus.java
    hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskMemoryManagerThread.java
    hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskTracker.java
    hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java
    hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/LinuxMemoryCalculatorPlugin.java
    hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/MemoryCalculatorPlugin.java
    hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/util/ProcfsBasedProcessTree.java
    hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/DummyResourceCalculatorPlugin.java
    hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTTResourceReporting.java
    hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java
    hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java
    hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
    hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapreduce/util/TestLinuxResourceCalculatorPlugin.java
    hadoop/mapreduce/branches/MR-279/src/webapps/job/jobtracker.jspx
    hadoop/mapreduce/branches/MR-279/src/webapps/job/machines.jsp
    hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerStatus.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/proto/yarn_protos.proto
    hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-common/src/main/java/org/apache/hadoop/Clock.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-common/src/main/resources/yarn-default.xml
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/pom.xml
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NMConfig.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerEventType.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitor.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorEvent.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerTokenSecretManager.java

Modified: hadoop/mapreduce/branches/MR-279/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/ivy.xml?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/ivy.xml (original)
+++ hadoop/mapreduce/branches/MR-279/ivy.xml Tue Apr 12 07:56:07 2011
@@ -76,6 +76,8 @@
                rev="${hadoop-mapreduce-client-core.version}" conf="common->default"/> 
    <dependency org="org.apache.hadoop" name="yarn-server-common" 
                rev="${yarn.version}" conf="common->default"/> 
+   <dependency org="org.apache.hadoop" name="yarn-common" 
+               rev="${yarn.version}" conf="common->default"/> 
 
    <dependency org="jdiff" name="jdiff" rev="${jdiff.version}"
                conf="jdiff->default"/>

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/pom.xml?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/pom.xml (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/pom.xml Tue Apr 12 07:56:07 2011
@@ -15,6 +15,14 @@
     <yarn.version>1.0-SNAPSHOT</yarn.version>
   </properties>
   
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>yarn-common</artifactId> 
+      <version>${yarn.version}</version>
+    </dependency>
+  </dependencies>
+
 <build>
     <plugins>
       <plugin>

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java Tue Apr 12 07:56:07 2011
@@ -59,13 +59,13 @@ import org.apache.hadoop.mapreduce.JobSt
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer;
 import org.apache.hadoop.mapreduce.task.ReduceContextImpl;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin.*;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Progress;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin.*;
 
 /**
  * Base class for tasks.

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java Tue Apr 12 07:56:07 2011
@@ -86,8 +86,9 @@ public class YARNRunner implements Clien
 
   private static final Log LOG = LogFactory.getLog(YARNRunner.class);
 
-  public static final String YARN_AM_RESOURCE_KEY = "yarn.am.mapreduce.resource.mb";
-  private static final int DEFAULT_YARN_AM_RESOURCE = 1024;
+  public static final String YARN_AM_VMEM_MB =
+      "yarn.am.mapreduce.resource.mb";
+  private static final int DEFAULT_YARN_AM_VMEM_MB = 1536;
   
   private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
   private ResourceMgrDelegate resMgrDelegate;
@@ -263,9 +264,8 @@ public class YARNRunner implements Clien
     ApplicationId applicationId = resMgrDelegate.getApplicationId();
     appContext.setApplicationId(applicationId);
     Resource capability = recordFactory.newRecordInstance(Resource.class);
-    capability.setMemory(
-        conf.getInt(YARN_AM_RESOURCE_KEY, DEFAULT_YARN_AM_RESOURCE));
-    LOG.info("AppMaster capability = " + capability);
+    capability.setMemory(conf.getInt(YARN_AM_VMEM_MB, DEFAULT_YARN_AM_VMEM_MB));
+    LOG.info("Master capability = " + capability);
     appContext.setMasterCapability(capability);
 
     FileContext defaultFS = FileContext.getFileContext(conf);

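The hunk above renames the ApplicationMaster memory key and raises its default to 1536 MB. A hedged example of overriding it from a job client (only the key name comes from the diff; the 2048 value is arbitrary):

import org.apache.hadoop.conf.Configuration;

public class AmMemorySketch {
  public static Configuration withLargerAm() {
    Configuration conf = new Configuration();
    // Request 2048 MB for the MapReduce ApplicationMaster instead of the 1536 MB default.
    conf.setInt("yarn.am.mapreduce.resource.mb", 2048);
    return conf;
  }
}
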
Modified: hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/JobTracker.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/JobTracker.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/JobTracker.java Tue Apr 12 07:56:07 2011
@@ -56,7 +56,7 @@ import javax.security.auth.login.LoginEx
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.Clock;
-import org.apache.hadoop.NodeHealthStatus;
+import org.apache.hadoop.mapred.NodeHealthStatus;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -2699,7 +2699,7 @@ public class JobTracker implements MRCon
     NodeHealthStatus status = trackerStatus.getHealthStatus();
     synchronized (faultyTrackers) {
       faultyTrackers.setNodeHealthStatus(trackerStatus.getHost(), 
-          status.isNodeHealthy(), status.getHealthReport());
+          status.getIsNodeHealthy(), status.getHealthReport());
     }
   }
     

Modified: hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/NodeHealthStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/NodeHealthStatus.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/NodeHealthStatus.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/NodeHealthStatus.java Tue Apr 12 07:56:07 2011
@@ -13,7 +13,7 @@ import org.apache.hadoop.io.Writable;
  * 
  */
 public class NodeHealthStatus implements
-    org.apache.hadoop.yarn.api.records.NodeHealthStatus, Writable {
+  org.apache.hadoop.yarn.server.api.records.NodeHealthStatus, Writable {
 
   private boolean isNodeHealthy;
   private String healthReport;
@@ -38,7 +38,8 @@ public class NodeHealthStatus implements
    * 
    * @param isNodeHealthy
    */
-  public void setNodeHealthy(boolean isNodeHealthy) {
+  @Override
+  public void setIsNodeHealthy(boolean isNodeHealthy) {
     this.isNodeHealthy = isNodeHealthy;
   }
 
@@ -48,7 +49,8 @@ public class NodeHealthStatus implements
    * 
    * @return true if the node is healthy.
    */
-  public boolean isNodeHealthy() {
+  @Override
+  public boolean getIsNodeHealthy() {
     return isNodeHealthy;
   }
 
@@ -58,6 +60,7 @@ public class NodeHealthStatus implements
    * @param healthReport
    *          String listing cause of failure.
    */
+  @Override
   public void setHealthReport(String healthReport) {
     this.healthReport = healthReport;
   }
@@ -79,7 +82,7 @@ public class NodeHealthStatus implements
    * @param lastReported last reported time by node 
    * health script
    */
-  public void setLastReported(long lastReported) {
+  public void setLastHealthReportTime(long lastReported) {
     this.lastHealthReportTime = lastReported;
   }
 
@@ -88,7 +91,8 @@ public class NodeHealthStatus implements
    * 
    * @return time stamp of most recent health update.
    */
-  public long getLastReported() {
+  @Override
+  public long getLastHealthReportTime() {
     return lastHealthReportTime;
   }
 

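The accessors above are renamed to match the yarn-server NodeHealthStatus record. A short sketch of the renamed API, mirroring the test changes later in this commit (the report string is made up):

import org.apache.hadoop.mapred.NodeHealthStatus;

public class NodeHealthSketch {
  public static NodeHealthStatus unhealthy(String report) {
    NodeHealthStatus status = new NodeHealthStatus();
    status.setIsNodeHealthy(false);                              // was setNodeHealthy(...)
    status.setLastHealthReportTime(System.currentTimeMillis());  // was setLastReported(...)
    status.setHealthReport(report);
    return status;
  }
}
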
Modified: hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskMemoryManagerThread.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskMemoryManagerThread.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskMemoryManagerThread.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskMemoryManagerThread.java Tue Apr 12 07:56:07 2011
@@ -33,7 +33,7 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskTracker;
 import org.apache.hadoop.mapred.TaskTracker.TaskInProgress;
 import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
-import org.apache.hadoop.mapreduce.util.ProcfsBasedProcessTree;
+import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree;
 import org.apache.hadoop.util.StringUtils;
 
 /**

Modified: hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskTracker.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskTracker.java Tue Apr 12 07:56:07 2011
@@ -54,7 +54,7 @@ import javax.servlet.http.HttpServletRes
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.NodeHealthCheckerService;
-import org.apache.hadoop.NodeHealthStatus;
+import org.apache.hadoop.mapred.NodeHealthStatus;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -98,8 +98,7 @@ import static org.apache.hadoop.metrics2
 import org.apache.hadoop.mapreduce.util.ConfigUtil;
 import org.apache.hadoop.mapreduce.util.MRAsyncDiskService;
 import org.apache.hadoop.mapreduce.util.MemoryCalculatorPlugin;
-import org.apache.hadoop.mapreduce.util.ProcfsBasedProcessTree;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
+import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree;
 import org.apache.hadoop.mapreduce.util.ServerConfigUtil;
 import org.apache.hadoop.net.DNS;
 import org.apache.hadoop.net.NetUtils;
@@ -115,6 +114,8 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.RunJar;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
+
 import static org.apache.hadoop.mapred.QueueManager.toFullPropertyName;
 
 /*******************************************************
@@ -1730,8 +1731,8 @@ public class TaskTracker 
       if (healthChecker != null) {
         healthChecker.setHealthStatus(healthStatus);
       } else {
-        healthStatus.setNodeHealthy(true);
-        healthStatus.setLastReported(0L);
+        healthStatus.setIsNodeHealthy(true);
+        healthStatus.setLastHealthReportTime(0L);
         healthStatus.setHealthReport("");
       }
     }
@@ -1803,7 +1804,7 @@ public class TaskTracker 
    * @return total size of free virtual memory.
    */
   long getAvailableVirtualMemoryOnTT() {
-    long availableVirtualMemoryOnTT = TaskTrackerStatus.UNAVAILABLE;
+    long availableVirtualMemoryOnTT = ResourceCalculatorPlugin.UNAVAILABLE;
     if (resourceCalculatorPlugin != null) {
       availableVirtualMemoryOnTT =
               resourceCalculatorPlugin.getAvailableVirtualMemorySize();
@@ -1816,7 +1817,7 @@ public class TaskTracker 
    * @return total size of free physical memory in bytes
    */
   long getAvailablePhysicalMemoryOnTT() {
-    long availablePhysicalMemoryOnTT = TaskTrackerStatus.UNAVAILABLE;
+    long availablePhysicalMemoryOnTT = ResourceCalculatorPlugin.UNAVAILABLE;
     if (resourceCalculatorPlugin != null) {
       availablePhysicalMemoryOnTT =
               resourceCalculatorPlugin.getAvailablePhysicalMemorySize();
@@ -1829,7 +1830,7 @@ public class TaskTracker 
    * @return cumulative CPU used time in millisecond
    */
   long getCumulativeCpuTimeOnTT() {
-    long cumulativeCpuTime = TaskTrackerStatus.UNAVAILABLE;
+    long cumulativeCpuTime = ResourceCalculatorPlugin.UNAVAILABLE;
     if (resourceCalculatorPlugin != null) {
       cumulativeCpuTime = resourceCalculatorPlugin.getCumulativeCpuTime();
     }
@@ -1841,7 +1842,7 @@ public class TaskTracker 
    * @return number of processors
    */
   int getNumProcessorsOnTT() {
-    int numProcessors = TaskTrackerStatus.UNAVAILABLE;
+    int numProcessors = ResourceCalculatorPlugin.UNAVAILABLE;
     if (resourceCalculatorPlugin != null) {
       numProcessors = resourceCalculatorPlugin.getNumProcessors();
     }
@@ -1853,7 +1854,7 @@ public class TaskTracker 
    * @return CPU frequency in kHz
    */
   long getCpuFrequencyOnTT() {
-    long cpuFrequency = TaskTrackerStatus.UNAVAILABLE;
+    long cpuFrequency = ResourceCalculatorPlugin.UNAVAILABLE;
     if (resourceCalculatorPlugin != null) {
       cpuFrequency = resourceCalculatorPlugin.getCpuFrequency();
     }
@@ -1865,7 +1866,7 @@ public class TaskTracker 
    * @return CPU usage in %
    */
   float getCpuUsageOnTT() {
-    float cpuUsage = TaskTrackerStatus.UNAVAILABLE;
+    float cpuUsage = ResourceCalculatorPlugin.UNAVAILABLE;
     if (resourceCalculatorPlugin != null) {
       cpuUsage = resourceCalculatorPlugin.getCpuUsage();
     }

Modified: hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java Tue Apr 12 07:56:07 2011
@@ -24,6 +24,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.TaskStatus.State;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 
 import java.io.*;
 import java.util.*;
@@ -57,7 +58,6 @@ public class TaskTrackerStatus implement
   private int maxReduceTasks;
   private NodeHealthStatus healthStatus;
    
-  public static final int UNAVAILABLE = -1;
   /**
    * Class representing a collection of resources on this tasktracker.
    */
@@ -69,12 +69,12 @@ public class TaskTrackerStatus implement
     private long reduceSlotMemorySizeOnTT;
     private long availableSpace;
 
-    private long availableVirtualMemory = UNAVAILABLE; // in byte
-    private long availablePhysicalMemory = UNAVAILABLE; // in byte
-    private int numProcessors = UNAVAILABLE;
-    private long cumulativeCpuTime = UNAVAILABLE; // in millisecond
-    private long cpuFrequency = UNAVAILABLE; // in kHz
-    private float cpuUsage = UNAVAILABLE; // in %
+    private long availableVirtualMemory = ResourceCalculatorPlugin.UNAVAILABLE; // in byte
+    private long availablePhysicalMemory = ResourceCalculatorPlugin.UNAVAILABLE; // in byte
+    private int numProcessors = ResourceCalculatorPlugin.UNAVAILABLE;
+    private long cumulativeCpuTime = ResourceCalculatorPlugin.UNAVAILABLE; // in millisecond
+    private long cpuFrequency = ResourceCalculatorPlugin.UNAVAILABLE; // in kHz
+    private float cpuUsage = ResourceCalculatorPlugin.UNAVAILABLE; // in %
 
     ResourceStatus() {
       totalVirtualMemory = JobConf.DISABLED_MEMORY_LIMIT;
@@ -192,7 +192,7 @@ public class TaskTrackerStatus implement
      */
     void setAvailableVirtualMemory(long availableMem) {
       availableVirtualMemory = availableMem > 0 ?
-                               availableMem : UNAVAILABLE;
+                               availableMem : ResourceCalculatorPlugin.UNAVAILABLE;
     }
 
     /**
@@ -215,7 +215,7 @@ public class TaskTrackerStatus implement
      */
     void setAvailablePhysicalMemory(long availableRAM) {
       availablePhysicalMemory = availableRAM > 0 ?
-                                availableRAM : UNAVAILABLE;
+                                availableRAM : ResourceCalculatorPlugin.UNAVAILABLE;
     }
 
     /**
@@ -236,7 +236,7 @@ public class TaskTrackerStatus implement
      */
     public void setCpuFrequency(long cpuFrequency) {
       this.cpuFrequency = cpuFrequency > 0 ?
-                          cpuFrequency : UNAVAILABLE;
+                          cpuFrequency : ResourceCalculatorPlugin.UNAVAILABLE;
     }
 
     /**
@@ -257,7 +257,7 @@ public class TaskTrackerStatus implement
      */
     public void setNumProcessors(int numProcessors) {
       this.numProcessors = numProcessors > 0 ?
-                           numProcessors : UNAVAILABLE;
+                           numProcessors : ResourceCalculatorPlugin.UNAVAILABLE;
     }
 
     /**
@@ -278,7 +278,7 @@ public class TaskTrackerStatus implement
      */
     public void setCumulativeCpuTime(long cumulativeCpuTime) {
       this.cumulativeCpuTime = cumulativeCpuTime > 0 ?
-                               cumulativeCpuTime : UNAVAILABLE;
+                               cumulativeCpuTime : ResourceCalculatorPlugin.UNAVAILABLE;
     }
 
     /**

Modified: hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/LinuxMemoryCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/LinuxMemoryCalculatorPlugin.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/LinuxMemoryCalculatorPlugin.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/LinuxMemoryCalculatorPlugin.java Tue Apr 12 07:56:07 2011
@@ -20,11 +20,12 @@ package org.apache.hadoop.mapreduce.util
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
 
 /**
  * Plugin to calculate virtual and physical memories on Linux systems.
  * @deprecated 
- * Use {@link org.apache.hadoop.mapreduce.util.LinuxResourceCalculatorPlugin}
+ * Use {@link org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin}
  * instead
  */
 @Deprecated

Modified: hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/MemoryCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/MemoryCalculatorPlugin.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/MemoryCalculatorPlugin.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/mapreduce/util/MemoryCalculatorPlugin.java Tue Apr 12 07:56:07 2011
@@ -27,7 +27,7 @@ import org.apache.hadoop.util.Reflection
 /**
  * Plugin to calculate virtual and physical memories on the system.
  * @deprecated Use
- *             {@link org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin}
+ *             {@link org.apache.hadoop.util.ResourceCalculatorPlugin}
  *             instead
  */
 @Deprecated

Modified: hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/util/ProcfsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/util/ProcfsBasedProcessTree.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/util/ProcfsBasedProcessTree.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/java/org/apache/hadoop/util/ProcfsBasedProcessTree.java Tue Apr 12 07:56:07 2011
@@ -32,7 +32,7 @@ import org.apache.hadoop.classification.
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class ProcfsBasedProcessTree extends
-    org.apache.hadoop.mapreduce.util.ProcfsBasedProcessTree {
+    org.apache.hadoop.yarn.util.ProcfsBasedProcessTree {
 
   public ProcfsBasedProcessTree(String pid) {
     super(pid);

Modified: hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/DummyResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/DummyResourceCalculatorPlugin.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/DummyResourceCalculatorPlugin.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/DummyResourceCalculatorPlugin.java Tue Apr 12 07:56:07 2011
@@ -19,7 +19,7 @@
 package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 
 /**
  * Plugin class to test resource information reported by TT. Use

Modified: hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTTResourceReporting.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTTResourceReporting.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTTResourceReporting.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTTResourceReporting.java Tue Apr 12 07:56:07 2011
@@ -24,9 +24,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.SleepJob;
 import org.apache.hadoop.mapreduce.TaskCounter;
-import org.apache.hadoop.mapreduce.util.LinuxResourceCalculatorPlugin;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
@@ -94,13 +94,13 @@ public class TestTTResourceReporting ext
       long availablePhysicalMemoryOnTT =
           getConf().getLong("availablePmemOnTT", JobConf.DISABLED_MEMORY_LIMIT);
       long cumulativeCpuTime =
-          getConf().getLong("cumulativeCpuTime", TaskTrackerStatus.UNAVAILABLE);
+          getConf().getLong("cumulativeCpuTime", ResourceCalculatorPlugin.UNAVAILABLE);
       long cpuFrequency =
-          getConf().getLong("cpuFrequency", TaskTrackerStatus.UNAVAILABLE);
+          getConf().getLong("cpuFrequency", ResourceCalculatorPlugin.UNAVAILABLE);
       int numProcessors =
-          getConf().getInt("numProcessors", TaskTrackerStatus.UNAVAILABLE);
+          getConf().getInt("numProcessors", ResourceCalculatorPlugin.UNAVAILABLE);
       float cpuUsage =
-          getConf().getFloat("cpuUsage", TaskTrackerStatus.UNAVAILABLE);
+          getConf().getFloat("cpuUsage", ResourceCalculatorPlugin.UNAVAILABLE);
 
       long reportedTotalVirtualMemoryOnTT =
           status.getResourceStatus().getTotalVirtualMemory();

Modified: hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java Tue Apr 12 07:56:07 2011
@@ -73,7 +73,7 @@ public class TestTaskTrackerBlacklisting
     boolean jumpADay = false;
 
     @Override
-    long getTime() {
+    public long getTime() {
       if (!jumpADay) {
         return super.getTime();
       } else {
@@ -168,9 +168,9 @@ public class TestTaskTrackerBlacklisting
           .convertTrackerNameToHostName(tracker));
       if (status != null) {
         NodeHealthStatus healthStatus = tts.getHealthStatus();
-        healthStatus.setNodeHealthy(status.isNodeHealthy());
+        healthStatus.setIsNodeHealthy(status.getIsNodeHealthy());
         healthStatus.setHealthReport(status.getHealthReport());
-        healthStatus.setLastReported(status.getLastReported());
+        healthStatus.setLastHealthReportTime(status.getLastHealthReportTime());
       }
       jobTracker.heartbeat(tts, false, initialContact, 
                            false, responseId);
@@ -200,8 +200,8 @@ public class TestTaskTrackerBlacklisting
     for (String host : hosts) {
       checkReasonForBlackListing(host, nodeUnHealthyReasonSet);
     }
-    status.setNodeHealthy(true);
-    status.setLastReported(System.currentTimeMillis());
+    status.setIsNodeHealthy(true);
+    status.setLastHealthReportTime(System.currentTimeMillis());
     status.setHealthReport("");
     //white list tracker so the further test cases can be
     //using trackers.
@@ -314,8 +314,8 @@ public class TestTaskTrackerBlacklisting
           error,
           jobTracker.getFaultReport(hosts[i]).replace("\n", ""));
     }
-    status.setNodeHealthy(false);
-    status.setLastReported(System.currentTimeMillis());
+    status.setIsNodeHealthy(false);
+    status.setLastHealthReportTime(System.currentTimeMillis());
     status.setHealthReport(error1);
     sendHeartBeat(status, false);
     checkReasonForBlackListing(hosts[0], nodeUnHealthyReasonSet);
@@ -334,8 +334,8 @@ public class TestTaskTrackerBlacklisting
   
   private NodeHealthStatus getUnhealthyNodeStatus(String error) {
     NodeHealthStatus status = new NodeHealthStatus();
-    status.setNodeHealthy(false);
-    status.setLastReported(System.currentTimeMillis());
+    status.setIsNodeHealthy(false);
+    status.setLastHealthReportTime(System.currentTimeMillis());
     status.setHealthReport(error);
     return status;
   }

Modified: hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java Tue Apr 12 07:56:07 2011
@@ -35,12 +35,12 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
-import org.apache.hadoop.mapreduce.util.LinuxResourceCalculatorPlugin;
 import org.apache.hadoop.mapreduce.util.ProcfsBasedProcessTree;
 import org.apache.hadoop.mapreduce.SleepJob;
 import org.apache.hadoop.mapreduce.util.TestProcfsBasedProcessTree;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
 
 import org.junit.After;
 import org.junit.Ignore;

Modified: hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java Tue Apr 12 07:56:07 2011
@@ -699,7 +699,7 @@ public class UtilsForTests {
     }
 
     @Override
-    long getTime() {
+    public long getTime() {
       return time;
     }
   }

Modified: hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapreduce/util/TestLinuxResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapreduce/util/TestLinuxResourceCalculatorPlugin.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapreduce/util/TestLinuxResourceCalculatorPlugin.java (original)
+++ hadoop/mapreduce/branches/MR-279/src/test/mapred/org/apache/hadoop/mapreduce/util/TestLinuxResourceCalculatorPlugin.java Tue Apr 12 07:56:07 2011
@@ -26,7 +26,8 @@ import java.util.Random;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.TaskTrackerStatus;
+import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 import org.junit.Test;
 
 /**
@@ -172,7 +173,7 @@ public class TestLinuxResourceCalculator
     updateStatFile(uTime, nTime, sTime);
     assertEquals(plugin.getCumulativeCpuTime(),
                  FAKE_JIFFY_LENGTH * (uTime + nTime + sTime));
-    assertEquals(plugin.getCpuUsage(), (float)(TaskTrackerStatus.UNAVAILABLE));
+    assertEquals(plugin.getCpuUsage(), (float)(ResourceCalculatorPlugin.UNAVAILABLE));
     
     // Advance the time and sample again to test the CPU usage calculation
     uTime += 100L;

Modified: hadoop/mapreduce/branches/MR-279/src/webapps/job/jobtracker.jspx
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/webapps/job/jobtracker.jspx?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/webapps/job/jobtracker.jspx (original)
+++ hadoop/mapreduce/branches/MR-279/src/webapps/job/jobtracker.jspx Tue Apr 12 07:56:07 2011
@@ -23,7 +23,8 @@
   <jsp:directive.page import="java.util.Date" />
   <jsp:directive.page import="java.util.List" />
   <jsp:directive.page import="org.apache.hadoop.mapreduce.server.jobtracker.JobTrackerJspHelper" />
-  <jsp:directive.page import="org.apache.hadoop.util.*" />
+    <jsp:directive.page import="org.apache.hadoop.util.*" />
+  <jsp:directive.page import="org.apache.hadoop.yarn.util.*" />
   <jsp:directive.page import="org.apache.hadoop.mapreduce.*" />
   <jsp:directive.page import="org.apache.hadoop.mapred.JSPUtil" />
 

Modified: hadoop/mapreduce/branches/MR-279/src/webapps/job/machines.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/src/webapps/job/machines.jsp?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/src/webapps/job/machines.jsp (original)
+++ hadoop/mapreduce/branches/MR-279/src/webapps/job/machines.jsp Tue Apr 12 07:56:07 2011
@@ -84,8 +84,8 @@
       for (Iterator it = c.iterator(); it.hasNext(); ) {
         TaskTrackerStatus tt = (TaskTrackerStatus) it.next();
         long sinceHeartbeat = System.currentTimeMillis() - tt.getLastSeen();
-        boolean isHealthy = tt.getHealthStatus().isNodeHealthy();
-        long sinceHealthCheck = tt.getHealthStatus().getLastReported();
+        boolean isHealthy = tt.getHealthStatus().getIsNodeHealthy();
+        long sinceHealthCheck = tt.getHealthStatus().getLastHealthReportTime();
         String healthString = "";
         if(sinceHealthCheck == 0) {
           healthString = "N/A";

Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerStatus.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerStatus.java (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerStatus.java Tue Apr 12 07:56:07 2011
@@ -1,11 +1,13 @@
 package org.apache.hadoop.yarn.api.records;
 
 public interface ContainerStatus {
-  public abstract ContainerId getContainerId();
-  public abstract ContainerState getState();
-  public abstract int getExitStatus();
+  ContainerId getContainerId();
+  ContainerState getState();
+  String getExitStatus();
+  String getDiagnostics();
   
-  public abstract void setContainerId(ContainerId containerId);
-  public abstract void setState(ContainerState state);
-  public abstract void setExitStatus(int exitStatus);
+  void setContainerId(ContainerId containerId);
+  void setState(ContainerState state);
+  void setExitStatus(String exitStatus);
+  void setDiagnostics(String diagnostics);
 }

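ContainerStatus now carries a free-form exit status plus a diagnostics string. A rough sketch of filling one in through the record factory used elsewhere in this commit (the import paths and the diagnostic text are assumptions, not taken from the diff):

import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class ContainerStatusSketch {
  public static ContainerStatus overLimitStatus() {
    RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
    ContainerStatus status = recordFactory.newRecordInstance(ContainerStatus.class);
    status.setExitStatus("137");  // now a string; the proto default below is "N/A"
    status.setDiagnostics("Container killed after exceeding its virtual memory limit");
    return status;
  }
}
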
Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java Tue Apr 12 07:56:07 2011
@@ -98,15 +98,27 @@ public class ContainerStatusPBImpl exten
     this.containerId = containerId;
   }
   @Override
-  public int getExitStatus() {
+  public String getExitStatus() {
     ContainerStatusProtoOrBuilder p = viaProto ? proto : builder;
     return (p.getExitStatus());
   }
 
   @Override
-  public void setExitStatus(int exitStatus) {
+  public void setExitStatus(String exitStatus) {
     maybeInitBuilder();
-    builder.setExitStatus((exitStatus));
+    builder.setExitStatus(exitStatus);
+  }
+
+  @Override
+  public String getDiagnostics() {
+    ContainerStatusProtoOrBuilder p = viaProto ? proto : builder;
+    return (p.getDiagnostics());    
+  }
+
+  @Override
+  public void setDiagnostics(String diagnostics) {
+    maybeInitBuilder();
+    builder.setDiagnostics(diagnostics);
   }
 
   private ContainerStateProto convertToProtoFormat(ContainerState e) {

Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/proto/yarn_protos.proto
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/proto/yarn_protos.proto?rev=1091316&r1=1091315&r2=1091316&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/proto/yarn_protos.proto (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/proto/yarn_protos.proto Tue Apr 12 07:56:07 2011
@@ -168,7 +168,8 @@ message ContainerLaunchContextProto {
 message ContainerStatusProto {
   optional ContainerIdProto container_id = 1;
   optional ContainerStateProto state = 2;
-  optional int32 exit_status = 3;
+  optional string diagnostics = 3 [default = ""];
+  optional string exit_status = 4 [default = "N/A"];
 }
 
 

Added: hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/LinuxResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/LinuxResourceCalculatorPlugin.java?rev=1091316&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/LinuxResourceCalculatorPlugin.java (added)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/LinuxResourceCalculatorPlugin.java Tue Apr 12 07:56:07 2011
@@ -0,0 +1,412 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
+
+/**
+ * Plugin to calculate resource information on Linux systems.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class LinuxResourceCalculatorPlugin extends ResourceCalculatorPlugin {
+  private static final Log LOG =
+      LogFactory.getLog(LinuxResourceCalculatorPlugin.class);
+
+  /**
+   * proc's meminfo virtual file has keys-values in the format
+   * "key:[ \t]*value[ \t]kB".
+   */
+  private static final String PROCFS_MEMFILE = "/proc/meminfo";
+  private static final Pattern PROCFS_MEMFILE_FORMAT =
+      Pattern.compile("^([a-zA-Z]*):[ \t]*([0-9]*)[ \t]kB");
+
+  // We need the values for the following keys in meminfo
+  private static final String MEMTOTAL_STRING = "MemTotal";
+  private static final String SWAPTOTAL_STRING = "SwapTotal";
+  private static final String MEMFREE_STRING = "MemFree";
+  private static final String SWAPFREE_STRING = "SwapFree";
+  private static final String INACTIVE_STRING = "Inactive";
+
+  /**
+   * Patterns for parsing /proc/cpuinfo
+   */
+  private static final String PROCFS_CPUINFO = "/proc/cpuinfo";
+  private static final Pattern PROCESSOR_FORMAT =
+      Pattern.compile("^processor[ \t]:[ \t]*([0-9]*)");
+  private static final Pattern FREQUENCY_FORMAT =
+      Pattern.compile("^cpu MHz[ \t]*:[ \t]*([0-9.]*)");
+
+  /**
+   * Pattern for parsing /proc/stat
+   */
+  private static final String PROCFS_STAT = "/proc/stat";
+  private static final Pattern CPU_TIME_FORMAT =
+    Pattern.compile("^cpu[ \t]*([0-9]*)" +
+    		            "[ \t]*([0-9]*)[ \t]*([0-9]*)[ \t].*");
+  
+  private String procfsMemFile;
+  private String procfsCpuFile;
+  private String procfsStatFile;
+  long jiffyLengthInMillis;
+
+  private long ramSize = 0;
+  private long swapSize = 0;
+  private long ramSizeFree = 0;  // free ram space on the machine (kB)
+  private long swapSizeFree = 0; // free swap space on the machine (kB)
+  private long inactiveSize = 0; // inactive cache memory (kB)
+  private int numProcessors = 0; // number of processors on the system
+  private long cpuFrequency = 0L; // CPU frequency on the system (kHz)
+  private long cumulativeCpuTime = 0L; // CPU used time since system is on (ms)
+  private long lastCumulativeCpuTime = 0L; // CPU used time read last time (ms)
+  // Unix timestamp while reading the CPU time (ms)
+  private float cpuUsage = ResourceCalculatorPlugin.UNAVAILABLE;
+  private long sampleTime = ResourceCalculatorPlugin.UNAVAILABLE;
+  private long lastSampleTime = ResourceCalculatorPlugin.UNAVAILABLE;
+  private ProcfsBasedProcessTree pTree = null;
+
+  boolean readMemInfoFile = false;
+  boolean readCpuInfoFile = false;
+  
+  /**
+   * Get current time
+   * @return Unix time stamp in millisecond
+   */
+  long getCurrentTime() {
+    return System.currentTimeMillis();
+  }
+  
+  public LinuxResourceCalculatorPlugin() {
+    procfsMemFile = PROCFS_MEMFILE;
+    procfsCpuFile = PROCFS_CPUINFO;
+    procfsStatFile = PROCFS_STAT;
+    jiffyLengthInMillis = ProcfsBasedProcessTree.JIFFY_LENGTH_IN_MILLIS;
+    String pid = System.getenv().get("JVM_PID");
+    pTree = new ProcfsBasedProcessTree(pid);
+  }
+  
+  /**
+   * Constructor which allows assigning the /proc/ directories. This will be
+   * used only in unit tests
+   * @param procfsMemFile fake file for /proc/meminfo
+   * @param procfsCpuFile fake file for /proc/cpuinfo
+   * @param procfsStatFile fake file for /proc/stat
+   * @param jiffyLengthInMillis fake jiffy length value
+   */
+  public LinuxResourceCalculatorPlugin(String procfsMemFile,
+                                       String procfsCpuFile,
+                                       String procfsStatFile,
+                                       long jiffyLengthInMillis) {
+    this.procfsMemFile = procfsMemFile;
+    this.procfsCpuFile = procfsCpuFile;
+    this.procfsStatFile = procfsStatFile;
+    this.jiffyLengthInMillis = jiffyLengthInMillis;
+    String pid = System.getenv().get("JVM_PID");
+    pTree = new ProcfsBasedProcessTree(pid);
+  }
+
+  /**
+   * Read /proc/meminfo, parse and compute memory information only once
+   */
+  private void readProcMemInfoFile() {
+    readProcMemInfoFile(false);
+  }
+
+  /**
+   * Read /proc/meminfo, parse and compute memory information
+   * @param readAgain if false, read only on the first time
+   */
+  private void readProcMemInfoFile(boolean readAgain) {
+
+    if (readMemInfoFile && !readAgain) {
+      return;
+    }
+
+    // Read "/proc/memInfo" file
+    BufferedReader in = null;
+    FileReader fReader = null;
+    try {
+      fReader = new FileReader(procfsMemFile);
+      in = new BufferedReader(fReader);
+    } catch (FileNotFoundException f) {
+      // shouldn't happen....
+      return;
+    }
+
+    Matcher mat = null;
+
+    try {
+      String str = in.readLine();
+      while (str != null) {
+        mat = PROCFS_MEMFILE_FORMAT.matcher(str);
+        if (mat.find()) {
+          if (mat.group(1).equals(MEMTOTAL_STRING)) {
+            ramSize = Long.parseLong(mat.group(2));
+          } else if (mat.group(1).equals(SWAPTOTAL_STRING)) {
+            swapSize = Long.parseLong(mat.group(2));
+          } else if (mat.group(1).equals(MEMFREE_STRING)) {
+            ramSizeFree = Long.parseLong(mat.group(2));
+          } else if (mat.group(1).equals(SWAPFREE_STRING)) {
+            swapSizeFree = Long.parseLong(mat.group(2));
+          } else if (mat.group(1).equals(INACTIVE_STRING)) {
+            inactiveSize = Long.parseLong(mat.group(2));
+          }
+        }
+        str = in.readLine();
+      }
+    } catch (IOException io) {
+      LOG.warn("Error reading the stream " + io);
+    } finally {
+      // Close the streams
+      try {
+        fReader.close();
+        try {
+          in.close();
+        } catch (IOException i) {
+          LOG.warn("Error closing the stream " + in);
+        }
+      } catch (IOException i) {
+        LOG.warn("Error closing the stream " + fReader);
+      }
+    }
+
+    readMemInfoFile = true;
+  }
+
+  /**
+   * Read /proc/cpuinfo, parse and calculate CPU information
+   */
+  private void readProcCpuInfoFile() {
+    // This directory needs to be read only once
+    if (readCpuInfoFile) {
+      return;
+    }
+    // Read "/proc/cpuinfo" file
+    BufferedReader in = null;
+    FileReader fReader = null;
+    try {
+      fReader = new FileReader(procfsCpuFile);
+      in = new BufferedReader(fReader);
+    } catch (FileNotFoundException f) {
+      // shouldn't happen....
+      return;
+    }
+    Matcher mat = null;
+    try {
+      numProcessors = 0;
+      String str = in.readLine();
+      while (str != null) {
+        mat = PROCESSOR_FORMAT.matcher(str);
+        if (mat.find()) {
+          numProcessors++;
+        }
+        mat = FREQUENCY_FORMAT.matcher(str);
+        if (mat.find()) {
+          cpuFrequency = (long)(Double.parseDouble(mat.group(1)) * 1000); // kHz
+        }
+        str = in.readLine();
+      }
+    } catch (IOException io) {
+      LOG.warn("Error reading the stream " + io);
+    } finally {
+      // Close the streams
+      try {
+        fReader.close();
+        try {
+          in.close();
+        } catch (IOException i) {
+          LOG.warn("Error closing the stream " + in);
+        }
+      } catch (IOException i) {
+        LOG.warn("Error closing the stream " + fReader);
+      }
+    }
+    readCpuInfoFile = true;
+  }
+
+  /**
+   * Read /proc/stat file, parse and calculate cumulative CPU
+   */
+  private void readProcStatFile() {
+    // Read "/proc/stat" file
+    BufferedReader in = null;
+    FileReader fReader = null;
+    try {
+      fReader = new FileReader(procfsStatFile);
+      in = new BufferedReader(fReader);
+    } catch (FileNotFoundException f) {
+      // shouldn't happen....
+      return;
+    }
+
+    Matcher mat = null;
+    try {
+      String str = in.readLine();
+      while (str != null) {
+        mat = CPU_TIME_FORMAT.matcher(str);
+        if (mat.find()) {
+          long uTime = Long.parseLong(mat.group(1));
+          long nTime = Long.parseLong(mat.group(2));
+          long sTime = Long.parseLong(mat.group(3));
+          cumulativeCpuTime = uTime + nTime + sTime; // milliseconds
+          break;
+        }
+        str = in.readLine();
+      }
+      cumulativeCpuTime *= jiffyLengthInMillis;
+    } catch (IOException io) {
+      LOG.warn("Error reading the stream " + io);
+    } finally {
+      // Close the streams
+      try {
+        fReader.close();
+        try {
+          in.close();
+        } catch (IOException i) {
+          LOG.warn("Error closing the stream " + in);
+        }
+      } catch (IOException i) {
+        LOG.warn("Error closing the stream " + fReader);
+      }
+    }
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getPhysicalMemorySize() {
+    readProcMemInfoFile();
+    return ramSize * 1024;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getVirtualMemorySize() {
+    readProcMemInfoFile();
+    return (ramSize + swapSize) * 1024;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getAvailablePhysicalMemorySize() {
+    readProcMemInfoFile(true);
+    return (ramSizeFree + inactiveSize) * 1024;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getAvailableVirtualMemorySize() {
+    readProcMemInfoFile(true);
+    return (ramSizeFree + swapSizeFree + inactiveSize) * 1024;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public int getNumProcessors() {
+    readProcCpuInfoFile();
+    return numProcessors;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getCpuFrequency() {
+    readProcCpuInfoFile();
+    return cpuFrequency;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getCumulativeCpuTime() {
+    readProcStatFile();
+    return cumulativeCpuTime;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public float getCpuUsage() {
+    readProcStatFile();
+    sampleTime = getCurrentTime();
+    if (lastSampleTime == ResourceCalculatorPlugin.UNAVAILABLE ||
+        lastSampleTime > sampleTime) {
+      // lastSampleTime > sampleTime may happen when the system time is changed
+      lastSampleTime = sampleTime;
+      lastCumulativeCpuTime = cumulativeCpuTime;
+      return cpuUsage;
+    }
+    // When lastSampleTime is sufficiently old, update cpuUsage.
+    // Also take a sample of the current time and cumulative CPU time for the
+    // use of the next calculation.
+    final long MINIMUM_UPDATE_INTERVAL = 10 * jiffyLengthInMillis;
+    if (sampleTime > lastSampleTime + MINIMUM_UPDATE_INTERVAL) {
+      cpuUsage = (float)(cumulativeCpuTime - lastCumulativeCpuTime) * 100F /
+                 ((float)(sampleTime - lastSampleTime) * getNumProcessors());
+      lastSampleTime = sampleTime;
+      lastCumulativeCpuTime = cumulativeCpuTime;
+    }
+    return cpuUsage;
+  }
+
+  /**
+   * Test the {@link LinuxResourceCalculatorPlugin}
+   *
+   * @param args
+   */
+  public static void main(String[] args) {
+    LinuxResourceCalculatorPlugin plugin = new LinuxResourceCalculatorPlugin();
+    System.out.println("Physical memory Size (bytes) : "
+        + plugin.getPhysicalMemorySize());
+    System.out.println("Total Virtual memory Size (bytes) : "
+        + plugin.getVirtualMemorySize());
+    System.out.println("Available Physical memory Size (bytes) : "
+        + plugin.getAvailablePhysicalMemorySize());
+    System.out.println("Total Available Virtual memory Size (bytes) : "
+        + plugin.getAvailableVirtualMemorySize());
+    System.out.println("Number of Processors : " + plugin.getNumProcessors());
+    System.out.println("CPU frequency (kHz) : " + plugin.getCpuFrequency());
+    System.out.println("Cumulative CPU time (ms) : " +
+            plugin.getCumulativeCpuTime());
+    try {
+      // Sleep so we can compute the CPU usage
+      Thread.sleep(500L);
+    } catch (InterruptedException e) {
+      // do nothing
+    }
+    System.out.println("CPU usage % : " + plugin.getCpuUsage());
+  }
+
+  @Override
+  public ProcResourceValues getProcResourceValues() {
+    pTree = pTree.getProcessTree();
+    long cpuTime = pTree.getCumulativeCpuTime();
+    long pMem = pTree.getCumulativeRssmem();
+    long vMem = pTree.getCumulativeVmem();
+    return new ProcResourceValues(cpuTime, pMem, vMem);
+  }
+}
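
The CPU-usage figure returned by getCpuUsage() above is worth spelling out:
usage is the growth in cumulative CPU time divided by the elapsed wall-clock
time and the number of processors, and the figure is only recomputed once at
least ten jiffies have passed since the previous sample. Below is a minimal,
self-contained sketch of that arithmetic; the CpuUsageSampler class and its
field names are illustrative only and are not part of this patch.

    // Illustrative sketch of the sampling arithmetic used by getCpuUsage().
    class CpuUsageSampler {
      private long lastSampleTimeMs = -1;    // wall-clock time of the last sample
      private long lastCumulativeCpuMs = 0;  // cumulative CPU time at the last sample
      private float cpuUsagePercent = 0f;    // last computed usage, in %

      // Minimum gap between samples; 100 ms corresponds to 10 jiffies at 100 Hz.
      private static final long MIN_INTERVAL_MS = 100;

      float sample(long nowMs, long cumulativeCpuMs, int numProcessors) {
        if (lastSampleTimeMs == -1 || lastSampleTimeMs > nowMs) {
          // First sample, or the system clock moved backwards: just remember state.
          lastSampleTimeMs = nowMs;
          lastCumulativeCpuMs = cumulativeCpuMs;
          return cpuUsagePercent;
        }
        if (nowMs > lastSampleTimeMs + MIN_INTERVAL_MS) {
          // usage % = delta CPU time * 100 / (delta wall time * number of cores)
          cpuUsagePercent = (cumulativeCpuMs - lastCumulativeCpuMs) * 100f
              / ((nowMs - lastSampleTimeMs) * (float) numProcessors);
          lastSampleTimeMs = nowMs;
          lastCumulativeCpuMs = cumulativeCpuMs;
        }
        return cpuUsagePercent;
      }
    }

For example, if cumulative CPU time grew by 1,000 ms over a 2,000 ms window on
a four-core machine, the reported usage is 1000 * 100 / (2000 * 4) = 12.5%.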

Added: hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java?rev=1091316&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java (added)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java Tue Apr 12 07:56:07 2011
@@ -0,0 +1,631 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * A Proc file-system based ProcessTree. Works only on Linux.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class ProcfsBasedProcessTree {
+
+  static final Log LOG = LogFactory
+      .getLog(ProcfsBasedProcessTree.class);
+
+  private static final String PROCFS = "/proc/";
+
+  private static final Pattern PROCFS_STAT_FILE_FORMAT = Pattern.compile(
+    "^([0-9-]+)\\s([^\\s]+)\\s[^\\s]\\s([0-9-]+)\\s([0-9-]+)\\s([0-9-]+)\\s" +
+    "([0-9-]+\\s){7}([0-9]+)\\s([0-9]+)\\s([0-9-]+\\s){7}([0-9]+)\\s([0-9]+)" +
+    "(\\s[0-9-]+){15}");
+
+  public static final String PROCFS_STAT_FILE = "stat";
+  public static final String PROCFS_CMDLINE_FILE = "cmdline";
+  public static final long PAGE_SIZE;
+  static {
+    ShellCommandExecutor shellExecutor =
+            new ShellCommandExecutor(new String[]{"getconf",  "PAGESIZE"});
+    long pageSize = -1;
+    try {
+      shellExecutor.execute();
+      pageSize = Long.parseLong(shellExecutor.getOutput().replace("\n", ""));
+    } catch (IOException e) {
+      LOG.error(StringUtils.stringifyException(e));
+    } finally {
+      PAGE_SIZE = pageSize;
+    }
+  }
+  public static final long JIFFY_LENGTH_IN_MILLIS; // in millisecond
+  static {
+    ShellCommandExecutor shellExecutor =
+            new ShellCommandExecutor(new String[]{"getconf",  "CLK_TCK"});
+    long jiffiesPerSecond = -1;
+    try {
+      shellExecutor.execute();
+      jiffiesPerSecond = Long.parseLong(shellExecutor.getOutput().replace("\n", ""));
+    } catch (IOException e) {
+      LOG.error(StringUtils.stringifyException(e));
+    } finally {
+      JIFFY_LENGTH_IN_MILLIS = jiffiesPerSecond != -1 ?
+                     Math.round(1000D / jiffiesPerSecond) : -1;
+    }
+  }
+
+  // the root of the proc file system; configurable to a test directory
+  // to enable testing.
+  private String procfsDir;
+  
+  protected final Integer pid;
+  private Long cpuTime = 0L;
+  private boolean setsidUsed = false;
+
+  protected Map<Integer, ProcessInfo> processTree =
+    new HashMap<Integer, ProcessInfo>();
+
+  public ProcfsBasedProcessTree(String pid) {
+    this(pid, false);
+  }
+
+  public ProcfsBasedProcessTree(String pid, boolean setsidUsed) {
+    this(pid, setsidUsed, PROCFS);
+  }
+
+  /**
+   * Build a new process tree rooted at the pid.
+   * 
+   * This method is provided mainly for testing purposes, where
+   * the root of the proc file system can be adjusted.
+   * 
+   * @param pid root of the process tree
+   * @param setsidUsed true, if setsid was used for the root pid
+   * @param procfsDir the root of a proc file system - only used for testing. 
+   */
+  public ProcfsBasedProcessTree(String pid, boolean setsidUsed,
+      String procfsDir) {
+    this.pid = getValidPID(pid);
+    this.setsidUsed = setsidUsed;
+    this.procfsDir = procfsDir;
+  }
+
+  /**
+   * Checks if the ProcfsBasedProcessTree is available on this system.
+   * 
+   * @return true if ProcfsBasedProcessTree is available. False otherwise.
+   */
+  public static boolean isAvailable() {
+    try {
+      String osName = System.getProperty("os.name");
+      if (!osName.startsWith("Linux")) {
+        LOG.info("ProcfsBasedProcessTree currently is supported only on "
+            + "Linux.");
+        return false;
+      }
+    } catch (SecurityException se) {
+      LOG.warn("Failed to get Operating System name. " + se);
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * Get the process-tree with the latest state. If the root process is not
+   * alive, an empty tree will be returned.
+   * 
+   * @return the process-tree with the latest state.
+   */
+  public ProcfsBasedProcessTree getProcessTree() {
+    if (pid != -1) {
+      // Get the list of processes
+      List<Integer> processList = getProcessList();
+
+      Map<Integer, ProcessInfo> allProcessInfo = new HashMap<Integer, ProcessInfo>();
+      
+      // cache the processTree to get the age for processes
+      Map<Integer, ProcessInfo> oldProcs = 
+              new HashMap<Integer, ProcessInfo>(processTree);
+      processTree.clear();
+
+      ProcessInfo me = null;
+      for (Integer proc : processList) {
+        // Get information for each process
+        ProcessInfo pInfo = new ProcessInfo(proc);
+        if (constructProcessInfo(pInfo, procfsDir) != null) {
+          allProcessInfo.put(proc, pInfo);
+          if (proc.equals(this.pid)) {
+            me = pInfo; // cache 'me'
+            processTree.put(proc, pInfo);
+          }
+        }
+      }
+
+      if (me == null) {
+        return this; 
+      }
+
+      // Add each process to its parent.
+      for (Map.Entry<Integer, ProcessInfo> entry : allProcessInfo.entrySet()) {
+        Integer pID = entry.getKey();
+        if (pID != 1) {
+          ProcessInfo pInfo = entry.getValue();
+          ProcessInfo parentPInfo = allProcessInfo.get(pInfo.getPpid());
+          if (parentPInfo != null) {
+            parentPInfo.addChild(pInfo);
+          }
+        }
+      }
+
+      // now start constructing the process-tree
+      LinkedList<ProcessInfo> pInfoQueue = new LinkedList<ProcessInfo>();
+      pInfoQueue.addAll(me.getChildren());
+      while (!pInfoQueue.isEmpty()) {
+        ProcessInfo pInfo = pInfoQueue.remove();
+        if (!processTree.containsKey(pInfo.getPid())) {
+          processTree.put(pInfo.getPid(), pInfo);
+        }
+        pInfoQueue.addAll(pInfo.getChildren());
+      }
+
+      // update age values and compute the number of jiffies since last update
+      for (Map.Entry<Integer, ProcessInfo> procs : processTree.entrySet()) {
+        ProcessInfo oldInfo = oldProcs.get(procs.getKey());
+        if (procs.getValue() != null) {
+          procs.getValue().updateJiffy(oldInfo);
+          if (oldInfo != null) {
+            procs.getValue().updateAge(oldInfo);  
+          }
+        }
+      }
+
+      if (LOG.isDebugEnabled()) {
+        // Log.debug the ProcfsBasedProcessTree
+        LOG.debug(this.toString());
+      }
+    }
+    return this;
+  }
+
+  /**
+   * Verify that the process id of this process tree is the same as its
+   * process group id.
+   *
+   * @return true if the pid matches its process group id, or if the process
+   *         has already finished execution
+   */
+  public boolean checkPidPgrpidForMatch() {
+    return checkPidPgrpidForMatch(pid, PROCFS);
+  }
+
+  public static boolean checkPidPgrpidForMatch(int _pid, String procfs) {
+    // Get information for this process
+    ProcessInfo pInfo = new ProcessInfo(_pid);
+    pInfo = constructProcessInfo(pInfo, procfs);
+    // null if process group leader finished execution; issue no warning
+    // make sure that pid and its pgrpId match
+    return pInfo == null || pInfo.getPgrpId().equals(_pid);
+  }
+
+  private static final String PROCESSTREE_DUMP_FORMAT =
+      "\t|- %d %d %d %d %s %d %d %d %d %s\n";
+
+  public List<Integer> getCurrentProcessIDs() {
+    List<Integer> currentPIDs = new ArrayList<Integer>();
+    currentPIDs.addAll(processTree.keySet());
+    return currentPIDs;
+  }
+
+  /**
+   * Get a dump of the process-tree.
+   * 
+   * @return a string containing a dump of the information of all the
+   *         processes in the process-tree
+   */
+  public String getProcessTreeDump() {
+    StringBuilder ret = new StringBuilder();
+    // The header.
+    ret.append(String.format("\t|- PID PPID PGRPID SESSID CMD_NAME "
+        + "USER_MODE_TIME(MILLIS) SYSTEM_TIME(MILLIS) VMEM_USAGE(BYTES) "
+        + "RSSMEM_USAGE(PAGES) FULL_CMD_LINE\n"));
+    for (ProcessInfo p : processTree.values()) {
+      if (p != null) {
+        ret.append(String.format(PROCESSTREE_DUMP_FORMAT, p.getPid(), p
+            .getPpid(), p.getPgrpId(), p.getSessionId(), p.getName(), p
+            .getUtime(), p.getStime(), p.getVmem(), p.getRssmemPage(), p
+            .getCmdLine(procfsDir)));
+      }
+    }
+    return ret.toString();
+  }
+
+  /**
+   * Get the cumulative virtual memory used by all the processes in the
+   * process-tree.
+   * 
+   * @return cumulative virtual memory used by the process-tree in bytes.
+   */
+  public long getCumulativeVmem() {
+    // include all processes; all processes will be older than 0.
+    return getCumulativeVmem(0);
+  }
+
+  /**
+   * Get the cumulative resident set size (rss) memory used by all the processes
+   * in the process-tree.
+   *
+   * @return cumulative rss memory used by the process-tree in bytes. return 0
+   *         if it cannot be calculated
+   */
+  public long getCumulativeRssmem() {
+    // include all processes; all processes will be older than 0.
+    return getCumulativeRssmem(0);
+  }
+
+  /**
+   * Get the cumulative virtual memory used by all the processes in the
+   * process-tree that are older than the passed in age.
+   * 
+   * @param olderThanAge processes above this age are included in the
+   *                      memory addition
+   * @return cumulative virtual memory used by the process-tree in bytes,
+   *          for processes older than this age.
+   */
+  public long getCumulativeVmem(int olderThanAge) {
+    long total = 0;
+    for (ProcessInfo p : processTree.values()) {
+      if ((p != null) && (p.getAge() > olderThanAge)) {
+        total += p.getVmem();
+      }
+    }
+    return total;
+  }
+  
+  /**
+   * Get the cumulative resident set size (rss) memory used by all the processes
+   * in the process-tree that are older than the passed in age.
+   *
+   * @param olderThanAge processes above this age are included in the
+   *                      memory addition
+   * @return cumulative rss memory used by the process-tree in bytes,
+   *          for processes older than this age. return 0 if it cannot be
+   *          calculated
+   */
+  public long getCumulativeRssmem(int olderThanAge) {
+    if (PAGE_SIZE < 0) {
+      return 0;
+    }
+    long totalPages = 0;
+    for (ProcessInfo p : processTree.values()) {
+      if ((p != null) && (p.getAge() > olderThanAge)) {
+        totalPages += p.getRssmemPage();
+      }
+    }
+    return totalPages * PAGE_SIZE; // convert # pages to byte
+  }
+
+  /**
+   * Get the CPU time in milliseconds used by all the processes in the
+   * process-tree since the process-tree was created.
+   *
+   * @return cumulative CPU time in milliseconds since the process-tree was
+   *         created; 0 if it cannot be calculated
+   */
+  public long getCumulativeCpuTime() {
+    if (JIFFY_LENGTH_IN_MILLIS < 0) {
+      return 0;
+    }
+    long incJiffies = 0;
+    for (ProcessInfo p : processTree.values()) {
+      if (p != null) {
+        incJiffies += p.dtime;
+      }
+    }
+    cpuTime += incJiffies * JIFFY_LENGTH_IN_MILLIS;
+    return cpuTime;
+  }
+
+  private static Integer getValidPID(String pid) {
+    Integer retPid = -1;
+    try {
+      retPid = Integer.parseInt(pid);
+      if (retPid <= 0) {
+        retPid = -1;
+      }
+    } catch (NumberFormatException nfe) {
+      retPid = -1;
+    }
+    return retPid;
+  }
+
+  /**
+   * Get the list of all processes in the system.
+   */
+  private List<Integer> getProcessList() {
+    String[] processDirs = (new File(procfsDir)).list();
+    List<Integer> processList = new ArrayList<Integer>();
+
+    for (String dir : processDirs) {
+      try {
+        int pd = Integer.parseInt(dir);
+        if ((new File(procfsDir, dir)).isDirectory()) {
+          processList.add(Integer.valueOf(pd));
+        }
+      } catch (NumberFormatException n) {
+        // skip this directory
+      } catch (SecurityException s) {
+        // skip this process
+      }
+    }
+    return processList;
+  }
+
+  /**
+   * Construct the ProcessInfo using the process' PID and a procfs rooted at
+   * the specified directory, and return it. The configurable procfs root is
+   * provided mainly to assist testing.
+   * 
+   * Returns null on failing to read from procfs.
+   *
+   * @param pinfo ProcessInfo that needs to be updated
+   * @param procfsDir root of the proc file system
+   * @return updated ProcessInfo, null on errors.
+   */
+  private static ProcessInfo constructProcessInfo(ProcessInfo pinfo, 
+                                                    String procfsDir) {
+    ProcessInfo ret = null;
+    // Read "procfsDir/<pid>/stat" file - typically /proc/<pid>/stat
+    BufferedReader in = null;
+    FileReader fReader = null;
+    try {
+      File pidDir = new File(procfsDir, String.valueOf(pinfo.getPid()));
+      fReader = new FileReader(new File(pidDir, PROCFS_STAT_FILE));
+      in = new BufferedReader(fReader);
+    } catch (FileNotFoundException f) {
+      // The process vanished in the interim!
+      LOG.warn("The process " + pinfo.getPid()
+          + " may have finished in the interim.");
+      return ret;
+    }
+
+    ret = pinfo;
+    try {
+      String str = in.readLine(); // only one line
+      Matcher m = PROCFS_STAT_FILE_FORMAT.matcher(str);
+      boolean mat = m.find();
+      if (mat) {
+        // Set (name) (ppid) (pgrpId) (session) (utime) (stime) (vsize) (rss)
+        pinfo.updateProcessInfo(m.group(2), Integer.parseInt(m.group(3)),
+                Integer.parseInt(m.group(4)), Integer.parseInt(m.group(5)),
+                Long.parseLong(m.group(7)), Long.parseLong(m.group(8)),
+                Long.parseLong(m.group(10)), Long.parseLong(m.group(11)));
+      } else {
+        LOG.warn("Unexpected: procfs stat file is not in the expected format"
+            + " for process with pid " + pinfo.getPid());
+        ret = null;
+      }
+    } catch (IOException io) {
+      LOG.warn("Error reading the stream " + io);
+      ret = null;
+    } finally {
+      // Close the streams
+      try {
+        fReader.close();
+        try {
+          in.close();
+        } catch (IOException i) {
+          LOG.warn("Error closing the stream " + in);
+        }
+      } catch (IOException i) {
+        LOG.warn("Error closing the stream " + fReader);
+      }
+    }
+
+    return ret;
+  }
+
+  /**
+   * Returns a string listing the PIDs of the processes present in the
+   * ProcfsBasedProcessTree. Output format: [pid pid ..]
+   */
+  public String toString() {
+    StringBuffer pTree = new StringBuffer("[ ");
+    for (Integer p : processTree.keySet()) {
+      pTree.append(p);
+      pTree.append(" ");
+    }
+    pTree.append("]");
+    return pTree.toString();
+  }
+
+  /**
+   * 
+   * Class containing information of a process.
+   * 
+   */
+  private static class ProcessInfo {
+    private Integer pid; // process-id
+    private String name; // command name
+    private Integer pgrpId; // process group-id
+    private Integer ppid; // parent process-id
+    private Integer sessionId; // session-id
+    private Long vmem; // virtual memory usage
+    private Long rssmemPage; // rss memory usage in # of pages
+    private Long utime = 0L; // # of jiffies in user mode
+    private Long stime = 0L; // # of jiffies in kernel mode
+    // how many times has this process been seen alive
+    private int age; 
+
+    // # of jiffies used since last update:
+    private Long dtime = 0L;
+    // dtime = (utime + stime) - (utimeOld + stimeOld)
+    // We need this to compute the cumulative CPU time
+    // because the subprocess may finish earlier than root process
+
+    private List<ProcessInfo> children = new ArrayList<ProcessInfo>(); // list of children
+
+    public ProcessInfo(int pid) {
+      this.pid = Integer.valueOf(pid);
+      // seeing this the first time.
+      this.age = 1;
+    }
+
+    public Integer getPid() {
+      return pid;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public Integer getPgrpId() {
+      return pgrpId;
+    }
+
+    public Integer getPpid() {
+      return ppid;
+    }
+
+    public Integer getSessionId() {
+      return sessionId;
+    }
+
+    public Long getVmem() {
+      return vmem;
+    }
+
+    public Long getUtime() {
+      return utime;
+    }
+
+    public Long getStime() {
+      return stime;
+    }
+
+    public Long getDtime() {
+      return dtime;
+    }
+
+    public Long getRssmemPage() { // get rss # of pages
+      return rssmemPage;
+    }
+
+    public int getAge() {
+      return age;
+    }
+    
+    public boolean isParent(ProcessInfo p) {
+      if (pid.equals(p.getPpid())) {
+        return true;
+      }
+      return false;
+    }
+
+    public void updateProcessInfo(String name, Integer ppid, Integer pgrpId,
+        Integer sessionId, Long utime, Long stime, Long vmem, Long rssmem) {
+      this.name = name;
+      this.ppid = ppid;
+      this.pgrpId = pgrpId;
+      this.sessionId = sessionId;
+      this.utime = utime;
+      this.stime = stime;
+      this.vmem = vmem;
+      this.rssmemPage = rssmem;
+    }
+
+    public void updateJiffy(ProcessInfo oldInfo) {
+      this.dtime = (oldInfo == null ? this.utime + this.stime
+              : (this.utime + this.stime) - (oldInfo.utime + oldInfo.stime));
+    }
+
+    public void updateAge(ProcessInfo oldInfo) {
+      this.age = oldInfo.age + 1;
+    }
+    
+    public boolean addChild(ProcessInfo p) {
+      return children.add(p);
+    }
+
+    public List<ProcessInfo> getChildren() {
+      return children;
+    }
+
+    public String getCmdLine(String procfsDir) {
+      String ret = "N/A";
+      if (pid == null) {
+        return ret;
+      }
+      BufferedReader in = null;
+      FileReader fReader = null;
+      try {
+        fReader =
+            new FileReader(new File(new File(procfsDir, pid.toString()),
+                PROCFS_CMDLINE_FILE));
+      } catch (FileNotFoundException f) {
+        // The process vanished in the interim!
+        return ret;
+      }
+
+      in = new BufferedReader(fReader);
+
+      try {
+        ret = in.readLine(); // only one line
+        if (ret == null) {
+          ret = "N/A";
+        } else {
+          ret = ret.replace('\0', ' '); // Replace each null char with a space
+          if (ret.equals("")) {
+            // The cmdline might be empty because the process is swapped out or
+            // is a zombie.
+            ret = "N/A";
+          }
+        }
+      } catch (IOException io) {
+        LOG.warn("Error reading the stream " + io);
+        ret = "N/A";
+      } finally {
+        // Close the streams
+        try {
+          fReader.close();
+          try {
+            in.close();
+          } catch (IOException i) {
+            LOG.warn("Error closing the stream " + in);
+          }
+        } catch (IOException i) {
+          LOG.warn("Error closing the stream " + fReader);
+        }
+      }
+
+      return ret;
+    }
+  }
+}
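
To make the intended call pattern of ProcfsBasedProcessTree concrete, here is
a short, hypothetical driver: build the tree rooted at some pid, refresh it
with getProcessTree(), and read the cumulative figures. The pid argument, the
polling loop and the sleep interval below are illustrative only.

    import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree;

    public class ProcessTreeMonitorSketch {
      public static void main(String[] args) throws InterruptedException {
        String rootPid = args.length > 0 ? args[0] : "1";    // pid to monitor
        ProcfsBasedProcessTree tree = new ProcfsBasedProcessTree(rootPid);
        for (int i = 0; i < 3; i++) {
          tree = tree.getProcessTree();                      // re-read /proc
          System.out.println("vmem (bytes) : " + tree.getCumulativeVmem());
          System.out.println("rss  (bytes) : " + tree.getCumulativeRssmem());
          System.out.println("cpu  (ms)    : " + tree.getCumulativeCpuTime());
          System.out.println("pids         : " + tree);
          Thread.sleep(1000L);                               // poll once a second
        }
      }
    }

Each refresh also bumps the age of every process that was already in the tree,
so a caller can pass 1 to getCumulativeVmem(int) or getCumulativeRssmem(int)
to ignore processes seen for the first time in the current sample.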

Added: hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java?rev=1091316&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java (added)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java Tue Apr 12 07:56:07 2011
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * Plugin to calculate resource information on the system.
+ * 
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public abstract class ResourceCalculatorPlugin extends Configured {
+
+  public static final int UNAVAILABLE = -1;
+
+  /**
+   * Obtain the total size of the virtual memory present in the system.
+   * 
+   * @return virtual memory size in bytes.
+   */
+  public abstract long getVirtualMemorySize();
+
+  /**
+   * Obtain the total size of the physical memory present in the system.
+   * 
+   * @return physical memory size in bytes.
+   */
+  public abstract long getPhysicalMemorySize();
+
+  /**
+   * Obtain the total size of the available virtual memory present
+   * in the system.
+   *
+   * @return available virtual memory size in bytes.
+   */
+  public abstract long getAvailableVirtualMemorySize();
+
+  /**
+   * Obtain the total size of the available physical memory present
+   * in the system.
+   *
+   * @return available physical memory size in bytes.
+   */
+  public abstract long getAvailablePhysicalMemorySize();
+
+  /**
+   * Obtain the total number of processors present on the system.
+   *
+   * @return number of processors
+   */
+  public abstract int getNumProcessors();
+
+  /**
+   * Obtain the CPU frequency of the system.
+   *
+   * @return CPU frequency in kHz
+   */
+  public abstract long getCpuFrequency();
+
+  /**
+   * Obtain the cumulative CPU time since the system was booted.
+   *
+   * @return cumulative CPU time in milliseconds
+   */
+  public abstract long getCumulativeCpuTime();
+
+  /**
+   * Obtain the CPU usage % of the machine. Returns -1 if it is unavailable.
+   *
+   * @return CPU usage in %
+   */
+  public abstract float getCpuUsage();
+
+  /**
+   * Obtain the resource status used by the current process tree.
+   */
+  @InterfaceAudience.Private
+  @InterfaceStability.Unstable
+  public abstract ProcResourceValues getProcResourceValues();
+
+  public class ProcResourceValues {
+    private final long cumulativeCpuTime;
+    private final long physicalMemorySize;
+    private final long virtualMemorySize;
+    public ProcResourceValues(long cumulativeCpuTime, long physicalMemorySize,
+                              long virtualMemorySize) {
+      this.cumulativeCpuTime = cumulativeCpuTime;
+      this.physicalMemorySize = physicalMemorySize;
+      this.virtualMemorySize = virtualMemorySize;
+    }
+    /**
+     * Obtain the physical memory size used by the current process tree.
+     * @return physical memory size in bytes.
+     */
+    public long getPhysicalMemorySize() {
+      return physicalMemorySize;
+    }
+
+    /**
+     * Obtain the virtual memory size used by the current process tree.
+     * @return virtual memory size in bytes.
+     */
+    public long getVirtualMemorySize() {
+      return virtualMemorySize;
+    }
+
+    /**
+     * Obtain the cumulative CPU time used by the current process tree.
+     * @return cumulative CPU time in milliseconds
+     */
+    public long getCumulativeCpuTime() {
+      return cumulativeCpuTime;
+    }
+  }
+
+  /**
+   * Get the ResourceCalculatorPlugin from the class name and configure it. If
+   * the class name is null, this method will try to return a resource
+   * calculator plugin available for this system.
+   * 
+   * @param clazz class-name
+   * @param conf configure the plugin with this.
+   * @return ResourceCalculatorPlugin
+   */
+  public static ResourceCalculatorPlugin getResourceCalculatorPlugin(
+      Class<? extends ResourceCalculatorPlugin> clazz, Configuration conf) {
+
+    if (clazz != null) {
+      return ReflectionUtils.newInstance(clazz, conf);
+    }
+
+    // FIXME
+    // No class given, try a os specific class
+//    try {
+//      String osName = System.getProperty("os.name");
+//      if (osName.startsWith("Linux")) {
+//        return new LinuxResourceCalculatorPlugin();
+//      }
+//    } catch (SecurityException se) {
+//      // Failed to get Operating System name.
+//      return null;
+//    }
+
+    // Not supported on this system.
+    return null;
+  }
+}
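
As a usage note for the factory method above: a caller typically resolves the
plugin class from its own configuration and handles a null return when the
platform is not supported. The configuration key in the sketch below is
purely hypothetical; this file does not define one.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
    import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;

    public class ResourceCalculatorSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Hypothetical key: a real caller would read its own configured class.
        Class<? extends ResourceCalculatorPlugin> clazz = conf.getClass(
            "example.resource-calculator.class",     // illustrative key name
            LinuxResourceCalculatorPlugin.class,     // explicit default
            ResourceCalculatorPlugin.class);
        ResourceCalculatorPlugin plugin =
            ResourceCalculatorPlugin.getResourceCalculatorPlugin(clazz, conf);
        if (plugin == null) {
          System.out.println("Resource calculation is not supported here.");
          return;
        }
        System.out.println("Physical memory (bytes): "
            + plugin.getPhysicalMemorySize());
        System.out.println("Processors             : "
            + plugin.getNumProcessors());
      }
    }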