Posted to yarn-commits@hadoop.apache.org by ac...@apache.org on 2012/12/07 19:58:06 UTC

svn commit: r1418439 - in /hadoop/common/branches/branch-trunk-win/hadoop-yarn-project: ./ hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/

Author: acmurthy
Date: Fri Dec  7 18:58:04 2012
New Revision: 1418439

URL: http://svn.apache.org/viewvc?rev=1418439&view=rev
Log:
YARN-234. Added support for process tree and resource calculator in MS Windows to YARN. Contributed by Chris Nauroth.

Added:
    hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsBasedProcessTree.java
    hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsResourceCalculatorPlugin.java
    hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
    hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsResourceCalculatorPlugin.java
Modified:
    hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/CHANGES.branch-trunk-win.txt
    hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
    hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java
    hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorProcessTree.java
    hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java

Modified: hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/CHANGES.branch-trunk-win.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/CHANGES.branch-trunk-win.txt?rev=1418439&r1=1418438&r2=1418439&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/CHANGES.branch-trunk-win.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/CHANGES.branch-trunk-win.txt Fri Dec  7 18:58:04 2012
@@ -16,3 +16,6 @@ branch-trunk-win changes - unreleased
   YARN-233. Added support for running containers in MS Windows to YARN. (Chris
   Nauroth via acmurthy)
 
+  YARN-234. Added support for process tree and resource calculator in MS Windows 
+  to YARN. (Chris Nauroth via acmurthy)
+

Modified: hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java?rev=1418439&r1=1418438&r2=1418439&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java Fri Dec  7 18:58:04 2012
@@ -36,6 +36,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.util.StringUtils;
 
@@ -59,32 +60,30 @@ public class ProcfsBasedProcessTree exte
   public static final String PROCFS_STAT_FILE = "stat";
   public static final String PROCFS_CMDLINE_FILE = "cmdline";
   public static final long PAGE_SIZE;
-  static {
-    ShellCommandExecutor shellExecutor =
-            new ShellCommandExecutor(new String[]{"getconf",  "PAGESIZE"});
-    long pageSize = -1;
-    try {
-      shellExecutor.execute();
-      pageSize = Long.parseLong(shellExecutor.getOutput().replace("\n", ""));
-    } catch (IOException e) {
-      LOG.error(StringUtils.stringifyException(e));
-    } finally {
-      PAGE_SIZE = pageSize;
-    }
-  }
   public static final long JIFFY_LENGTH_IN_MILLIS; // in millisecond
+  
   static {
-    ShellCommandExecutor shellExecutor =
-            new ShellCommandExecutor(new String[]{"getconf",  "CLK_TCK"});
     long jiffiesPerSecond = -1;
+    long pageSize = -1;
     try {
-      shellExecutor.execute();
-      jiffiesPerSecond = Long.parseLong(shellExecutor.getOutput().replace("\n", ""));
+      if(Shell.LINUX) {
+        ShellCommandExecutor shellExecutorClk = new ShellCommandExecutor(
+            new String[] { "getconf", "CLK_TCK" });
+        shellExecutorClk.execute();
+        jiffiesPerSecond = Long.parseLong(shellExecutorClk.getOutput().replace("\n", ""));
+
+        ShellCommandExecutor shellExecutorPage = new ShellCommandExecutor(
+            new String[] { "getconf", "PAGESIZE" });
+        shellExecutorPage.execute();
+        pageSize = Long.parseLong(shellExecutorPage.getOutput().replace("\n", ""));
+
+      }
     } catch (IOException e) {
       LOG.error(StringUtils.stringifyException(e));
     } finally {
       JIFFY_LENGTH_IN_MILLIS = jiffiesPerSecond != -1 ?
                      Math.round(1000D / jiffiesPerSecond) : -1;
+                     PAGE_SIZE = pageSize;
     }
   }
 
@@ -126,8 +125,7 @@ public class ProcfsBasedProcessTree exte
    */
   public static boolean isAvailable() {
     try {
-      String osName = System.getProperty("os.name");
-      if (!osName.startsWith("Linux")) {
+      if (!Shell.LINUX) {
         LOG.info("ProcfsBasedProcessTree currently is supported only on "
             + "Linux.");
         return false;
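
For reference, the consolidated static block above only shells out to getconf when Shell.LINUX is true; on other platforms both constants stay at -1. A minimal worked example with the common Linux values CLK_TCK=100 and PAGESIZE=4096 (illustrative numbers, not part of this patch):

    // Illustrative values only; the real ones come from `getconf` at class-load time.
    long jiffiesPerSecond = 100;                                      // getconf CLK_TCK
    long jiffyLengthInMillis = Math.round(1000D / jiffiesPerSecond);  // == 10 ms
    long pageSize = 4096;                                             // getconf PAGESIZE
    // JIFFY_LENGTH_IN_MILLIS and PAGE_SIZE stay at -1 when Shell.LINUX is false
    // or the shell call throws an IOException.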

Modified: hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java?rev=1418439&r1=1418438&r2=1418439&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java Fri Dec  7 18:58:04 2012
@@ -23,6 +23,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.Shell;
 
 /**
  * Plugin to calculate resource information on the system.
@@ -31,6 +32,18 @@ import org.apache.hadoop.util.Reflection
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public abstract class ResourceCalculatorPlugin extends Configured {
+  
+  protected String processPid = null;
+
+  /**
+   * set the pid of the process for which <code>getProcResourceValues</code>
+   * will be invoked
+   * 
+   * @param pid
+   */
+  public void setProcessPid(String pid) {
+    processPid = pid;
+  }
 
   /**
    * Obtain the total size of the virtual memory present in the system.
@@ -151,10 +164,12 @@ public abstract class ResourceCalculator
 
     // No class given, try a os specific class
     try {
-      String osName = System.getProperty("os.name");
-      if (osName.startsWith("Linux")) {
+      if (Shell.LINUX) {
         return new LinuxResourceCalculatorPlugin();
       }
+      if (Shell.WINDOWS) {
+        return new WindowsResourceCalculatorPlugin();
+      }
     } catch (SecurityException se) {
       // Failed to get Operating System name.
       return null;
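
A hedged usage sketch of the plugin API this hunk touches. Only the OS-specific fallback is visible above; the static factory getResourceCalculatorPlugin(clazz, conf) and the effect of passing a null class are assumed from the surrounding ResourceCalculatorPlugin code ("No class given, try a os specific class"):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;

    // Hypothetical caller, not part of this patch.
    public class PluginUsageSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // A null class makes the factory fall through to the OS check added above,
        // returning LinuxResourceCalculatorPlugin or WindowsResourceCalculatorPlugin.
        ResourceCalculatorPlugin plugin =
            ResourceCalculatorPlugin.getResourceCalculatorPlugin(null, conf);
        if (plugin != null) {
          plugin.setProcessPid(args[0]);  // pid whose getProcResourceValues() is wanted
          System.out.println("total physical memory: " + plugin.getPhysicalMemorySize());
          System.out.println("proc resource values:  " + plugin.getProcResourceValues());
        }
      }
    }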

Modified: hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorProcessTree.java?rev=1418439&r1=1418438&r2=1418439&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorProcessTree.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorProcessTree.java Fri Dec  7 18:58:04 2012
@@ -147,14 +147,11 @@ public abstract class ResourceCalculator
     }
 
     // No class given, try a os specific class
-    try {
-      String osName = System.getProperty("os.name");
-      if (osName.startsWith("Linux")) {
-        return new ProcfsBasedProcessTree(pid);
-      }
-    } catch (SecurityException se) {
-      // Failed to get Operating System name.
-      return null;
+    if (ProcfsBasedProcessTree.isAvailable()) {
+      return new ProcfsBasedProcessTree(pid);
+    }
+    if (WindowsBasedProcessTree.isAvailable()) {
+      return new WindowsBasedProcessTree(pid);
     }
 
     // Not supported on this system.
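
Similarly, a hedged sketch of how a container monitor would pick up the new selection logic. The enclosing factory getResourceCalculatorProcessTree(pid, clazz, conf) is assumed from the existing ResourceCalculatorProcessTree code, of which only the OS-specific fallback is shown above:

    // Sketch only (pid and conf assumed to be in the caller's scope): with a null
    // class the factory now probes isAvailable() on each implementation, returning
    // ProcfsBasedProcessTree on Linux, WindowsBasedProcessTree on Windows, and
    // null on unsupported platforms.
    ResourceCalculatorProcessTree tree =
        ResourceCalculatorProcessTree.getResourceCalculatorProcessTree(pid, null, conf);
    if (tree != null) {
      tree.getProcessTree();                         // refresh the snapshot of pid's tree
      long vmemBytes = tree.getCumulativeVmem();     // virtual memory across the tree
      long rssBytes  = tree.getCumulativeRssmem();   // working set / RSS across the tree
      long cpuMillis = tree.getCumulativeCpuTime();  // cumulative CPU time in milliseconds
    }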

Added: hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsBasedProcessTree.java?rev=1418439&view=auto
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsBasedProcessTree.java (added)
+++ hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsBasedProcessTree.java Fri Dec  7 18:58:04 2012
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.apache.hadoop.util.StringUtils;
+
+
+public class WindowsBasedProcessTree extends ResourceCalculatorProcessTree {
+
+  static final Log LOG = LogFactory
+      .getLog(WindowsBasedProcessTree.class);
+  
+  static class ProcessInfo {
+    String pid; // process pid
+    long vmem; // virtual memory
+    long workingSet; // working set, RAM used
+    long cpuTimeMs; // total cpuTime in millisec
+    long cpuTimeMsDelta; // delta of cpuTime since last update
+    int age = 1;
+  }
+  
+  private String taskProcessId = null;
+  private long cpuTimeMs = 0;
+  private Map<String, ProcessInfo> processTree = 
+      new HashMap<String, ProcessInfo>();
+    
+  public static boolean isAvailable() {
+    if (Shell.WINDOWS) {
+      ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
+          new String[] { Shell.WINUTILS, "help" });
+      try {
+        shellExecutor.execute();
+      } catch (IOException e) {
+        LOG.error(StringUtils.stringifyException(e));
+      } finally {
+        String output = shellExecutor.getOutput();
+        if (output != null &&
+            output.contains("Prints to stdout a list of processes in the task")) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
+  public WindowsBasedProcessTree(String pid) {
+    super(pid);
+    taskProcessId = pid;
+  }
+
+  // helper method to override while testing
+  String getAllProcessInfoFromShell() {
+    ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
+        new String[] { Shell.WINUTILS, "task", "processList", taskProcessId });
+    try {
+      shellExecutor.execute();
+      return shellExecutor.getOutput();
+    } catch (IOException e) {
+      LOG.error(StringUtils.stringifyException(e));
+    }
+    return null;
+  }
+
+  /**
+   * Parses string of process info lines into ProcessInfo objects
+   * @param processesInfoStr
+   * @return Map of pid string to ProcessInfo objects
+   */
+  Map<String, ProcessInfo> createProcessInfo(String processesInfoStr) {
+    String[] processesStr = processesInfoStr.split("\r\n");
+    Map<String, ProcessInfo> allProcs = new HashMap<String, ProcessInfo>();
+    final int procInfoSplitCount = 4;
+    for (String processStr : processesStr) {
+      if (processStr != null) {
+        String[] procInfo = processStr.split(",");
+        if (procInfo.length == procInfoSplitCount) {
+          try {
+            ProcessInfo pInfo = new ProcessInfo();
+            pInfo.pid = procInfo[0];
+            pInfo.vmem = Long.parseLong(procInfo[1]);
+            pInfo.workingSet = Long.parseLong(procInfo[2]);
+            pInfo.cpuTimeMs = Long.parseLong(procInfo[3]);
+            allProcs.put(pInfo.pid, pInfo);
+          } catch (NumberFormatException nfe) {
+            LOG.debug("Error parsing procInfo." + nfe);
+          }
+        } else {
+          LOG.debug("Expected split length of proc info to be "
+              + procInfoSplitCount + ". Got " + procInfo.length);
+        }
+      }
+    }
+    return allProcs;
+  }
+  
+  @Override
+  public ResourceCalculatorProcessTree getProcessTree() {
+    if(taskProcessId != null) {
+      // taskProcessId can be null in some tests
+      String processesInfoStr = getAllProcessInfoFromShell();
+      if (processesInfoStr != null && processesInfoStr.length() > 0) {
+        Map<String, ProcessInfo> allProcessInfo = createProcessInfo(processesInfoStr);
+
+        for (Map.Entry<String, ProcessInfo> entry : allProcessInfo.entrySet()) {
+          String pid = entry.getKey();
+          ProcessInfo pInfo = entry.getValue();
+          ProcessInfo oldInfo = processTree.get(pid);
+          if (oldInfo != null) {
+            // existing process, update age and replace value
+            pInfo.age += oldInfo.age;
+            // calculate the delta since the last refresh. totals are being kept
+            // in the WindowsBasedProcessTree object
+            pInfo.cpuTimeMsDelta = pInfo.cpuTimeMs - oldInfo.cpuTimeMs;
+          } else {
+            // new process. delta cpu == total cpu
+            pInfo.cpuTimeMsDelta = pInfo.cpuTimeMs;
+          }
+        }
+        processTree.clear();
+        processTree = allProcessInfo;
+      } else {
+        // clearing process tree to mimic semantics of existing Procfs impl
+        processTree.clear();
+      }
+    }
+
+    return this;
+  }
+
+  @Override
+  public boolean checkPidPgrpidForMatch() {
+    // This is always true on Windows, because the pid doubles as a job object
+    // name for task management.
+    return true;
+  }
+
+  @Override
+  public String getProcessTreeDump() {
+    StringBuilder ret = new StringBuilder();
+    // The header.
+    ret.append(String.format("\t|- PID " + "CPU_TIME(MILLIS) "
+        + "VMEM(BYTES) WORKING_SET(BYTES)\n"));
+    for (ProcessInfo p : processTree.values()) {
+      if (p != null) {
+        ret.append(String.format("\t|- %s %d %d %d\n", p.pid,
+            p.cpuTimeMs, p.vmem, p.workingSet));
+      }
+    }
+    return ret.toString();
+  }
+
+  @Override
+  public long getCumulativeVmem(int olderThanAge) {
+    long total = 0;
+    for (ProcessInfo p : processTree.values()) {
+      if ((p != null) && (p.age > olderThanAge)) {
+        total += p.vmem;
+      }
+    }
+    return total;
+  }
+
+  @Override
+  public long getCumulativeRssmem(int olderThanAge) {
+    long total = 0;
+    for (ProcessInfo p : processTree.values()) {
+      if ((p != null) && (p.age > olderThanAge)) {
+        total += p.workingSet;
+      }
+    }
+    return total;
+  }
+
+  @Override
+  public long getCumulativeCpuTime() {
+    for (ProcessInfo p : processTree.values()) {
+      cpuTimeMs += p.cpuTimeMsDelta;
+    }
+    return cpuTimeMs;
+  }
+
+}
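
For context on the parsing code above: createProcessInfo() expects the output of "winutils task processList <pid>" to be CRLF-terminated lines of four comma-separated fields. The layout below is inferred from the parser and from the sample strings in TestWindowsBasedProcessTree further down; the field names are simply the ProcessInfo member names, the tool prints no header line:

    field order: pid, vmem, workingSet, cpuTimeMs
    3524,1024,1024,500
    2844,1024,1024,500

Across successive getProcessTree() calls the class keeps a per-pid age (grown by one each time the pid is seen again) and a CPU-time delta against the previous snapshot: getCumulativeCpuTime() sums those deltas (1000 ms for the two lines above on a first refresh), while getCumulativeVmem(olderThanAge) and getCumulativeRssmem(olderThanAge) count only processes whose age exceeds olderThanAge.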

Added: hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsResourceCalculatorPlugin.java?rev=1418439&view=auto
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsResourceCalculatorPlugin.java (added)
+++ hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsResourceCalculatorPlugin.java Fri Dec  7 18:58:04 2012
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.apache.hadoop.util.StringUtils;
+
+public class WindowsResourceCalculatorPlugin extends ResourceCalculatorPlugin {
+  
+  static final Log LOG = LogFactory
+      .getLog(WindowsResourceCalculatorPlugin.class);
+  
+  long vmemSize;
+  long memSize;
+  long vmemAvailable;
+  long memAvailable;
+  int numProcessors;
+  long cpuFrequencyKhz;
+  long cumulativeCpuTimeMs;
+  float cpuUsage;
+  
+  long lastRefreshTime;
+  private final int refreshIntervalMs = 1000;
+  
+  WindowsBasedProcessTree pTree = null;
+  
+  public WindowsResourceCalculatorPlugin() {
+    lastRefreshTime = 0;
+    reset();
+  }
+  
+  void reset() {
+    vmemSize = -1;
+    memSize = -1;
+    vmemAvailable = -1;
+    memAvailable = -1;
+    numProcessors = -1;
+    cpuFrequencyKhz = -1;
+    cumulativeCpuTimeMs = -1;
+    cpuUsage = -1;
+  }
+
+  String getSystemInfoInfoFromShell() {
+    ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
+        new String[] { Shell.WINUTILS, "systeminfo" });
+    try {
+      shellExecutor.execute();
+      return shellExecutor.getOutput();
+    } catch (IOException e) {
+      LOG.error(StringUtils.stringifyException(e));
+    }
+    return null;
+  }
+  
+  void refreshIfNeeded() {
+    long now = System.currentTimeMillis();
+    if (now - lastRefreshTime > refreshIntervalMs) {
+      long refreshInterval = now - lastRefreshTime;
+      lastRefreshTime = now;
+      long lastCumCpuTimeMs = cumulativeCpuTimeMs;
+      reset();
+      String sysInfoStr = getSystemInfoInfoFromShell();
+      if (sysInfoStr != null) {
+        final int sysInfoSplitCount = 7;
+        String[] sysInfo = sysInfoStr.substring(0, sysInfoStr.indexOf("\r\n"))
+            .split(",");
+        if (sysInfo.length == sysInfoSplitCount) {
+          try {
+            vmemSize = Long.parseLong(sysInfo[0]);
+            memSize = Long.parseLong(sysInfo[1]);
+            vmemAvailable = Long.parseLong(sysInfo[2]);
+            memAvailable = Long.parseLong(sysInfo[3]);
+            numProcessors = Integer.parseInt(sysInfo[4]);
+            cpuFrequencyKhz = Long.parseLong(sysInfo[5]);
+            cumulativeCpuTimeMs = Long.parseLong(sysInfo[6]);
+            if (lastCumCpuTimeMs != -1) {
+              cpuUsage = (cumulativeCpuTimeMs - lastCumCpuTimeMs)
+                  / (refreshInterval * 1.0f);
+            }
+
+          } catch (NumberFormatException nfe) {
+            LOG.warn("Error parsing sysInfo." + nfe);
+          }
+        } else {
+          LOG.warn("Expected split length of sysInfo to be "
+              + sysInfoSplitCount + ". Got " + sysInfo.length);
+        }
+      }
+    }
+  }
+  
+  /** {@inheritDoc} */
+  @Override
+  public long getVirtualMemorySize() {
+    refreshIfNeeded();
+    return vmemSize;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getPhysicalMemorySize() {
+    refreshIfNeeded();
+    return memSize;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getAvailableVirtualMemorySize() {
+    refreshIfNeeded();
+    return vmemAvailable;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getAvailablePhysicalMemorySize() {
+    refreshIfNeeded();
+    return memAvailable;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public int getNumProcessors() {
+    refreshIfNeeded();
+    return numProcessors;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getCpuFrequency() {
+    refreshIfNeeded();
+    return -1;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long getCumulativeCpuTime() {
+    refreshIfNeeded();
+    return cumulativeCpuTimeMs;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public float getCpuUsage() {
+    refreshIfNeeded();
+    return cpuUsage;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public ProcResourceValues getProcResourceValues() {
+    if(pTree == null) {
+      pTree = new WindowsBasedProcessTree(processPid);
+    }
+    pTree.getProcessTree();
+    long cpuTime = pTree.getCumulativeCpuTime();
+    long pMem = pTree.getCumulativeRssmem();
+    long vMem = pTree.getCumulativeVmem();
+    return new ProcResourceValues(cpuTime, pMem, vMem);
+  }
+}
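
For context on refreshIfNeeded() above: only the first CRLF-terminated line of "winutils systeminfo" is parsed, and it must carry seven comma-separated fields in the order of the assignments in the code; the sample line is the one used by TestWindowsResourceCalculatorPlugin below:

    field order: vmemSize, memSize, vmemAvailable, memAvailable, numProcessors, cpuFrequencyKhz, cumulativeCpuTimeMs
    17177038848,8589467648,15232745472,6400417792,1,2805000,6261812

Refreshes are rate-limited to one per refreshIntervalMs (1000 ms), and cpuUsage is the growth in cumulativeCpuTimeMs divided by the elapsed wall-clock milliseconds; with the test's numbers, a later reading of 6286812 taken roughly 1500 ms after the line above gives cpuUsage ≈ (6286812 - 6261812) / 1500 ≈ 16.7.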

Modified: hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java?rev=1418439&r1=1418438&r2=1418439&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java Fri Dec  7 18:58:04 2012
@@ -36,6 +36,7 @@ import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ExitCodeException;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree;
@@ -107,14 +108,18 @@ public class TestProcfsBasedProcessTree 
   @Test
   public void testProcessTree() throws Exception {
 
+    if (!Shell.LINUX) {
+      System.out
+          .println("ProcfsBasedProcessTree is not available on this system. Not testing");
+      return;
+
+    }
     try {
-      if (!ProcfsBasedProcessTree.isAvailable()) {
-        System.out
-            .println("ProcfsBasedProcessTree is not available on this system. Not testing");
-        return;
-      }
+      Assert.assertTrue(ProcfsBasedProcessTree.isAvailable());
     } catch (Exception e) {
       LOG.info(StringUtils.stringifyException(e));
+      Assert.assertTrue("ProcfsBaseProcessTree should be available on Linux",
+        false);
       return;
     }
     // create shell script

Added: hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java?rev=1418439&view=auto
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java (added)
+++ hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java Fri Dec  7 18:58:04 2012
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.Shell;
+
+import junit.framework.TestCase;
+
+public class TestWindowsBasedProcessTree extends TestCase {
+  private static final Log LOG = LogFactory
+      .getLog(TestWindowsBasedProcessTree.class);
+  
+  class WindowsBasedProcessTreeTester extends WindowsBasedProcessTree {
+    String infoStr = null;
+    public WindowsBasedProcessTreeTester(String pid) {
+      super(pid);
+    }
+    @Override
+    String getAllProcessInfoFromShell() {
+      return infoStr;
+    }
+  }
+  
+  public void testTree() {
+    if( !Shell.WINDOWS) {
+      LOG.info("Platform not Windows. Not testing");
+      return;      
+    }
+    assertTrue("WindowsBasedProcessTree should be available on Windows", 
+               WindowsBasedProcessTree.isAvailable());
+    
+    
+    WindowsBasedProcessTreeTester pTree = new WindowsBasedProcessTreeTester("-1");
+    pTree.infoStr = "3524,1024,1024,500\r\n2844,1024,1024,500\r\n";
+    pTree.getProcessTree();
+    assertTrue(pTree.getCumulativeVmem() == 2048);
+    assertTrue(pTree.getCumulativeVmem(0) == 2048);
+    assertTrue(pTree.getCumulativeRssmem() == 2048);
+    assertTrue(pTree.getCumulativeRssmem(0) == 2048);
+    assertTrue(pTree.getCumulativeCpuTime() == 1000);
+
+    pTree.infoStr = "3524,1024,1024,1000\r\n2844,1024,1024,1000\r\n1234,1024,1024,1000\r\n";
+    pTree.getProcessTree();
+    assertTrue(pTree.getCumulativeVmem() == 3072);
+    assertTrue(pTree.getCumulativeVmem(1) == 2048);
+    assertTrue(pTree.getCumulativeRssmem() == 3072);
+    assertTrue(pTree.getCumulativeRssmem(1) == 2048);
+    assertTrue(pTree.getCumulativeCpuTime() == 3000);    
+
+    pTree.infoStr = "3524,1024,1024,1500\r\n2844,1024,1024,1500\r\n";
+    pTree.getProcessTree();
+    assertTrue(pTree.getCumulativeVmem() == 2048);
+    assertTrue(pTree.getCumulativeVmem(2) == 2048);
+    assertTrue(pTree.getCumulativeRssmem() == 2048);
+    assertTrue(pTree.getCumulativeRssmem(2) == 2048);
+    assertTrue(pTree.getCumulativeCpuTime() == 4000);    
+  }
+}
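
To spell out the arithmetic behind the assertions above: on the first refresh both processes are new, so the CPU deltas sum to 500 + 500 = 1000 ms and every entry has age 1. On the second refresh pids 3524 and 2844 each contribute a 500 ms delta and the new pid 1234 contributes its full 1000 ms, raising the cumulative CPU time to 3000 ms; only the two surviving pids have age greater than 1, so the olderThanAge=1 memory totals are 2048 while the unfiltered totals are 3072. On the third refresh pid 1234 is gone and the survivors add 500 ms each (cumulative 4000 ms); their age is now 3, so the olderThanAge=2 totals are again 2048.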

Added: hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsResourceCalculatorPlugin.java?rev=1418439&view=auto
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsResourceCalculatorPlugin.java (added)
+++ hadoop/common/branches/branch-trunk-win/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsResourceCalculatorPlugin.java Fri Dec  7 18:58:04 2012
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import junit.framework.TestCase;
+
+public class TestWindowsResourceCalculatorPlugin extends TestCase {
+  
+  
+  class WindowsResourceCalculatorPluginTester extends WindowsResourceCalculatorPlugin {
+    private String infoStr = null;
+    @Override
+    String getSystemInfoInfoFromShell() {
+      return infoStr;
+    }    
+  }
+  
+  public void testParseSystemInfoString() {
+    WindowsResourceCalculatorPluginTester tester = new WindowsResourceCalculatorPluginTester();
+    // info str derived from windows shell command has \r\n termination
+    tester.infoStr = "17177038848,8589467648,15232745472,6400417792,1,2805000,6261812\r\n";
+    // call a method to refresh values
+    tester.getAvailablePhysicalMemorySize();
+    // verify information has been refreshed
+    assertTrue(tester.vmemSize == 17177038848L);
+    assertTrue(tester.memSize == 8589467648L);
+    assertTrue(tester.vmemAvailable == 15232745472L);
+    assertTrue(tester.memAvailable == 6400417792L);
+    assertTrue(tester.numProcessors == 1);
+    assertTrue(tester.cpuFrequencyKhz == 2805000L);
+    assertTrue(tester.cumulativeCpuTimeMs == 6261812L);
+    assertTrue(tester.cpuUsage == -1);
+  }
+  
+  public void testRefreshAndCpuUsage() throws InterruptedException {
+    WindowsResourceCalculatorPluginTester tester = new WindowsResourceCalculatorPluginTester();
+    // info str derived from windows shell command has \r\n termination
+    tester.infoStr = "17177038848,8589467648,15232745472,6400417792,1,2805000,6261812\r\n";
+    tester.getAvailablePhysicalMemorySize();
+    // verify information has been refreshed
+    assertTrue(tester.memAvailable == 6400417792L);
+    assertTrue(tester.cpuUsage == -1);
+    
+    tester.infoStr = "17177038848,8589467648,15232745472,5400417792,1,2805000,6261812\r\n";
+    tester.getAvailablePhysicalMemorySize();
+    // verify information has not been refreshed
+    assertTrue(tester.memAvailable == 6400417792L);
+    assertTrue(tester.cpuUsage == -1);
+    
+    Thread.sleep(1500);
+    tester.infoStr = "17177038848,8589467648,15232745472,5400417792,1,2805000,6286812\r\n";
+    tester.getAvailablePhysicalMemorySize();
+    // verify information has been refreshed
+    assertTrue(tester.memAvailable == 5400417792L);
+    assertTrue(tester.cpuUsage >= 0.1);
+  }
+  
+  public void testErrorInGetSystemInfo() {
+    WindowsResourceCalculatorPluginTester tester = new WindowsResourceCalculatorPluginTester();
+    // info str derived from windows shell command has \r\n termination
+    tester.infoStr = null;
+    // call a method to refresh values
+    tester.getAvailablePhysicalMemorySize();    
+  }
+
+}