Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2007/07/16 20:45:12 UTC

svn commit: r556691 - in /lucene/hadoop/trunk: ./ conf/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/mapred/ src/java/org/apache/hadoop/metrics/ src/java/org/apache/hadoop/metrics/jvm/

Author: cutting
Date: Mon Jul 16 11:45:07 2007
New Revision: 556691

URL: http://svn.apache.org/viewvc?view=rev&rev=556691
Log:
HADOOP-1562.  Add JVM metrics.  Contributed by David Bowen.

Added:
    lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/
    lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/conf/hadoop-metrics.properties
    lucene/hadoop/trunk/conf/log4j.properties
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/SecondaryNameNode.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/MetricsRecord.java
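
Across the daemons touched below, the change follows one pattern: the per-daemon metrics class now takes the Configuration, calls JvmMetrics.init(...) with the process name and session id, and tags its own record with that session id. A minimal sketch of that pattern, modeled on the DataNode/NameNode diffs (the class and record names here are illustrative, not part of the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.metrics.MetricsContext;
    import org.apache.hadoop.metrics.MetricsRecord;
    import org.apache.hadoop.metrics.MetricsUtil;
    import org.apache.hadoop.metrics.Updater;
    import org.apache.hadoop.metrics.jvm.JvmMetrics;

    class MyDaemonMetrics implements Updater {           // illustrative name, not part of the patch
      private final MetricsRecord metricsRecord;

      MyDaemonMetrics(Configuration conf) {
        String sessionId = conf.get("session.id");
        // Start reporting of Java VM metrics for this process
        JvmMetrics.init("MyDaemon", sessionId);
        // Create and tag the daemon's own metrics record
        MetricsContext context = MetricsUtil.getContext("dfs");
        metricsRecord = MetricsUtil.createRecord(context, "mydaemon");
        metricsRecord.setTag("sessionId", sessionId);
        context.registerUpdater(this);
      }

      public void doUpdates(MetricsContext unused) {
        // push the daemon's accumulated counters into metricsRecord here
        metricsRecord.update();
      }
    }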

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Mon Jul 16 11:45:07 2007
@@ -356,6 +356,9 @@
 
 111. HADOOP-1599.  Fix distcp bug on Windows.  (Senthil Subramanian via cutting)
 
+112. HADOOP-1562.  Add JVM metrics, including GC and logging stats.
+     (David Bowen via cutting)
+
 
 Release 0.13.0 - 2007-06-08
 

Modified: lucene/hadoop/trunk/conf/hadoop-metrics.properties
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/conf/hadoop-metrics.properties?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/conf/hadoop-metrics.properties (original)
+++ lucene/hadoop/trunk/conf/hadoop-metrics.properties Mon Jul 16 11:45:07 2007
@@ -1,25 +1,40 @@
 # Configuration of the "dfs" context for null
 dfs.class=org.apache.hadoop.metrics.spi.NullContext
 
-# Configuration of the "mapred" context for null
-mapred.class=org.apache.hadoop.metrics.spi.NullContext
-
 # Configuration of the "dfs" context for file
 #dfs.class=org.apache.hadoop.metrics.file.FileContext
 #dfs.period=10
 #dfs.fileName=/tmp/dfsmetrics.log
 
-# Configuration of the "mapred" context for file
-#mapred.class=org.apache.hadoop.metrics.file.FileContext
-#mapred.period=10
-#mapred.fileName=/tmp/mrmetrics.log
-
 # Configuration of the "dfs" context for ganglia
 # dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
 # dfs.period=10
 # dfs.servers=localhost:8649
 
+
+# Configuration of the "mapred" context for null
+mapred.class=org.apache.hadoop.metrics.spi.NullContext
+
+# Configuration of the "mapred" context for file
+#mapred.class=org.apache.hadoop.metrics.file.FileContext
+#mapred.period=10
+#mapred.fileName=/tmp/mrmetrics.log
+
 # Configuration of the "mapred" context for ganglia
 # mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext
 # mapred.period=10
 # mapred.servers=localhost:8649
+
+
+# Configuration of the "jvm" context for null
+jvm.class=org.apache.hadoop.metrics.spi.NullContext
+
+# Configuration of the "jvm" context for file
+#jvm.class=org.apache.hadoop.metrics.file.FileContext
+#jvm.period=10
+#jvm.fileName=/tmp/jvmmetrics.log
+
+# Configuration of the "jvm" context for ganglia
+# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+# jvm.period=10
+# jvm.servers=localhost:8649
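+
+# As with the dfs and mapred contexts, the new "jvm" context defaults to
+# NullContext, so the JVM metrics are discarded unless a real context is
+# configured. For example, to dump them to a local file, uncomment the
+# file-context lines above:
+#
+#   jvm.class=org.apache.hadoop.metrics.file.FileContext
+#   jvm.period=10
+#   jvm.fileName=/tmp/jvmmetrics.log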

Modified: lucene/hadoop/trunk/conf/log4j.properties
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/conf/log4j.properties?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/conf/log4j.properties (original)
+++ lucene/hadoop/trunk/conf/log4j.properties Mon Jul 16 11:45:07 2007
@@ -4,7 +4,7 @@
 hadoop.log.file=hadoop.log
 
 # Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hadoop.root.logger}
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
 
 # Logging Threshold
 log4j.threshhold=ALL
@@ -80,3 +80,8 @@
 #log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
 #log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
 
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java Mon Jul 16 11:45:07 2007
@@ -34,9 +34,11 @@
 import java.io.*;
 import java.net.*;
 import java.util.*;
+import org.apache.hadoop.dfs.FSConstants.StartupOption;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.Updater;
+import org.apache.hadoop.metrics.jvm.JvmMetrics;
 
 /**********************************************************
  * DataNode is a class (and program) that stores a set of
@@ -119,7 +121,7 @@
   long heartBeatInterval;
   private DataStorage storage = null;
   private StatusHttpServer infoServer = null;
-  private DataNodeMetrics myMetrics = new DataNodeMetrics();
+  private DataNodeMetrics myMetrics;
   private static InetSocketAddress nameNodeAddr;
   private static DataNode datanodeObject = null;
   private static Thread dataNodeThread = null;
@@ -134,9 +136,14 @@
     private int blocksReplicated = 0;
     private int blocksRemoved = 0;
       
-    DataNodeMetrics() {
+    DataNodeMetrics(Configuration conf) {
+      String sessionId = conf.get("session.id"); 
+      // Initiate reporting of Java VM metrics
+      JvmMetrics.init("DataNode", sessionId);
+      // Create record for DataNode metrics
       MetricsContext context = MetricsUtil.getContext("dfs");
       metricsRecord = MetricsUtil.createRecord(context, "datanode");
+      metricsRecord.setTag("sessionId", sessionId);
       context.registerUpdater(this);
     }
       
@@ -194,7 +201,10 @@
    */
   DataNode(Configuration conf, 
            AbstractList<File> dataDirs) throws IOException {
+      
+    myMetrics = new DataNodeMetrics(conf);
     datanodeObject = this;
+
     try {
       startDataNode(conf, dataDirs);
     } catch (IOException ie) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java Mon Jul 16 11:45:07 2007
@@ -19,6 +19,8 @@
 
 import java.io.*;
 import java.util.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.dfs.FSConstants.StartupOption;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.metrics.MetricsRecord;
@@ -354,21 +356,22 @@
   private MetricsRecord directoryMetrics = null;
     
   /** Access an existing dfs name directory. */
-  public FSDirectory(FSNamesystem ns) throws IOException {
+  public FSDirectory(FSNamesystem ns, Configuration conf) throws IOException {
     this.fsImage = new FSImage();
     namesystem = ns;
-    initialize();
+    initialize(conf);
   }
 
-  public FSDirectory(FSImage fsImage, FSNamesystem ns) throws IOException {
+  public FSDirectory(FSImage fsImage, FSNamesystem ns, Configuration conf) throws IOException {
     this.fsImage = fsImage;
     namesystem = ns;
-    initialize();
+    initialize(conf);
   }
     
-  private void initialize() {
+  private void initialize(Configuration conf) {
     MetricsContext metricsContext = MetricsUtil.getContext("dfs");
     directoryMetrics = MetricsUtil.createRecord(metricsContext, "FSDirectory");
+    directoryMetrics.setTag("sessionId", conf.get("session.id"));
   }
 
   void loadFSImage(Collection<File> dataDirs,

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java Mon Jul 16 11:45:07 2007
@@ -243,7 +243,7 @@
 
     this.localMachine = hostname;
     this.port = port;
-    this.dir = new FSDirectory(this);
+    this.dir = new FSDirectory(this, conf);
     StartupOption startOpt = NameNode.getStartupOption(conf);
     this.dir.loadFSImage(getNamespaceDirs(conf), startOpt);
     this.safeMode = new SafeModeInfo(conf);
@@ -294,9 +294,9 @@
    * dirs is a list of directories where the filesystem directory state 
    * is stored
    */
-  FSNamesystem(FSImage fsImage) throws IOException {
+  FSNamesystem(FSImage fsImage, Configuration conf) throws IOException {
     fsNamesystemObject = this;
-    this.dir = new FSDirectory(fsImage, this);
+    this.dir = new FSDirectory(fsImage, this, conf);
   }
 
   /** Return the FSNamesystem object

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java Mon Jul 16 11:45:07 2007
@@ -35,6 +35,7 @@
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.Updater;
+import org.apache.hadoop.metrics.jvm.JvmMetrics;
 
 /**********************************************************
  * NameNode serves as both directory namespace manager and
@@ -100,14 +101,20 @@
 
   private static class NameNodeMetrics implements Updater {
     private final MetricsRecord metricsRecord;
+    
     private int numFilesCreated = 0;
     private int numFilesOpened = 0;
     private int numFilesRenamed = 0;
     private int numFilesListed = 0;
       
-    NameNodeMetrics() {
+    NameNodeMetrics(Configuration conf) {
+      String sessionId = conf.get("session.id");
+      // Initiate Java VM metrics
+      JvmMetrics.init("NameNode", sessionId);
+      // Create a record for NameNode metrics
       MetricsContext metricsContext = MetricsUtil.getContext("dfs");
       metricsRecord = MetricsUtil.createRecord(metricsContext, "namenode");
+      metricsRecord.setTag("sessionId", sessionId);
       metricsContext.registerUpdater(this);
     }
       
@@ -147,7 +154,7 @@
     }
   }
     
-  private NameNodeMetrics myMetrics = new NameNodeMetrics();
+  private NameNodeMetrics myMetrics;
     
   /**
    * Initialize the server
@@ -168,6 +175,8 @@
     conf.set("fs.default.name", nameNodeAddress.getHostName() + ":" + nameNodeAddress.getPort());
     LOG.info("Namenode up at: " + this.nameNodeAddress);
 
+    myMetrics = new NameNodeMetrics(conf);
+        
     try {
       this.namesystem = new FSNamesystem(this.nameNodeAddress.getHostName(), this.nameNodeAddress.getPort(), this, conf);
       this.server.start();  //start RPC server   
@@ -179,7 +188,7 @@
       this.server.stop();
       throw e;
     }
-      
+    
   }
     
   /**
@@ -767,7 +776,7 @@
       }
     }
 
-    FSNamesystem nsys = new FSNamesystem(new FSImage(dirsToFormat));
+    FSNamesystem nsys = new FSNamesystem(new FSImage(dirsToFormat), conf);
     nsys.dir.fsImage.format();
     return false;
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/SecondaryNameNode.java?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/SecondaryNameNode.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/SecondaryNameNode.java Mon Jul 16 11:45:07 2007
@@ -33,6 +33,7 @@
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.metrics.jvm.JvmMetrics;
 
 /**********************************************************
  * The Secondary NameNode is a helper to the primary NameNode.
@@ -77,6 +78,9 @@
    */
   public SecondaryNameNode(Configuration conf)  throws IOException {
 
+    // initiate Java VM metrics
+    JvmMetrics.init("SecondaryNameNode", conf.get("session.id"));
+    
     //
     // initialize error simulation code for junit test
     //
@@ -294,8 +298,8 @@
    * DEST_FS_IMAGE
    */
   private void doMerge() throws IOException {
-    FSNamesystem namesystem = new FSNamesystem(
-                                               new FSImage(checkpointDir));
+    FSNamesystem namesystem = 
+            new FSNamesystem(new FSImage(checkpointDir), conf);
     FSImage fsImage = namesystem.dir.fsImage;
     fsImage.loadFSImage(srcImage);
     fsImage.getEditLog().loadFSEdits(editFile);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java Mon Jul 16 11:45:07 2007
@@ -50,6 +50,7 @@
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.Updater;
+import org.apache.hadoop.metrics.jvm.JvmMetrics;
 import org.apache.hadoop.util.HostsFileReader;
 import org.apache.hadoop.util.StringUtils;
 
@@ -427,9 +428,13 @@
     private int numJobsCompleted = 0;
       
     JobTrackerMetrics(JobConf conf) {
+      String sessionId = conf.getSessionId();
+      // Initiate JVM Metrics
+      JvmMetrics.init("JobTracker", sessionId);
+      // Create a record for map-reduce metrics
       MetricsContext context = MetricsUtil.getContext("mapred");
       metricsRecord = MetricsUtil.createRecord(context, "jobtracker");
-      metricsRecord.setTag("sessionId", conf.getSessionId());
+      metricsRecord.setTag("sessionId", sessionId);
       context.registerUpdater(this);
     }
       
@@ -658,6 +663,7 @@
     trackerIdentifier = dateFormat.format(new Date());
 
     myMetrics = new JobTrackerMetrics(jobConf);
+    
     this.expireTrackersThread = new Thread(this.expireTrackers,
                                            "expireTrackers");
     this.expireTrackersThread.start();

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Mon Jul 16 11:45:07 2007
@@ -61,6 +61,7 @@
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.Updater;
+import org.apache.hadoop.metrics.jvm.JvmMetrics;
 import org.apache.hadoop.net.DNS;
 import org.apache.hadoop.util.DiskChecker;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -218,9 +219,13 @@
       
     TaskTrackerMetrics() {
       JobConf conf = getJobConf();
+      String sessionId = conf.getSessionId();
+      // Initiate Java VM Metrics
+      JvmMetrics.init("TaskTracker", sessionId);
+      // Create a record for Task Tracker metrics
       MetricsContext context = MetricsUtil.getContext("mapred");
       metricsRecord = MetricsUtil.createRecord(context, "tasktracker");
-      metricsRecord.setTag("sessionId", conf.getSessionId());
+      metricsRecord.setTag("sessionId", sessionId);
       context.registerUpdater(this);
     }
       
@@ -1756,6 +1761,9 @@
       task.setConf(job);
           
       defaultConf.addFinalResource(new Path(task.getJobFile()));
+      
+      // Initiate Java VM metrics
+      JvmMetrics.init(task.getPhase().toString(), job.getSessionId());
 
       try {
         // use job-specified working directory

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/MetricsRecord.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/MetricsRecord.java?view=diff&rev=556691&r1=556690&r2=556691
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/MetricsRecord.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/MetricsRecord.java Mon Jul 16 11:45:07 2007
@@ -75,7 +75,8 @@
   public abstract String getRecordName();
     
   /**
-   * Sets the named tag to the specified value.
+   * Sets the named tag to the specified value.  The tagValue may be null, 
+   * which is treated the same as an empty String.
    *
    * @param tagName name of the tag
    * @param tagValue new value of the tag
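
The javadoc change above matters because "session.id" can be unset (for example outside of a job), in which case Configuration.get() returns null and the new daemon code passes that value straight through to setTag. A small hypothetical caller, just to show the now-documented behavior:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.metrics.MetricsContext;
    import org.apache.hadoop.metrics.MetricsRecord;
    import org.apache.hadoop.metrics.MetricsUtil;

    public class SessionTagExample {                     // illustrative, not part of the patch
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        String sessionId = conf.get("session.id");       // null when no session id is configured
        MetricsContext context = MetricsUtil.getContext("dfs");
        MetricsRecord record = MetricsUtil.createRecord(context, "example");
        record.setTag("sessionId", sessionId);           // now explicitly legal: null is treated as ""
      }
    }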

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java?view=auto&rev=556691
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java Mon Jul 16 11:45:07 2007
@@ -0,0 +1,83 @@
+/*
+ * EventCounter.java
+ *
+ * Created on July 5, 2007, 9:24 AM
+ */
+
+package org.apache.hadoop.metrics.jvm;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.log4j.AppenderSkeleton;
+import org.apache.log4j.Level;
+import org.apache.log4j.spi.LoggingEvent;
+
+/**
+ * A log4j Appender that simply counts logging events in four levels:
+ * fatal, error, warn and info.
+ */
+public class EventCounter extends AppenderSkeleton {
+        
+    private static final int FATAL = 0;
+    private static final int ERROR = 1;
+    private static final int WARN  = 2;
+    private static final int INFO  = 3;
+    
+    private static class EventCounts {
+        private final long[] counts = { 0, 0, 0, 0 };
+    
+        private synchronized void incr(int i) { 
+            ++counts[i]; 
+        }
+        
+        private synchronized long get(int i) { 
+            return counts[i]; 
+        }
+    }
+    private static EventCounts counts = new EventCounts();
+    
+    public static long getFatal() { 
+        return counts.get(FATAL); 
+    }
+    
+    public static long getError() { 
+        return counts.get(ERROR); 
+    }
+    
+    public static long getWarn() { 
+        return counts.get(WARN);  
+    }
+    
+    public static long getInfo() {
+        return counts.get(INFO);
+    }
+    
+    public void append(LoggingEvent event) {
+        Level level = event.getLevel();
+        if (level == Level.INFO) {
+            counts.incr(INFO);
+        }
+        else if (level == Level.WARN) {
+            counts.incr(WARN);
+        }
+        else if (level == Level.ERROR) {
+            counts.incr(ERROR);
+        }
+        else if (level == Level.FATAL) {
+            counts.incr(FATAL);
+        }
+
+    }
+    
+    // Strange: these two methods are abstract in AppenderSkeleton, but not
+    // included in the javadoc (log4j 1.2.13).
+    
+    public void close() {
+    }
+    public boolean requiresLayout() {
+        return false;
+    }
+    
+    
+    
+}
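
With the EventCounter appender attached to the root logger (the log4j.properties change earlier in this commit), ordinary logging calls are counted as a side effect, and JvmMetrics later reports the per-interval deltas as logFatal/logError/logWarn/logInfo. A minimal illustration, assuming commons-logging is bound to log4j as in Hadoop's default setup:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.metrics.jvm.EventCounter;

    public class EventCounterExample {                   // illustrative class, not part of the patch
      private static final Log LOG = LogFactory.getLog(EventCounterExample.class);

      public static void main(String[] args) {
        LOG.warn("disk nearly full");                    // counted as a WARN event
        LOG.error("block checksum mismatch");            // counted as an ERROR event
        // JvmMetrics turns the accumulated counts into logWarn/logError/... metrics
        System.out.println("warn events so far: " + EventCounter.getWarn());
      }
    }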

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java?view=auto&rev=556691
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java Mon Jul 16 11:45:07 2007
@@ -0,0 +1,173 @@
+/*
+ * JvmMetrics.java
+ *
+ * Created on July 2, 2007, 12:27 PM
+ */
+
+package org.apache.hadoop.metrics.jvm;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.MemoryUsage;
+import java.lang.management.ThreadInfo;
+import java.lang.management.ThreadMXBean;
+import org.apache.hadoop.metrics.MetricsContext;
+import org.apache.hadoop.metrics.MetricsRecord;
+import org.apache.hadoop.metrics.MetricsUtil;
+import org.apache.hadoop.metrics.Updater;
+
+import static java.lang.Thread.State.*;
+import java.lang.management.GarbageCollectorMXBean;
+import java.util.List;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics.MetricsException;
+
+/**
+ * Singleton class which reports Java Virtual Machine metrics to the metrics API.  
+ * Any application can create an instance of this class in order to emit
+ * Java VM metrics.  
+ */
+public class JvmMetrics implements Updater {
+    
+    private static final float M = 1024*1024;
+    private static JvmMetrics theInstance = null;
+    private static Log log = LogFactory.getLog(JvmMetrics.class);
+    
+    private MetricsRecord metrics;
+    
+    // garbage collection counters
+    private long gcCount = 0;
+    private long gcTimeMillis = 0;
+    
+    // logging event counters
+    private long fatalCount = 0;
+    private long errorCount = 0;
+    private long warnCount  = 0;
+    private long infoCount  = 0;
+    
+    public synchronized static JvmMetrics init(String processName, String sessionId) {
+        if (theInstance != null) {
+            log.info("Cannot initialize JVM Metrics with processName=" + 
+                     processName + ", sessionId=" + sessionId + 
+                     " - already initialized");
+        }
+        else {
+            log.info("Initializing JVM Metrics with processName=" 
+                    + processName + ", sessionId=" + sessionId);
+            theInstance = new JvmMetrics(processName, sessionId);
+        }
+        return theInstance;
+    }
+    
+    /** Creates a new instance of JvmMetrics */
+    private JvmMetrics(String processName, String sessionId) {
+        MetricsContext context = MetricsUtil.getContext("jvm");
+        metrics = MetricsUtil.createRecord(context, "metrics");
+        metrics.setTag("processName", processName);
+        metrics.setTag("sessionId", sessionId);
+        context.registerUpdater(this);
+    }
+    
+    /**
+     * This will be called periodically (with the period being configuration
+     * dependent).
+     */
+    public void doUpdates(MetricsContext context) {
+        doMemoryUpdates();
+        doGarbageCollectionUpdates();
+        doThreadUpdates();
+        doEventCountUpdates();
+        metrics.update();
+    }
+    
+    private void doMemoryUpdates() {
+        MemoryMXBean memoryMXBean =
+               ManagementFactory.getMemoryMXBean();
+        MemoryUsage memNonHeap =
+                memoryMXBean.getNonHeapMemoryUsage();
+        MemoryUsage memHeap =
+                memoryMXBean.getHeapMemoryUsage();
+        metrics.setMetric("memNonHeapUsedM", memNonHeap.getUsed()/M);
+        metrics.setMetric("memNonHeapCommittedM", memNonHeap.getCommitted()/M);
+        metrics.setMetric("memHeapUsedM", memHeap.getUsed()/M);
+        metrics.setMetric("memHeapCommittedM", memHeap.getCommitted()/M);
+    }
+    
+    private void doGarbageCollectionUpdates() {
+        List<GarbageCollectorMXBean> gcBeans =
+                ManagementFactory.getGarbageCollectorMXBeans();
+        long count = 0;
+        long timeMillis = 0;
+        for (GarbageCollectorMXBean gcBean : gcBeans) {
+            count += gcBean.getCollectionCount();
+            timeMillis += gcBean.getCollectionTime();
+        }
+        metrics.incrMetric("gcCount", (int)(count - gcCount));
+        metrics.incrMetric("gcTimeMillis", (int)(timeMillis - gcTimeMillis));
+        
+        gcCount = count;
+        gcTimeMillis = timeMillis;
+    }
+    
+    private void doThreadUpdates() {
+        ThreadMXBean threadMXBean =
+                ManagementFactory.getThreadMXBean();
+        long threadIds[] = 
+                threadMXBean.getAllThreadIds();
+        ThreadInfo[] threadInfos =
+                threadMXBean.getThreadInfo(threadIds, 0);
+        
+        int threadsNew = 0;
+        int threadsRunnable = 0;
+        int threadsBlocked = 0;
+        int threadsWaiting = 0;
+        int threadsTimedWaiting = 0;
+        int threadsTerminated = 0;
+        
+        for (ThreadInfo threadInfo : threadInfos) {
+            Thread.State state = threadInfo.getThreadState();
+            if (state == NEW) {
+                threadsNew++;
+            } 
+            else if (state == RUNNABLE) {
+                threadsRunnable++;
+            }
+            else if (state == BLOCKED) {
+                threadsBlocked++;
+            }
+            else if (state == WAITING) {
+                threadsWaiting++;
+            } 
+            else if (state == TIMED_WAITING) {
+                threadsTimedWaiting++;
+            }
+            else if (state == TERMINATED) {
+                threadsTerminated++;
+            }
+        }
+        metrics.setMetric("threadsNew", threadsNew);
+        metrics.setMetric("threadsRunnable", threadsRunnable);
+        metrics.setMetric("threadsBlocked", threadsBlocked);
+        metrics.setMetric("threadsWaiting", threadsWaiting);
+        metrics.setMetric("threadsTimedWaiting", threadsTimedWaiting);
+        metrics.setMetric("threadsTerminated", threadsTerminated);
+    }
+    
+    private void doEventCountUpdates() {
+        long newFatal = EventCounter.getFatal();
+        long newError = EventCounter.getError();
+        long newWarn  = EventCounter.getWarn();
+        long newInfo  = EventCounter.getInfo();
+        
+        metrics.incrMetric("logFatal", (int)(newFatal - fatalCount));
+        metrics.incrMetric("logError", (int)(newError - errorCount));
+        metrics.incrMetric("logWarn",  (int)(newWarn - warnCount));
+        metrics.incrMetric("logInfo",  (int)(newInfo - infoCount));
+        
+        fatalCount = newFatal;
+        errorCount = newError;
+        warnCount  = newWarn;
+        infoCount  = newInfo;
+    }
+}
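
Usage is a single init() call per process, as the daemon changes above show; everything after that is driven by the metrics timer. A minimal standalone sketch (the process name and session id here are arbitrary examples):

    import org.apache.hadoop.metrics.jvm.JvmMetrics;

    public class JvmMetricsExample {                     // illustrative, not part of the patch
      public static void main(String[] args) throws InterruptedException {
        // One call per process; repeated calls just log a note and return the existing singleton.
        JvmMetrics.init("ExampleProcess", "session-42");
        // From here on, memory, GC, thread and logging-event metrics are emitted on the
        // period configured for the "jvm" context in hadoop-metrics.properties
        // (NullContext by default, i.e. nothing is reported until a real context is configured).
        Thread.sleep(30 * 1000);                         // keep the JVM alive so updates can fire
      }
    }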