Posted to common-commits@hadoop.apache.org by su...@apache.org on 2011/05/11 01:56:55 UTC

svn commit: r1101682 - in /hadoop/common/trunk: ./ conf/ ivy/ src/java/org/apache/hadoop/ipc/ src/java/org/apache/hadoop/ipc/metrics/ src/java/org/apache/hadoop/log/ src/java/org/apache/hadoop/log/metrics/ src/java/org/apache/hadoop/metrics/file/ src/j...

Author: suresh
Date: Tue May 10 23:56:54 2011
New Revision: 1101682

URL: http://svn.apache.org/viewvc?rev=1101682&view=rev
Log:
HADOOP-6920. Metrics instrumentation to move to the new metrics2 framework. Contributed by Luke Lu.

Added:
    hadoop/common/trunk/src/java/org/apache/hadoop/log/EventCounter.java
    hadoop/common/trunk/src/java/org/apache/hadoop/log/metrics/
    hadoop/common/trunk/src/java/org/apache/hadoop/log/metrics/EventCounter.java
    hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
    hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/source/
    hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/source/TestJvmMetrics.java
Removed:
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcActivityMBean.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedActivityMBean.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMgt.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMgtMBean.java
Modified:
    hadoop/common/trunk/CHANGES.txt
    hadoop/common/trunk/conf/hadoop-metrics2.properties
    hadoop/common/trunk/conf/log4j.properties
    hadoop/common/trunk/ivy/hadoop-common-template.xml
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/package-info.java
    hadoop/common/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java
    hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java
    hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
    hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetrics.java
    hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/trunk/src/java/org/apache/hadoop/util/VersionInfo.java
    hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java
    hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java
    hadoop/common/trunk/src/test/findbugsExcludeFile.xml

Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Tue May 10 23:56:54 2011
@@ -26,6 +26,10 @@ Trunk (unreleased changes)
 
     HADOOP-6919. New metrics2 framework. (Luke Lu via acmurthy) 
 
+    HADOOP-6920. Metrics instrumentation to move to the new metrics2 framework.
+    (Luke Lu via suresh)
+
+
   IMPROVEMENTS
 
     HADOOP-7042. Updates to test-patch.sh to include failed test names and

Modified: hadoop/common/trunk/conf/hadoop-metrics2.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/conf/hadoop-metrics2.properties?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/conf/hadoop-metrics2.properties (original)
+++ hadoop/common/trunk/conf/hadoop-metrics2.properties Tue May 10 23:56:54 2011
@@ -2,15 +2,26 @@
 # See javadoc of package-info.java for org.apache.hadoop.metrics2 for details
 
 *.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
+# default sampling period, in seconds
+*.period=10
 
+# The namenode-metrics.out file will contain metrics from all contexts
 #namenode.sink.file.filename=namenode-metrics.out
+# Specifying a custom sampling period for the namenode:
+#namenode.sink.*.period=8
 
 #datanode.sink.file.filename=datanode-metrics.out
 
-#jobtracker.sink.file.filename=jobtracker-metrics.out
+# The following example splits metrics from different
+# contexts into different sinks (in this case, files)
+#jobtracker.sink.file_jvm.context=jvm
+#jobtracker.sink.file_jvm.filename=jobtracker-jvm-metrics.out
+#jobtracker.sink.file_mapred.context=mapred
+#jobtracker.sink.file_mapred.filename=jobtracker-mapred-metrics.out
 
 #tasktracker.sink.file.filename=tasktracker-metrics.out
 
 #maptask.sink.file.filename=maptask-metrics.out
 
 #reducetask.sink.file.filename=reducetask-metrics.out
+
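
For readers new to the metrics2 configuration syntax: the option names above
follow a [prefix].[sink|source].[instance].[option] pattern (see the metrics2
package-info javadoc referenced at the top of the file). A hedged sketch
combining the options above into one working configuration (the filenames are
illustrative):

    *.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
    *.period=10
    # all contexts from the namenode go to one file, sampled every 8 seconds
    namenode.sink.file.filename=namenode-metrics.out
    namenode.sink.*.period=8
    # jvm-context metrics from the jobtracker go to a dedicated file
    jobtracker.sink.file_jvm.context=jvm
    jobtracker.sink.file_jvm.filename=jobtracker-jvm-metrics.out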

Modified: hadoop/common/trunk/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/conf/log4j.properties?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/conf/log4j.properties (original)
+++ hadoop/common/trunk/conf/log4j.properties Tue May 10 23:56:54 2011
@@ -117,7 +117,7 @@ log4j.logger.org.jets3t.service.impl.res
 # Event Counter Appender
 # Sends counts of logging messages at different severity levels to Hadoop Metrics.
 #
-log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
+log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
 
 #
 # Job Summary Appender

Modified: hadoop/common/trunk/ivy/hadoop-common-template.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/ivy/hadoop-common-template.xml?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/ivy/hadoop-common-template.xml (original)
+++ hadoop/common/trunk/ivy/hadoop-common-template.xml Tue May 10 23:56:54 2011
@@ -148,7 +148,7 @@
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
-      <version>r07</version>
+      <version>r09</version>
     </dependency>
   </dependencies>
 </project>

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java Tue May 10 23:56:54 2011
@@ -1045,7 +1045,7 @@ public abstract class Server {
           }
           doSaslReply(SaslStatus.ERROR, null, sendToClient.getClass().getName(), 
               sendToClient.getLocalizedMessage());
-          rpcMetrics.authenticationFailures.inc();
+          rpcMetrics.incrAuthenticationFailures();
           String clientIP = this.toString();
           // attempting user could be null
           AUDITLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser);
@@ -1069,7 +1069,7 @@ public abstract class Server {
           if (LOG.isDebugEnabled()) {
             LOG.debug("SASL server successfully authenticated client: " + user);
           }
-          rpcMetrics.authenticationSuccesses.inc();
+          rpcMetrics.incrAuthenticationSuccesses();
           AUDITLOG.info(AUTH_SUCCESSFULL_FOR + user);
           saslContextEstablished = true;
         }
@@ -1227,6 +1227,7 @@ public abstract class Server {
         String protocolClassName = header.getProtocol();
         if (protocolClassName != null) {
           protocol = getProtocolClass(header.getProtocol(), conf);
+          rpcDetailedMetrics.init(protocol);
         }
       } catch (ClassNotFoundException cnfe) {
         throw new IOException("Unknown protocol: " + header.getProtocol());
@@ -1349,9 +1350,9 @@ public abstract class Server {
         if (LOG.isDebugEnabled()) {
           LOG.debug("Successfully authorized " + header);
         }
-        rpcMetrics.authorizationSuccesses.inc();
+        rpcMetrics.incrAuthorizationSuccesses();
       } catch (AuthorizationException ae) {
-        rpcMetrics.authorizationFailures.inc();
+        rpcMetrics.incrAuthorizationFailures();
         setupResponse(authFailedResponse, authFailedCall, Status.FATAL, null,
             ae.getClass().getName(), ae.getMessage());
         responder.doRespond(authFailedCall);
@@ -1520,10 +1521,8 @@ public abstract class Server {
     // Start the listener here and let it bind to the port
     listener = new Listener();
     this.port = listener.getAddress().getPort();    
-    this.rpcMetrics = new RpcMetrics(serverName,
-                          Integer.toString(this.port), this);
-    this.rpcDetailedMetrics = new RpcDetailedMetrics(serverName,
-                            Integer.toString(this.port));
+    this.rpcMetrics = RpcMetrics.create(this);
+    this.rpcDetailedMetrics = RpcDetailedMetrics.create(this.port);
     this.tcpNoDelay = conf.getBoolean("ipc.server.tcpnodelay", false);
 
     // Create the responder here
@@ -1759,7 +1758,7 @@ public abstract class Server {
     int count =  (buffer.remaining() <= NIO_BUFFER_LIMIT) ?
                  channel.write(buffer) : channelIO(null, channel, buffer);
     if (count > 0) {
-      rpcMetrics.sentBytes.inc(count);
+      rpcMetrics.incrSentBytes(count);
     }
     return count;
   }
@@ -1779,7 +1778,7 @@ public abstract class Server {
     int count = (buffer.remaining() <= NIO_BUFFER_LIMIT) ?
                 channel.read(buffer) : channelIO(channel, null, buffer);
     if (count > 0) {
-      rpcMetrics.receivedBytes.inc(count);
+      rpcMetrics.incrReceivedBytes(count);
     }
     return count;
   }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java Tue May 10 23:56:54 2011
@@ -41,7 +41,6 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.*;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
 
 /** An RpcEngine implementation for Writable data. */
 @InterfaceStability.Evolving
@@ -390,9 +389,8 @@ public class WritableRpcEngine implement
         Invocation call = (Invocation)param;
         if (verbose) log("Call: " + call);
 
-        Method method =
-          protocol.getMethod(call.getMethodName(),
-                                   call.getParameterClasses());
+        Method method = protocol.getMethod(call.getMethodName(),
+                                           call.getParameterClasses());
         method.setAccessible(true);
 
         // Verify rpc version
@@ -429,24 +427,10 @@ public class WritableRpcEngine implement
                     " queueTime= " + qTime +
                     " procesingTime= " + processingTime);
         }
-        rpcMetrics.rpcQueueTime.inc(qTime);
-        rpcMetrics.rpcProcessingTime.inc(processingTime);
-
-        MetricsTimeVaryingRate m =
-         (MetricsTimeVaryingRate) rpcDetailedMetrics.registry.get(call.getMethodName());
-      	if (m == null) {
-      	  try {
-      	    m = new MetricsTimeVaryingRate(call.getMethodName(),
-      	                                        rpcDetailedMetrics.registry);
-      	  } catch (IllegalArgumentException iae) {
-      	    // the metrics has been registered; re-fetch the handle
-      	    LOG.info("Error register " + call.getMethodName(), iae);
-      	    m = (MetricsTimeVaryingRate) rpcDetailedMetrics.registry.get(
-      	        call.getMethodName());
-      	  }
-      	}
-        m.inc(processingTime);
-
+        rpcMetrics.addRpcQueueTime(qTime);
+        rpcMetrics.addRpcProcessingTime(processingTime);
+        rpcDetailedMetrics.addProcessingTime(call.getMethodName(),
+                                             processingTime);
         if (verbose) log("Return: "+value);
 
         return new ObjectWritable(method.getReturnType(), value);

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java Tue May 10 23:56:54 2011
@@ -20,65 +20,61 @@ package org.apache.hadoop.ipc.metrics;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-import org.apache.hadoop.metrics.util.MetricsBase;
-import org.apache.hadoop.metrics.util.MetricsRegistry;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+import org.apache.hadoop.metrics2.lib.MutableRates;
 
 /**
- * 
- * This class is for maintaining  the various RPC method related statistics
+ * This class is for maintaining RPC method-related statistics
  * and publishing them through the metrics interfaces.
- * This also registers the JMX MBean for RPC.
  */
 @InterfaceAudience.Private
-public class RpcDetailedMetrics implements Updater {
-  public final MetricsRegistry registry = new MetricsRegistry();
-  private final MetricsRecord metricsRecord;
-  private static final Log LOG = LogFactory.getLog(RpcDetailedMetrics.class);
-  RpcDetailedActivityMBean rpcMBean;
-  
+@Metrics(about="Per method RPC metrics", context="rpcdetailed")
+public class RpcDetailedMetrics {
+
+  @Metric MutableRates rates;
+
+  static final Log LOG = LogFactory.getLog(RpcDetailedMetrics.class);
+  final MetricsRegistry registry;
+  final String name;
+
+  RpcDetailedMetrics(int port) {
+    name = "RpcDetailedActivityForPort"+ port;
+    registry = new MetricsRegistry("rpcdetailed")
+        .tag("port", "RPC port", String.valueOf(port));
+    LOG.debug(registry.info());
+  }
+
+  public String name() { return name; }
+
+  public static RpcDetailedMetrics create(int port) {
+    RpcDetailedMetrics m = new RpcDetailedMetrics(port);
+    return DefaultMetricsSystem.instance().register(m.name, null, m);
+  }
+
   /**
-   * Statically added metrics to expose at least one metrics, without
-   * which other dynamically added metrics are not exposed over JMX.
+   * Initialize the metrics for JMX with protocol methods
+   * @param protocol the protocol class
    */
-  final MetricsTimeVaryingRate getProtocolVersion = 
-    new MetricsTimeVaryingRate("getProtocolVersion", registry);
-  
-  public RpcDetailedMetrics(final String hostName, final String port) {
-    MetricsContext context = MetricsUtil.getContext("rpc");
-    metricsRecord = MetricsUtil.createRecord(context, "detailed-metrics");
-
-    metricsRecord.setTag("port", port);
-
-    LOG.info("Initializing RPC Metrics with hostName=" 
-        + hostName + ", port=" + port);
-
-    context.registerUpdater(this);
-    
-    // Need to clean up the interface to RpcMgt - don't need both metrics and server params
-    rpcMBean = new RpcDetailedActivityMBean(registry, hostName, port);
+  public void init(Class<?> protocol) {
+    rates.init(protocol);
   }
-  
-  
+
   /**
-   * Push the metrics to the monitoring subsystem on doUpdate() call.
+   * Add an RPC processing time sample
+   * @param name  of the RPC call
+   * @param processingTime  the processing time
    */
-  public void doUpdates(final MetricsContext context) {
-    
-    synchronized (this) {
-      for (MetricsBase m : registry.getMetricsList()) {
-        m.pushMetric(metricsRecord);
-      }
-    }
-    metricsRecord.update();
+  //@Override // some instrumentation interface
+  public void addProcessingTime(String name, int processingTime) {
+    rates.add(name, processingTime);
   }
 
-  public void shutdown() {
-    if (rpcMBean != null) 
-      rpcMBean.shutdown();
-  }
+  /**
+   * Shutdown the instrumentation for the process
+   */
+  //@Override // some instrumentation interface
+  public void shutdown() {}
 }
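
For orientation, a minimal sketch of the new call pattern (the port number
and MyProtocol are hypothetical; Server.java and WritableRpcEngine.java above
are the real callers):

    import org.apache.hadoop.ipc.metrics.RpcDetailedMetrics;

    public class RpcDetailedMetricsSketch {
      interface MyProtocol { String echo(String s); }  // stand-in protocol

      public static void main(String[] args) {
        // register a per-port source with the default metrics system
        RpcDetailedMetrics details = RpcDetailedMetrics.create(8020);
        // create one rate metric per protocol method (here: echo)
        details.init(MyProtocol.class);
        // record a 12 ms processing-time sample for "echo"; MutableRates
        // lazily creates the rate on first use of a new name
        details.addProcessingTime("echo", 12);
      }
    }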

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java Tue May 10 23:56:54 2011
@@ -19,136 +19,141 @@ package org.apache.hadoop.ipc.metrics;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.ipc.Server;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-import org.apache.hadoop.metrics.util.MetricsBase;
-import org.apache.hadoop.metrics.util.MetricsIntValue;
-import org.apache.hadoop.metrics.util.MetricsRegistry;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingLong;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+import org.apache.hadoop.metrics2.lib.MutableCounterInt;
+import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MutableRate;
 
 /**
- * 
  * This class is for maintaining  the various RPC statistics
  * and publishing them through the metrics interfaces.
- * This also registers the JMX MBean for RPC.
- * <p>
- * This class has a number of metrics variables that are publicly accessible;
- * these variables (objects) have methods to update their values;
- * for example:
- *  <p> {@link #rpcQueueTime}.inc(time)
- *
  */
 @InterfaceAudience.Private
-public class RpcMetrics implements Updater {
-  private final MetricsRegistry registry = new MetricsRegistry();
-  private final MetricsRecord metricsRecord;
-  private final Server myServer;
-  private static final Log LOG = LogFactory.getLog(RpcMetrics.class);
-  RpcActivityMBean rpcMBean;
+@Metrics(about="Aggregate RPC metrics", context="rpc")
+public class RpcMetrics {
+
+  static final Log LOG = LogFactory.getLog(RpcMetrics.class);
+  final Server server;
+  final MetricsRegistry registry;
+  final String name;
   
-  public RpcMetrics(final String hostName, final String port,
-      final Server server) {
-    myServer = server;
-    MetricsContext context = MetricsUtil.getContext("rpc");
-    metricsRecord = MetricsUtil.createRecord(context, "metrics");
-
-    metricsRecord.setTag("port", port);
-
-    LOG.info("Initializing RPC Metrics with hostName=" 
-        + hostName + ", port=" + port);
-
-    context.registerUpdater(this);
-    
-    // Need to clean up the interface to RpcMgt - don't need both metrics and server params
-    rpcMBean = new RpcActivityMBean(registry, hostName, port);
+  RpcMetrics(Server server) {
+    String port = String.valueOf(server.getListenerAddress().getPort());
+    name = "RpcActivityForPort"+ port;
+    this.server = server;
+    registry = new MetricsRegistry("rpc").tag("port", "RPC port", port);
+    LOG.debug("Initialized "+ registry);
   }
-  
-  
-  /**
-   * The metrics variables are public:
-   *  - they can be set directly by calling their set/inc methods
-   *  -they can also be read directly - e.g. JMX does this.
-   */
+
+  public String name() { return name; }
+
+  public static RpcMetrics create(Server server) {
+    RpcMetrics m = new RpcMetrics(server);
+    return DefaultMetricsSystem.instance().register(m.name, null, m);
+  }
+
+  @Metric("Number of received bytes") MutableCounterLong receivedBytes;
+  @Metric("Number of sent bytes") MutableCounterLong sentBytes;
+  @Metric("Queue time") MutableRate rpcQueueTime;
+  @Metric("Processing time") MutableRate rpcProcessingTime;
+  @Metric("Number of authentication failures")
+  MutableCounterInt rpcAuthenticationFailures;
+  @Metric("Number of authentication successes")
+  MutableCounterInt rpcAuthenticationSuccesses;
+  @Metric("Number of authorization failures")
+  MutableCounterInt rpcAuthorizationFailures;
+  @Metric("Number of authorization successes")
+  MutableCounterInt rpcAuthorizationSuccesses;
+
+  @Metric("Number of open connections") public int numOpenConnections() {
+    return server.getNumOpenConnections();
+  }
+
+  @Metric("Length of the call queue") public int callQueueLength() {
+    return server.getCallQueueLen();
+  }
+
+  // Public instrumentation methods that could be extracted to an
+  // abstract class if we decide to do custom instrumentation classes a la
+  // JobTrackerInstrumentation. The methods with a //@Override comment are
+  // candidates for abstract methods in an abstract instrumentation class.
 
   /**
-   * metrics - number of bytes received
-   */
-  public final MetricsTimeVaryingLong receivedBytes = 
-         new MetricsTimeVaryingLong("ReceivedBytes", registry);
-  /**
-   * metrics - number of bytes sent
-   */
-  public final MetricsTimeVaryingLong sentBytes = 
-         new MetricsTimeVaryingLong("SentBytes", registry);
-  /**
-   * metrics - rpc queue time
-   */
-  public final MetricsTimeVaryingRate rpcQueueTime =
-          new MetricsTimeVaryingRate("RpcQueueTime", registry);
-  /**
-   * metrics - rpc processing time
+   * One authentication failure event
    */
-  public final MetricsTimeVaryingRate rpcProcessingTime =
-          new MetricsTimeVaryingRate("RpcProcessingTime", registry);
+  //@Override
+  public void incrAuthenticationFailures() {
+    rpcAuthenticationFailures.incr();
+  }
+
   /**
-   * metrics - number of open connections
+   * One authentication success event
    */
-  public final MetricsIntValue numOpenConnections = 
-          new MetricsIntValue("NumOpenConnections", registry);
+  //@Override
+  public void incrAuthenticationSuccesses() {
+    rpcAuthenticationSuccesses.incr();
+  }
+
   /**
-   * metrics - length of the queue
+   * One authorization success event
    */
-  public final MetricsIntValue callQueueLen = 
-          new MetricsIntValue("callQueueLen", registry);
+  //@Override
+  public void incrAuthorizationSuccesses() {
+    rpcAuthorizationSuccesses.incr();
+  }
+
   /**
-   * metrics - number of failed authentications
+   * One authorization failure event
    */
-  public final MetricsTimeVaryingInt authenticationFailures = 
-          new MetricsTimeVaryingInt("rpcAuthenticationFailures", registry);
+  //@Override
+  public void incrAuthorizationFailures() {
+    rpcAuthorizationFailures.incr();
+  }
+
   /**
-   * metrics - number of successful authentications
+   * Shutdown the instrumentation for the process
    */
-  public final MetricsTimeVaryingInt authenticationSuccesses = 
-          new MetricsTimeVaryingInt("rpcAuthenticationSuccesses", registry);
+  //@Override
+  public void shutdown() {}
+
   /**
-   * metrics - number of failed authorizations
+   * Increment sent bytes by count
+   * @param count the number of bytes to add
    */
-  public final MetricsTimeVaryingInt authorizationFailures = 
-          new MetricsTimeVaryingInt("rpcAuthorizationFailures", registry);
+  //@Override
+  public void incrSentBytes(int count) {
+    sentBytes.incr(count);
+  }
+
   /**
-   * metrics - number of successful authorizations
+   * Increment received bytes by count
+   * @param count the number of bytes to add
    */
-  public final MetricsTimeVaryingInt authorizationSuccesses = 
-         new MetricsTimeVaryingInt("rpcAuthorizationSuccesses", registry);
-  
+  //@Override
+  public void incrReceivedBytes(int count) {
+    receivedBytes.incr(count);
+  }
+
   /**
-   * Push the metrics to the monitoring subsystem on doUpdate() call.
+   * Add an RPC queue time sample
+   * @param qTime the queue time
    */
-  public void doUpdates(final MetricsContext context) {
-    
-    synchronized (this) {
-      // ToFix - fix server to use the following two metrics directly so
-      // the metrics do not have be copied here.
-      numOpenConnections.set(myServer.getNumOpenConnections());
-      callQueueLen.set(myServer.getCallQueueLen());
-      for (MetricsBase m : registry.getMetricsList()) {
-        m.pushMetric(metricsRecord);
-      }
-    }
-    metricsRecord.update();
+  //@Override
+  public void addRpcQueueTime(int qTime) {
+    rpcQueueTime.add(qTime);
   }
 
   /**
-   * shutdown the metrics
-   */
-  public void shutdown() {
-    if (rpcMBean != null) 
-      rpcMBean.shutdown();
+   * Add an RPC processing time sample
+   * @param processingTime the processing time
+   */
+  //@Override
+  public void addRpcProcessingTime(int processingTime) {
+    rpcProcessingTime.add(processingTime);
   }
 }
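
The annotation-driven style above generalizes to any source. A hedged sketch
of a custom source in the same style (ExampleMetrics and its metric names are
hypothetical):

    import org.apache.hadoop.metrics2.annotation.Metric;
    import org.apache.hadoop.metrics2.annotation.Metrics;
    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
    import org.apache.hadoop.metrics2.lib.MutableCounterLong;
    import org.apache.hadoop.metrics2.lib.MutableRate;

    @Metrics(about="Example metrics", context="example")
    class ExampleMetrics {
      // mutable metric fields are instantiated when the source is registered
      @Metric("Requests served") MutableCounterLong requests;
      @Metric("Request latency") MutableRate requestTime;

      // an annotated no-arg method publishes a computed gauge, as
      // numOpenConnections() and callQueueLength() do above
      @Metric("Current queue depth") public int queueDepth() { return 0; }

      static ExampleMetrics create() {
        return DefaultMetricsSystem.instance().register(new ExampleMetrics());
      }
    }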

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/package-info.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/package-info.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/package-info.java Tue May 10 23:56:54 2011
@@ -15,6 +15,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+/**
+ * RPC related metrics.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 package org.apache.hadoop.ipc.metrics;

Added: hadoop/common/trunk/src/java/org/apache/hadoop/log/EventCounter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/log/EventCounter.java?rev=1101682&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/log/EventCounter.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/log/EventCounter.java Tue May 10 23:56:54 2011
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.log;
+
+/**
+ * A log4j Appender that simply counts logging events in four levels:
+ * fatal, error, warn and info. The class name is used in log4j.properties
+ * @deprecated use {@link org.apache.hadoop.log.metrics.EventCounter} instead
+ */
+@Deprecated
+public class EventCounter extends org.apache.hadoop.log.metrics.EventCounter {
+  static {
+    // The logging system is not started yet.
+    System.err.println("WARNING: "+ EventCounter.class.getName() +
+        " is deprecated. Please use "+
+        org.apache.hadoop.log.metrics.EventCounter.class.getName() +
+        " in all the log4j.properties files.");
+  }
+}

Added: hadoop/common/trunk/src/java/org/apache/hadoop/log/metrics/EventCounter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/log/metrics/EventCounter.java?rev=1101682&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/log/metrics/EventCounter.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/log/metrics/EventCounter.java Tue May 10 23:56:54 2011
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.log.metrics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import org.apache.log4j.AppenderSkeleton;
+import org.apache.log4j.Level;
+import org.apache.log4j.spi.LoggingEvent;
+
+/**
+ * A log4j Appender that simply counts logging events in four levels:
+ * fatal, error, warn and info. The class name is used in log4j.properties
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class EventCounter extends AppenderSkeleton {
+  private static final int FATAL = 0;
+  private static final int ERROR = 1;
+  private static final int WARN = 2;
+  private static final int INFO = 3;
+
+  private static class EventCounts {
+    private final long[] counts = {0, 0, 0, 0};
+
+    private synchronized void incr(int i) {
+      ++counts[i];
+    }
+
+    private synchronized long get(int i) {
+      return counts[i];
+    }
+  }
+
+  private static EventCounts counts = new EventCounts();
+
+  @InterfaceAudience.Private
+  public static long getFatal() {
+    return counts.get(FATAL);
+  }
+
+  @InterfaceAudience.Private
+  public static long getError() {
+    return counts.get(ERROR);
+  }
+
+  @InterfaceAudience.Private
+  public static long getWarn() {
+    return counts.get(WARN);
+  }
+
+  @InterfaceAudience.Private
+  public static long getInfo() {
+    return counts.get(INFO);
+  }
+
+  @Override
+  public void append(LoggingEvent event) {
+    Level level = event.getLevel();
+    // depends on the api, == might not work
+    // see HADOOP-7055 for details
+    if (level.equals(Level.INFO)) {
+      counts.incr(INFO);
+    }
+    else if (level.equals(Level.WARN)) {
+      counts.incr(WARN);
+    }
+    else if (level.equals(Level.ERROR)) {
+      counts.incr(ERROR);
+    }
+    else if (level.equals(Level.FATAL)) {
+      counts.incr(FATAL);
+    }
+  }
+
+  @Override
+  public void close() {
+  }
+
+  @Override
+  public boolean requiresLayout() {
+    return false;
+  }
+}
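
A hedged sketch of the appender in use; normally it is wired through
log4j.properties (as in the conf/log4j.properties change above), but it can
also be attached programmatically:

    import org.apache.hadoop.log.metrics.EventCounter;
    import org.apache.log4j.Logger;

    public class EventCounterSketch {
      public static void main(String[] args) {
        // equivalent to the log4j.appender.EventCounter properties line
        Logger.getRootLogger().addAppender(new EventCounter());

        Logger log = Logger.getLogger(EventCounterSketch.class);
        log.warn("something looks off");
        log.error("something failed");

        // the static counters now reflect the two events above
        System.out.println("warn="  + EventCounter.getWarn());   // 1
        System.out.println("error=" + EventCounter.getError());  // 1
      }
    }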

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java Tue May 10 23:56:54 2011
@@ -42,7 +42,7 @@ import org.apache.hadoop.metrics.spi.Out
  * myContextName.fileName=/tmp/metrics.log
  * myContextName.period=5
  * </pre>
- * @deprecated use {@link org.apache.hadoop.metrics2.sink.FileSink} instead.
+ * @see org.apache.hadoop.metrics2.sink.FileSink for metrics 2.0.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java Tue May 10 23:56:54 2011
@@ -17,82 +17,17 @@
  */
 package org.apache.hadoop.metrics.jvm;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Level;
-import org.apache.log4j.spi.LoggingEvent;
-
 /**
  * A log4J Appender that simply counts logging events in three levels:
  * fatal, error and warn.
  */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class EventCounter extends AppenderSkeleton {
-        
-    private static final int FATAL = 0;
-    private static final int ERROR = 1;
-    private static final int WARN  = 2;
-    private static final int INFO  = 3;
-    
-    private static class EventCounts {
-        private final long[] counts = { 0, 0, 0, 0 };
-    
-        private synchronized void incr(int i) { 
-            ++counts[i]; 
-        }
-        
-        private synchronized long get(int i) { 
-            return counts[i]; 
-        }
-    }
-    private static EventCounts counts = new EventCounts();
-    
-    public static long getFatal() { 
-        return counts.get(FATAL); 
-    }
-    
-    public static long getError() { 
-        return counts.get(ERROR); 
-    }
-    
-    public static long getWarn() { 
-        return counts.get(WARN);  
-    }
-    
-    public static long getInfo() {
-        return counts.get(INFO);
-    }
-    
-    public void append(LoggingEvent event) {
-        Level level = event.getLevel();
-        if (level == Level.INFO) {
-            counts.incr(INFO);
-        }
-        else if (level == Level.WARN) {
-            counts.incr(WARN);
-        }
-        else if (level == Level.ERROR) {
-            counts.incr(ERROR);
-        }
-        else if (level == Level.FATAL) {
-            counts.incr(FATAL);
-        }
+public class EventCounter extends org.apache.hadoop.log.metrics.EventCounter {
 
-    }
-    
-    // Strange: these two methods are abstract in AppenderSkeleton, but not
-    // included in the javadoc (log4j 1.2.13).
-    
-    public void close() {
-    }
-    public boolean requiresLayout() {
-        return false;
-    }
-    
-    
-    
+  static {
+    // The logging system is not started yet.
+    System.err.println("WARNING: "+ EventCounter.class.getName() +
+        " is deprecated. Please use "+
+        org.apache.hadoop.log.metrics.EventCounter.class.getName() +
+        " in all the log4j.properties files.");
+  }
 }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java Tue May 10 23:56:54 2011
@@ -121,8 +121,9 @@ public enum DefaultMetricsSystem {
     if (sourceNames.map.containsKey(name)) {
       if (dupOK) {
         return name;
+      } else if (!miniClusterMode) {
+        throw new MetricsException("Metrics source "+ name +" already exists!");
       }
-      throw new MetricsException("Metrics source "+ name +" already exists!");
     }
     return sourceNames.uniqueName(name);
   }
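
The effect of this change, sketched: outside mini-cluster mode a duplicate
source name still fails fast; in mini-cluster mode it falls through to
uniqueName(). A hedged illustration, assuming register() funnels source names
through the check above:

    import org.apache.hadoop.metrics2.MetricsException;
    import org.apache.hadoop.metrics2.MetricsSystem;
    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
    import org.apache.hadoop.metrics2.source.JvmMetrics;

    public class DupSourceSketch {
      public static void main(String[] args) {
        MetricsSystem ms = DefaultMetricsSystem.instance();
        JvmMetrics.create("test", "s1", ms);     // registers "JvmMetrics"
        try {
          JvmMetrics.create("test", "s1", ms);   // same source name again
        } catch (MetricsException expected) {
          // "Metrics source JvmMetrics already exists!"
          System.err.println(expected.getMessage());
        }
      }
    }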

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetrics.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetrics.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetrics.java Tue May 10 23:56:54 2011
@@ -18,16 +18,150 @@
 
 package org.apache.hadoop.metrics2.source;
 
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.MemoryUsage;
+import java.lang.management.ThreadInfo;
+import java.lang.management.ThreadMXBean;
+import static java.lang.Thread.State.*;
+import java.lang.management.GarbageCollectorMXBean;
+import java.util.Map;
+import java.util.List;
+
+import com.google.common.collect.Maps;
+
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.log.metrics.EventCounter;
+import org.apache.hadoop.metrics2.MetricsCollector;
+import org.apache.hadoop.metrics2.MetricsInfo;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.Interns;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.*;
+import static org.apache.hadoop.metrics2.impl.MsInfo.*;
 
 /**
- * JVM related metrics. Mostly used by various servers as part of the metrics
- * they export.
+ * JVM and logging related metrics.
+ * Mostly used by various servers as a part of the metrics they export.
  */
 @InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class JvmMetrics {
-  // placeholder for javadoc to prevent broken links, until
-  // HADOOP-6920
+public class JvmMetrics implements MetricsSource {
+  enum Singleton {
+    INSTANCE;
+
+    JvmMetrics impl;
+
+    synchronized JvmMetrics init(String processName, String sessionId) {
+      if (impl == null) {
+        impl = create(processName, sessionId, DefaultMetricsSystem.instance());
+      }
+      return impl;
+    }
+  }
+
+  static final float M = 1024*1024;
+
+  final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
+  final List<GarbageCollectorMXBean> gcBeans =
+      ManagementFactory.getGarbageCollectorMXBeans();
+  final ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean();
+  final String processName, sessionId;
+  final Map<String, MetricsInfo[]> gcInfoCache = Maps.newHashMap();
+
+  JvmMetrics(String processName, String sessionId) {
+    this.processName = processName;
+    this.sessionId = sessionId;
+  }
+
+  public static JvmMetrics create(String processName, String sessionId,
+                                  MetricsSystem ms) {
+    return ms.register(JvmMetrics.name(), JvmMetrics.description(),
+                       new JvmMetrics(processName, sessionId));
+  }
+
+  public static JvmMetrics initSingleton(String processName, String sessionId) {
+    return Singleton.INSTANCE.init(processName, sessionId);
+  }
+
+  @Override
+  public void getMetrics(MetricsCollector collector, boolean all) {
+    MetricsRecordBuilder rb = collector.addRecord(JvmMetrics)
+        .setContext("jvm").tag(ProcessName, processName)
+        .tag(SessionId, sessionId);
+    getMemoryUsage(rb);
+    getGcUsage(rb);
+    getThreadUsage(rb);
+    getEventCounters(rb);
+  }
+
+  private void getMemoryUsage(MetricsRecordBuilder rb) {
+    MemoryUsage memNonHeap = memoryMXBean.getNonHeapMemoryUsage();
+    MemoryUsage memHeap = memoryMXBean.getHeapMemoryUsage();
+    rb.addGauge(MemNonHeapUsedM, memNonHeap.getUsed() / M)
+      .addGauge(MemNonHeapCommittedM, memNonHeap.getCommitted() / M)
+      .addGauge(MemHeapUsedM, memHeap.getUsed() / M)
+      .addGauge(MemHeapCommittedM, memHeap.getCommitted() / M);
+  }
+
+  private void getGcUsage(MetricsRecordBuilder rb) {
+    long count = 0;
+    long timeMillis = 0;
+    for (GarbageCollectorMXBean gcBean : gcBeans) {
+      long c = gcBean.getCollectionCount();
+      long t = gcBean.getCollectionTime();
+      MetricsInfo[] gcInfo = getGcInfo(gcBean.getName());
+      rb.addCounter(gcInfo[0], c).addCounter(gcInfo[1], t);
+      count += c;
+      timeMillis += t;
+    }
+    rb.addCounter(GcCount, count)
+      .addCounter(GcTimeMillis, timeMillis);
+  }
+
+  private synchronized MetricsInfo[] getGcInfo(String gcName) {
+    MetricsInfo[] gcInfo = gcInfoCache.get(gcName);
+    if (gcInfo == null) {
+      gcInfo = new MetricsInfo[2];
+      gcInfo[0] = Interns.info("GcCount"+ gcName, "GC Count for "+ gcName);
+      gcInfo[1] = Interns.info("GcTimeMillis"+ gcName, "GC Time for "+ gcName);
+      gcInfoCache.put(gcName, gcInfo);
+    }
+    return gcInfo;
+  }
+
+  private void getThreadUsage(MetricsRecordBuilder rb) {
+    int threadsNew = 0;
+    int threadsRunnable = 0;
+    int threadsBlocked = 0;
+    int threadsWaiting = 0;
+    int threadsTimedWaiting = 0;
+    int threadsTerminated = 0;
+    long threadIds[] = threadMXBean.getAllThreadIds();
+    for (ThreadInfo threadInfo : threadMXBean.getThreadInfo(threadIds, 0)) {
+      if (threadInfo == null) continue; // race protection
+      switch (threadInfo.getThreadState()) {
+        case NEW:           threadsNew++;           break;
+        case RUNNABLE:      threadsRunnable++;      break;
+        case BLOCKED:       threadsBlocked++;       break;
+        case WAITING:       threadsWaiting++;       break;
+        case TIMED_WAITING: threadsTimedWaiting++;  break;
+        case TERMINATED:    threadsTerminated++;    break;
+      }
+    }
+    rb.addGauge(ThreadsNew, threadsNew)
+      .addGauge(ThreadsRunnable, threadsRunnable)
+      .addGauge(ThreadsBlocked, threadsBlocked)
+      .addGauge(ThreadsWaiting, threadsWaiting)
+      .addGauge(ThreadsTimedWaiting, threadsTimedWaiting)
+      .addGauge(ThreadsTerminated, threadsTerminated);
+  }
+
+  private void getEventCounters(MetricsRecordBuilder rb) {
+    rb.addCounter(LogFatal, EventCounter.getFatal())
+      .addCounter(LogError, EventCounter.getError())
+      .addCounter(LogWarn, EventCounter.getWarn())
+      .addCounter(LogInfo, EventCounter.getInfo());
+  }
 }

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java?rev=1101682&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java Tue May 10 23:56:54 2011
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.source;
+
+import com.google.common.base.Objects;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.metrics2.MetricsInfo;
+
+/**
+ * JVM and logging related metrics info instances
+ */
+@InterfaceAudience.Private
+public enum JvmMetricsInfo implements MetricsInfo {
+  JvmMetrics("JVM related metrics etc."), // record info
+  // metrics
+  MemNonHeapUsedM("Non-heap memory used in MB"),
+  MemNonHeapCommittedM("Non-heap memory committed in MB"),
+  MemHeapUsedM("Heap memory used in MB"),
+  MemHeapCommittedM("Heap memory committed in MB"),
+  GcCount("Total GC count"),
+  GcTimeMillis("Total GC time in milliseconds"),
+  ThreadsNew("Number of new threads"),
+  ThreadsRunnable("Number of runnable threads"),
+  ThreadsBlocked("Number of blocked threads"),
+  ThreadsWaiting("Number of waiting threads"),
+  ThreadsTimedWaiting("Number of timed waiting threads"),
+  ThreadsTerminated("Number of terminated threads"),
+  LogFatal("Total number of fatal log events"),
+  LogError("Total number of error log events"),
+  LogWarn("Total number of warning log events"),
+  LogInfo("Total number of info log events");
+
+  private final String desc;
+
+  JvmMetricsInfo(String desc) { this.desc = desc; }
+
+  @Override public String description() { return desc; }
+
+  @Override public String toString() {
+    return Objects.toStringHelper(this)
+        .add("name", name()).add("description", desc)
+        .toString();
+  }
+}

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java Tue May 10 23:56:54 2011
@@ -52,14 +52,11 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-import org.apache.hadoop.metrics.util.MetricsBase;
-import org.apache.hadoop.metrics.util.MetricsRegistry;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MutableRate;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
@@ -87,34 +84,15 @@ public class UserGroupInformation {
    * UgiMetrics maintains UGI activity statistics
    * and publishes them through the metrics interfaces.
    */
-  static class UgiMetrics implements Updater {
-    final MetricsTimeVaryingRate loginSuccess;
-    final MetricsTimeVaryingRate loginFailure;
-    private final MetricsRecord metricsRecord;
-    private final MetricsRegistry registry;
-
-    UgiMetrics() {
-      registry = new MetricsRegistry();
-      loginSuccess = new MetricsTimeVaryingRate("loginSuccess", registry,
-          "Rate of successful kerberos logins and time taken in milliseconds");
-      loginFailure = new MetricsTimeVaryingRate("loginFailure", registry,
-          "Rate of failed kerberos logins and time taken in milliseconds");
-      final MetricsContext metricsContext = MetricsUtil.getContext("ugi");
-      metricsRecord = MetricsUtil.createRecord(metricsContext, "ugi");
-      metricsContext.registerUpdater(this);
-    }
-
-    /**
-     * Push the metrics to the monitoring subsystem on doUpdate() call.
-     */
-    @Override
-    public void doUpdates(final MetricsContext context) {
-      synchronized (this) {
-        for (MetricsBase m : registry.getMetricsList()) {
-          m.pushMetric(metricsRecord);
-        }
-      }
-      metricsRecord.update();
+  @Metrics(about="User and group related metrics", context="ugi")
+  static class UgiMetrics {
+    @Metric("Rate of successful kerberos logins and latency (milliseconds)")
+    MutableRate loginSuccess;
+    @Metric("Rate of failed kerberos logins and latency (milliseconds)")
+    MutableRate loginFailure;
+
+    static UgiMetrics create() {
+      return DefaultMetricsSystem.instance().register(new UgiMetrics());
     }
   }
   
@@ -180,7 +158,7 @@ public class UserGroupInformation {
   }
 
   /** Metrics to track UGI activity */
-  static UgiMetrics metrics = new UgiMetrics();
+  static UgiMetrics metrics = UgiMetrics.create();
   /** Are the static variables that depend on configuration initialized? */
   private static boolean isInitialized = false;
   /** Should we use Kerberos configuration? */
@@ -630,13 +608,13 @@ public class UserGroupInformation {
         new LoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, subject);
       start = System.currentTimeMillis();
       login.login();
-      metrics.loginSuccess.inc(System.currentTimeMillis() - start);
+      metrics.loginSuccess.add(System.currentTimeMillis() - start);
       loginUser = new UserGroupInformation(subject);
       loginUser.setLogin(login);
       loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
     } catch (LoginException le) {
       if (start > 0) {
-        metrics.loginFailure.inc(System.currentTimeMillis() - start);
+        metrics.loginFailure.add(System.currentTimeMillis() - start);
       }
       throw new IOException("Login failure for " + user + " from keytab " + 
                             path, le);
@@ -694,12 +672,12 @@ public class UserGroupInformation {
         LOG.info("Initiating re-login for " + keytabPrincipal);
         start = System.currentTimeMillis();
         login.login();
-        metrics.loginSuccess.inc(System.currentTimeMillis() - start);
+        metrics.loginSuccess.add(System.currentTimeMillis() - start);
         setLogin(login);
       }
     } catch (LoginException le) {
       if (start > 0) {
-        metrics.loginFailure.inc(System.currentTimeMillis() - start);
+        metrics.loginFailure.add(System.currentTimeMillis() - start);
       }
       throw new IOException("Login failure for " + keytabPrincipal + 
           " from keytab " + keytabFile, le);
@@ -779,7 +757,7 @@ public class UserGroupInformation {
        
       start = System.currentTimeMillis();
       login.login();
-      metrics.loginSuccess.inc(System.currentTimeMillis() - start);
+      metrics.loginSuccess.add(System.currentTimeMillis() - start);
       UserGroupInformation newLoginUser = new UserGroupInformation(subject);
       newLoginUser.setLogin(login);
       newLoginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
@@ -787,7 +765,7 @@ public class UserGroupInformation {
       return newLoginUser;
     } catch (LoginException le) {
       if (start > 0) {
-        metrics.loginFailure.inc(System.currentTimeMillis() - start);
+        metrics.loginFailure.add(System.currentTimeMillis() - start);
       }
       throw new IOException("Login failure for " + user + " from keytab " + 
                             path, le);
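
The timing pattern used for the login metrics above, distilled into a hedged
sketch (TimedCall is hypothetical; the real code guards the failure path with
start > 0 because an earlier step may throw before the clock starts):

    import org.apache.hadoop.metrics2.lib.MutableRate;

    class TimedCall {
      // feed wall-clock deltas to a success or failure rate, UGI-style
      static void time(Runnable op, MutableRate success, MutableRate failure) {
        long start = System.currentTimeMillis();
        try {
          op.run();
          success.add(System.currentTimeMillis() - start);
        } catch (RuntimeException e) {
          failure.add(System.currentTimeMillis() - start);
          throw e;
        }
      }
    }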

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/util/VersionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/util/VersionInfo.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/util/VersionInfo.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/util/VersionInfo.java Tue May 10 23:56:54 2011
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.util;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopVersionAnnotation;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -29,6 +31,8 @@ import org.apache.hadoop.classification.
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class VersionInfo {
+  private static final Log LOG = LogFactory.getLog(VersionInfo.class);
+
   private static Package myPackage;
   private static HadoopVersionAnnotation version;
   
@@ -112,6 +116,7 @@ public class VersionInfo {
   }
   
   public static void main(String[] args) {
+    LOG.debug("version: "+ version);
     System.out.println("Hadoop " + getVersion());
     System.out.println("Subversion " + getUrl() + " -r " + getRevision());
     System.out.println("Compiled by " + getUser() + " on " + getDate());

Modified: hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java (original)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java Tue May 10 23:56:54 2011
@@ -25,25 +25,22 @@ import java.lang.management.ManagementFa
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
 import java.lang.reflect.Method;
+import java.util.Arrays;
 
 import junit.framework.TestCase;
 
-import java.util.Arrays;
-
 import org.apache.commons.logging.*;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.io.Writable;
-
-import org.apache.hadoop.metrics.spi.NullContext;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.Service;
 import org.apache.hadoop.security.AccessControlException;
+import static org.apache.hadoop.test.MetricsAsserts.*;
 
 import static org.mockito.Mockito.*;
 
@@ -288,22 +285,17 @@ public class TestRPC extends TestCase {
     assertEquals(stringResult, null);
     
     // Check rpcMetrics 
-    server.rpcMetrics.doUpdates(new NullContext());
-    
-    assertEquals(3, server.rpcMetrics.rpcProcessingTime.getPreviousIntervalNumOps());
-    assertTrue(server.rpcMetrics.sentBytes.getPreviousIntervalValue() > 0);
-    assertTrue(server.rpcMetrics.receivedBytes.getPreviousIntervalValue() > 0);
+    MetricsRecordBuilder rb = getMetrics(server.rpcMetrics.name());
+    assertCounter("RpcProcessingTimeNumOps", 3L, rb);
+    assertCounterGt("SentBytes", 0L, rb);
+    assertCounterGt("ReceivedBytes", 0L, rb);
     
     // Number of calls to echo method should be 2
-    server.rpcDetailedMetrics.doUpdates(new NullContext());
-    MetricsTimeVaryingRate metrics = 
-      (MetricsTimeVaryingRate)server.rpcDetailedMetrics.registry.get("echo");
-    assertEquals(2, metrics.getPreviousIntervalNumOps());
+    rb = getMetrics(server.rpcDetailedMetrics.name());
+    assertCounter("EchoNumOps", 2L, rb);
     
     // Number of calls to ping method should be 1
-    metrics = 
-      (MetricsTimeVaryingRate)server.rpcDetailedMetrics.registry.get("ping");
-    assertEquals(1, metrics.getPreviousIntervalNumOps());
+    assertCounter("PingNumOps", 1L, rb);
     
     String[] stringResults = proxy.echo(new String[]{"foo","bar"});
     assertTrue(Arrays.equals(stringResults, new String[]{"foo","bar"}));
@@ -426,23 +418,16 @@ public class TestRPC extends TestCase {
       if (proxy != null) {
         RPC.stopProxy(proxy);
       }
+      MetricsRecordBuilder rb = getMetrics(server.rpcMetrics.name());
       if (expectFailure) {
-        assertEquals("Wrong number of authorizationFailures ", 1,  
-            server.getRpcMetrics().authorizationFailures
-            .getCurrentIntervalValue());
+        assertCounter("RpcAuthorizationFailures", 1, rb);
       } else {
-        assertEquals("Wrong number of authorizationSuccesses ", 1, 
-            server.getRpcMetrics().authorizationSuccesses
-            .getCurrentIntervalValue());
+        assertCounter("RpcAuthorizationSuccesses", 1, rb);
       }
       //since we don't have authentication turned ON, we should see 
       // 0 for the authentication successes and 0 for failure
-      assertEquals("Wrong number of authenticationFailures ", 0, 
-          server.getRpcMetrics().authenticationFailures
-          .getCurrentIntervalValue());
-      assertEquals("Wrong number of authenticationSuccesses ", 0,
-          server.getRpcMetrics().authenticationSuccesses
-          .getCurrentIntervalValue());
+      assertCounter("RpcAuthenticationFailures", 0, rb);
+      assertCounter("RpcAuthenticationSuccesses", 0, rb);
     }
   }
   

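The change above is the core migration pattern of this patch: instead of pushing samples with doUpdates(new NullContext()) and reading back MetricsTimeVaryingRate intervals, tests snapshot a metrics2 source by name and assert on named counters. A minimal sketch, with a hypothetical source name and metric names modeled on the ones asserted above:

    import org.apache.hadoop.metrics2.MetricsRecordBuilder;
    import static org.apache.hadoop.test.MetricsAsserts.*;

    public class MetricsAssertSketch {
      public void checkRpcCounters() {
        // Snapshot the registered source "MySource" (hypothetical name).
        MetricsRecordBuilder rb = getMetrics("MySource");
        assertCounter("MyCallNumOps", 3L, rb);  // exact counter value
        assertCounterGt("SentBytes", 0L, rb);   // strictly-greater check
      }
    }
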
Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/source/TestJvmMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/source/TestJvmMetrics.java?rev=1101682&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/source/TestJvmMetrics.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/source/TestJvmMetrics.java Tue May 10 23:56:54 2011
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.source;
+
+import org.junit.Test;
+import static org.mockito.Mockito.*;
+import static org.apache.hadoop.test.MetricsAsserts.*;
+
+import org.apache.hadoop.metrics2.MetricsCollector;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.*;
+import static org.apache.hadoop.metrics2.impl.MsInfo.*;
+
+public class TestJvmMetrics {
+
+  @Test public void testPresence() {
+    MetricsRecordBuilder rb = getMetrics(new JvmMetrics("test", "test"));
+    MetricsCollector mc = rb.parent();
+
+    verify(mc).addRecord(JvmMetrics);
+    verify(rb).tag(ProcessName, "test");
+    verify(rb).tag(SessionId, "test");
+    for (JvmMetricsInfo info : JvmMetricsInfo.values()) {
+      if (info.name().startsWith("Mem"))
+        verify(rb).addGauge(eq(info), anyFloat());
+      else if (info.name().startsWith("Gc"))
+        verify(rb).addCounter(eq(info), anyLong());
+      else if (info.name().startsWith("Threads"))
+        verify(rb).addGauge(eq(info), anyInt());
+      else if (info.name().startsWith("Log"))
+        verify(rb).addCounter(eq(info), anyLong());
+    }
+  }
+}

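The verify() calls work because getMetrics(source) hands the source a Mockito-mocked collector and returns the mocked record builder. Roughly, the idea is the following (a sketch only, not the actual MetricsAsserts helper):

    import static org.mockito.Mockito.*;

    import org.apache.hadoop.metrics2.MetricsCollector;
    import org.apache.hadoop.metrics2.MetricsInfo;
    import org.apache.hadoop.metrics2.MetricsRecordBuilder;
    import org.apache.hadoop.metrics2.MetricsSource;

    class MockSnapshotSketch {
      static MetricsRecordBuilder snapshot(MetricsSource source) {
        MetricsCollector mc = mock(MetricsCollector.class);
        // RETURNS_SELF keeps fluent tag()/addGauge()/addCounter() chains
        // returning the builder (a modern Mockito convenience).
        MetricsRecordBuilder rb = mock(MetricsRecordBuilder.class, RETURNS_SELF);
        when(mc.addRecord(any(MetricsInfo.class))).thenReturn(rb);
        when(rb.parent()).thenReturn(mc);
        source.getMetrics(mc, true);  // all=true forces every metric out
        return rb;                    // verify(rb)... then inspects the calls
      }
    }
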
Modified: hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java (original)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java Tue May 10 23:56:54 2011
@@ -33,12 +33,14 @@ import javax.security.auth.login.AppConf
 import javax.security.auth.login.LoginContext;
 
 import junit.framework.Assert;
+import org.junit.Test;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.junit.Test;
+import static org.apache.hadoop.test.MetricsAsserts.*;
 
 public class TestUserGroupInformation {
   final private static String USER_NAME = "user1@HADOOP.APACHE.ORG";
@@ -335,18 +337,17 @@ public class TestUserGroupInformation {
     Assert.assertTrue(user1 == user2);
   }
   
-  public static void verifyLoginMetrics(int success, int failure)
+  public static void verifyLoginMetrics(long success, long failure)
       throws IOException {
     // Ensure metrics related to kerberos login is updated.
-    UserGroupInformation.UgiMetrics metrics = UserGroupInformation.metrics;
-    metrics.doUpdates(null);
+    MetricsRecordBuilder rb = getMetrics("UgiMetrics");
     if (success > 0) {
-      assertEquals(success, metrics.loginSuccess.getPreviousIntervalNumOps());
-      assertTrue(metrics.loginSuccess.getPreviousIntervalAverageTime() > 0);
+      assertCounter("LoginSuccessNumOps", success, rb);
+      assertGaugeGt("LoginSuccessAvgTime", 0, rb);
     }
     if (failure > 0) {
-      assertEquals(failure, metrics.loginFailure.getPreviousIntervalNumOps());
-      assertTrue(metrics.loginFailure.getPreviousIntervalAverageTime() > 0);
+      assertCounter("LoginFailureNumPos", failure, rb);
+      assertGaugeGt("LoginFailureAvgTime", 0, rb);
     }
   }
 

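The new metric names follow from how metrics2 snapshots a MutableRate: a rate registered as "loginSuccess" surfaces as a "LoginSuccessNumOps" counter plus a "LoginSuccessAvgTime" gauge. A minimal sketch of such a source (class and method names hypothetical; the real counters live in UserGroupInformation's UgiMetrics):

    import org.apache.hadoop.metrics2.lib.MetricsRegistry;
    import org.apache.hadoop.metrics2.lib.MutableRate;

    class LoginMetricsSketch {
      final MetricsRegistry registry = new MetricsRegistry("UgiMetrics");
      // Snapshots as LoginSuccessNumOps (counter) and LoginSuccessAvgTime (gauge).
      final MutableRate loginSuccess = registry.newRate("loginSuccess");

      void recordLogin(long elapsedMillis) {
        loginSuccess.add(elapsedMillis);
      }
    }
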
Modified: hadoop/common/trunk/src/test/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/findbugsExcludeFile.xml?rev=1101682&r1=1101681&r2=1101682&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/findbugsExcludeFile.xml (original)
+++ hadoop/common/trunk/src/test/findbugsExcludeFile.xml Tue May 10 23:56:54 2011
@@ -259,4 +259,15 @@
 	  The switch condition fall through is intentional and for performance
 	  purposes.
     -->
+
+    <Match>
+      <Class name="org.apache.hadoop.log.EventCounter"/>
+      <!-- backward compatibility -->
+      <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS"/>
+    </Match>
+    <Match>
+      <Class name="org.apache.hadoop.metrics.jvm.EventCounter"/>
+      <!-- backward compatibility -->
+      <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS"/>
+    </Match>
  </FindBugsFilter>
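
These two excludes cover the backward-compatibility shims added by this patch: each legacy EventCounter keeps its old name while extending the relocated class, which is exactly the shape findbugs flags as NM_SAME_SIMPLE_NAME_AS_SUPERCLASS. A sketch of the pattern (body illustrative):

    package org.apache.hadoop.metrics.jvm;  // legacy location

    // Kept so configurations that reference this class by its old
    // fully-qualified name keep working; the implementation now
    // lives in org.apache.hadoop.log.metrics.
    public class EventCounter extends org.apache.hadoop.log.metrics.EventCounter {
    }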