Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 05:42:39 UTC

svn commit: r1077672 - in /hadoop/common/branches/branch-0.20-security-patches: ./ conf/ src/core/org/apache/hadoop/ipc/metrics/ src/core/org/apache/hadoop/metrics2/ src/core/org/apache/hadoop/metrics2/filter/ src/core/org/apache/hadoop/metrics2/impl/ ...

Author: omalley
Date: Fri Mar  4 04:42:38 2011
New Revision: 1077672

URL: http://svn.apache.org/viewvc?rev=1077672&view=rev
Log:
commit 415d41a3b2c5339e50c724a866b4d9a0d269d4eb
Author: Luke Lu <ll...@yahoo-inc.com>
Date:   Fri Sep 10 10:58:41 2010 -0700

     Post Fred metrics rollup
    
    - Fix javadoc warnings from @Override in examples in metrics2
      package.html and an empty @return tag in MetricsRegistry.
    - Add more //@Override comments to methods in instrumentation
      classes that are potential candidates for abstract methods.
    - Rename sample* methods in some impl classes to snapshot* to
      be consistent with the API (renamed from sample* during reviews).
    - Rename some metrics system internal metrics for consistency
    - Remove some apparently stale (zero size, result of merge?) source files
    - Change default retry count to 1 and randomize sleep interval.
    - Add more description/examples in hadoop-metrics2.properties.example
    - Add javadoc link to more metrics instrumentation examples.
    - Dedup registration of system MBeans.
    - Make sink threads daemon threads.
    - Introduce a minimum delay in retries.
    - Remove metrics INFO from task logs.
    - Add metrics system filter test.
    - Fix metrics.jar ant target.
    - Add an optional MetricsCache for dense sinks.
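
For reference, a self-contained sketch (not part of this commit) of the randomized
retry sleep that the MetricsSinkAdapter change below introduces. The retry.delay
starting value of 10 seconds is illustrative; the retry.count default of 1 and the
backoff factor of 2 are taken from MetricsConfig as it stands after this patch.

    import java.util.Random;

    public class RetrySleepSketch {
      public static void main(String[] args) {
        int retryDelay = 10;   // seconds; illustrative value of the retry.delay option
        int retryBackoff = 2;  // RETRY_BACKOFF_DEFAULT in MetricsConfig
        int retryCount = 1;    // new RETRY_COUNT_DEFAULT from this commit
        int minDelay = Math.min(500, retryDelay * 1000); // millis, the new minimum delay
        Random rng = new Random(System.nanoTime());
        for (int n = retryCount; n > 0; --n) {
          // sleep a random interval in [minDelay, retryDelay * 1000) millis,
          // as in MetricsSinkAdapter.publishMetricsFromQueue()
          int awhile = rng.nextInt(retryDelay * 1000 - minDelay) + minDelay;
          System.out.println("would sleep "+ awhile +"ms before the next retry");
          retryDelay *= retryBackoff;
        }
      }
    }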

Added:
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/util/MetricsCache.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/metrics2/util/TestMetricsCache.java
Removed:
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeStatistics.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeStatisticsMBean.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeStatistics.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeStatisticsMBean.java
Modified:
    hadoop/common/branches/branch-0.20-security-patches/build.xml
    hadoop/common/branches/branch-0.20-security-patches/conf/hadoop-metrics2.properties.example
    hadoop/common/branches/branch-0.20-security-patches/conf/log4j.properties
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/metrics/RpcInstrumentation.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/MetricsTag.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsConfig.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/package.html
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UgiInstrumentation.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeInstrumentation.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeInstrumentation.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskRunner.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTrackerMetricsSource.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java

Modified: hadoop/common/branches/branch-0.20-security-patches/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/build.xml?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/build.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/build.xml Fri Mar  4 04:42:38 2011
@@ -698,15 +698,15 @@
   </target>
 
   <!-- ================================================================== -->
-  <!-- Make the Hadoop metrics jar. (for use outside Hadoop)              -->
+  <!-- Make the Hadoop metrics plugin dev/sdk jar. (for use outside Hadoop)              -->
   <!-- ================================================================== -->
   <!--                                                                    -->
   <!-- ================================================================== -->
-  <target name="metrics.jar" depends="compile-core" description="Make the Hadoop metrics jar. (for use outside Hadoop)">
-    <jar jarfile="${build.dir}/hadoop-metrics-${version}.jar"
+  <target name="metrics.jar" depends="compile-core" description="Make the Hadoop metrics plugin dev/sdk jar. (for use outside Hadoop)">
+    <jar jarfile="${build.dir}/hadoop-metrics-dev-${version}.jar"
          basedir="${build.classes}">
-      <include name="**/metrics/**" />
-      <exclude name="**/package.html" />
+      <include name="**/metrics2/*.class" />
+      <include name="**/metrics2/util/*.class" />
     </jar>
   </target>
 

Modified: hadoop/common/branches/branch-0.20-security-patches/conf/hadoop-metrics2.properties.example
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/conf/hadoop-metrics2.properties.example?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/conf/hadoop-metrics2.properties.example (original)
+++ hadoop/common/branches/branch-0.20-security-patches/conf/hadoop-metrics2.properties.example Fri Mar  4 04:42:38 2011
@@ -1,3 +1,16 @@
-#[prefix].[source|sink|jmx].[instance].[options]
-#namenode.sink.file0.class=org.apache.hadoop.metrics2.sink.FileSink
-#namenode.sink.file0.filename=nn.out
+# syntax: [prefix].[source|sink|jmx].[instance].[options]
+# See package.html in org.apache.hadoop.metrics2 for details
+
+*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
+
+#namenode.sink.file.filename=namenode-metrics.out
+
+#datanode.sink.file.filename=datanode-metrics.out
+
+#jobtracker.sink.file.filename=jobtracker-metrics.out
+
+#tasktracker.sink.file.filename=tasktracker-metrics.out
+
+#maptask.sink.file.filename=maptask-metrics.out
+
+#reducetask.sink.file.filename=reducetask-metrics.out
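
As a point of reference (not part of this commit), a minimal sketch of a sink class
that the [prefix].sink.[instance].class syntax above could point at. The class name
and behavior are made up; it assumes the org.apache.hadoop.metrics2.MetricsSink
callbacks (init, putMetrics, flush) used elsewhere in this patch.

    import org.apache.commons.configuration.SubsetConfiguration;
    import org.apache.hadoop.metrics2.Metric;
    import org.apache.hadoop.metrics2.MetricsRecord;
    import org.apache.hadoop.metrics2.MetricsSink;
    import org.apache.hadoop.metrics2.MetricsTag;

    // Hypothetical sink that prints every record it receives to stdout
    public class StdoutSink implements MetricsSink {

      public void init(SubsetConfiguration conf) {
        // per-instance options (e.g. *.sink.stdout.option=value) arrive here
      }

      public void putMetrics(MetricsRecord record) {
        StringBuilder sb = new StringBuilder(record.name());
        for (MetricsTag tag : record.tags()) {
          sb.append(' ').append(tag.name()).append('=').append(tag.value());
        }
        for (Metric metric : record.metrics()) {
          sb.append(' ').append(metric.name()).append('=').append(metric.value());
        }
        System.out.println(sb);
      }

      public void flush() {
        // nothing buffered in this sketch
      }
    }

Such a sink would be enabled the same way as the file sink above, e.g. with
*.sink.stdout.class set to its fully qualified name.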

Modified: hadoop/common/branches/branch-0.20-security-patches/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/conf/log4j.properties?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/conf/log4j.properties (original)
+++ hadoop/common/branches/branch-0.20-security-patches/conf/log4j.properties Fri Mar  4 04:42:38 2011
@@ -104,9 +104,11 @@ log4j.logger.org.apache.hadoop.fs.FSName
 
 # Custom Logging levels
 
+hadoop.metrics.log.level=INFO
 #log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
 #log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
 #log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
 
 # Jets3t library
 log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/metrics/RpcInstrumentation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/metrics/RpcInstrumentation.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/metrics/RpcInstrumentation.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/metrics/RpcInstrumentation.java Fri Mar  4 04:42:38 2011
@@ -111,11 +111,13 @@ public class RpcInstrumentation implemen
 
   // Start of public instrumentation methods that could be extracted to an
   // abstract class if we decide to allow custom instrumentation classes a la
-  // JobTrackerInstrumenation.
+  // JobTrackerInstrumentation. The methods with a //@Override comment are
+  // candidates for abstract methods in an abstract instrumentation class.
 
   /**
    * One authentication failure event
    */
+  //@Override
   public void incrAuthenticationFailures() {
     this.authenticationFailures.incr();
   }
@@ -123,6 +125,7 @@ public class RpcInstrumentation implemen
   /**
    * One authentication success event
    */
+  //@Override
   public void incrAuthenticationSuccesses() {
     this.authenticationSuccesses.incr();
   }
@@ -130,6 +133,7 @@ public class RpcInstrumentation implemen
   /**
    * One authorization success event
    */
+  //@Override
   public void incrAuthorizationSuccesses() {
     this.authorizationSuccesses.incr();
   }
@@ -137,6 +141,7 @@ public class RpcInstrumentation implemen
   /**
    * One authorization failure event
    */
+  //@Override
   public void incrAuthorizationFailures() {
     this.authorizationFailures.incr();
   }
@@ -144,6 +149,7 @@ public class RpcInstrumentation implemen
   /**
    * Shutdown the instrumentation for the process
    */
+  //@Override
   public void shutdown() {
     LOG.info("shut down");
   }
@@ -152,6 +158,7 @@ public class RpcInstrumentation implemen
    * Increment sent bytes by count
    * @param count to increment
    */
+  //@Override
   public void incrSentBytes(int count) {
     this.sentBytes.incr(count);
   }
@@ -160,6 +167,7 @@ public class RpcInstrumentation implemen
    * Increment received bytes by count
    * @param count to increment
    */
+  //@Override
   public void incrReceivedBytes(int count) {
     this.receivedBytes.incr(count);
   }
@@ -168,6 +176,7 @@ public class RpcInstrumentation implemen
    * Add an RPC queue time sample
    * @param qTime
    */
+  //@Override
   public void addRpcQueueTime(int qTime) {
     this.rpcQueueTime.add(qTime);
   }
@@ -176,6 +185,7 @@ public class RpcInstrumentation implemen
    * Add an RPC processing time sample
    * @param processingTime
    */
+  //@Override
   public void addRpcProcessingTime(int processingTime) {
     this.rpcProcessingTime.add(processingTime);
   }
@@ -185,6 +195,7 @@ public class RpcInstrumentation implemen
    * @param methodName  method name of the RPC
    * @param processingTime  elapsed processing time of the RPC
    */
+  //@Override
   public void addRpcProcessingTime(String methodName, int processingTime) {
     detailed.addRpcProcessingTime(methodName, processingTime);
   }
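
To make the "candidates for abstract methods" comment above concrete, a purely
hypothetical sketch (not part of this commit) of the abstract instrumentation class
that the //@Override markers anticipate, using the method signatures from this file:

    // Hypothetical base class that RpcInstrumentation could extend if custom
    // instrumentation classes a la JobTrackerInstrumentation are ever allowed
    public abstract class AbstractRpcInstrumentation {
      public abstract void incrAuthenticationFailures();
      public abstract void incrAuthenticationSuccesses();
      public abstract void incrAuthorizationSuccesses();
      public abstract void incrAuthorizationFailures();
      public abstract void incrSentBytes(int count);
      public abstract void incrReceivedBytes(int count);
      public abstract void addRpcQueueTime(int qTime);
      public abstract void addRpcProcessingTime(int processingTime);
      public abstract void addRpcProcessingTime(String methodName, int processingTime);
      public abstract void shutdown();
    }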

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/MetricsTag.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/MetricsTag.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/MetricsTag.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/MetricsTag.java Fri Mar  4 04:42:38 2011
@@ -63,7 +63,6 @@ public class MetricsTag {
     return value;
   }
 
-  // Mostly for testing
   @Override
   public boolean equals(Object obj) {
     if (obj == null) {

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java Fri Mar  4 04:42:38 2011
@@ -22,10 +22,13 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+
 import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsFilter;
-import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 
 /**
@@ -33,6 +36,8 @@ import org.apache.hadoop.metrics2.Metric
  */
 abstract class AbstractPatternFilter extends MetricsFilter {
 
+  private static final Log LOG = LogFactory.getLog(AbstractPatternFilter.class);
+
   protected static final String INCLUDE_KEY = "include";
   protected static final String EXCLUDE_KEY = "exclude";
   protected static final String INCLUDE_TAGS_KEY = "include.tags";
@@ -53,14 +58,17 @@ abstract class AbstractPatternFilter ext
   public void init(SubsetConfiguration conf) {
     String patternString = conf.getString(INCLUDE_KEY);
     if (patternString != null && !patternString.isEmpty()) {
+      LOG.debug("got include pattern: "+ patternString);
       setIncludePattern(compile(patternString));
     }
     patternString = conf.getString(EXCLUDE_KEY);
     if (patternString != null && !patternString.isEmpty()) {
+      LOG.debug("got exclude pattern: "+ patternString);
       setExcludePattern(compile(patternString));
     }
     String[] patternStrings = conf.getStringArray(INCLUDE_TAGS_KEY);
     if (patternStrings != null && patternStrings.length != 0) {
+      LOG.debug("got include tags pattern: "+ patternStrings);
       for (String pstr : patternStrings) {
         Matcher matcher = tagPattern.matcher(pstr);
         if (!matcher.matches()) {
@@ -71,6 +79,7 @@ abstract class AbstractPatternFilter ext
     }
     patternStrings = conf.getStringArray(EXCLUDE_TAGS_KEY);
     if (patternStrings != null && patternStrings.length != 0) {
+      LOG.debug("got exclude tags pattern: "+ patternStrings);
       for (String pstr : patternStrings) {
         Matcher matcher = tagPattern.matcher(pstr);
         if (!matcher.matches()) {

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsConfig.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsConfig.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsConfig.java Fri Mar  4 04:42:38 2011
@@ -33,6 +33,7 @@ import org.apache.commons.configuration.
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics2.MetricsFilter;
 import org.apache.hadoop.metrics2.MetricsPlugin;
 
 import org.apache.hadoop.util.StringUtils;
@@ -55,7 +56,7 @@ class MetricsConfig extends SubsetConfig
   static final String RETRY_BACKOFF_KEY = "retry.backoff";
   static final int RETRY_BACKOFF_DEFAULT = 2; // back off factor
   static final String RETRY_COUNT_KEY = "retry.count";
-  static final int RETRY_COUNT_DEFAULT = 3;
+  static final int RETRY_COUNT_DEFAULT = 1;
 
   static final String JMX_CACHE_TTL_KEY = "jmx.cache.ttl";
   static final int JMX_CACHE_TTL_DEFAULT = 10000; // millis
@@ -93,7 +94,8 @@ class MetricsConfig extends SubsetConfig
   static MetricsConfig loadFirst(String prefix, String... fileNames) {
     for (String fname : fileNames) {
       try {
-        PropertiesConfiguration cf = new PropertiesConfiguration(fname);
+        Configuration cf = new PropertiesConfiguration(fname)
+            .interpolatedConfiguration();
         LOG.info("loaded properties from "+ fname);
         return new MetricsConfig(cf, prefix);
       }
@@ -182,6 +184,12 @@ class MetricsConfig extends SubsetConfig
     }
   }
 
+  MetricsFilter getFilter(String prefix) {
+    // don't create filter instances without options
+    if (subset(prefix).isEmpty()) return null;
+    return (MetricsFilter) getPlugin(prefix);
+  }
+
   @Override
   public String toString() {
     return toString(this);

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java Fri Mar  4 04:42:38 2011
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metrics2.impl;
 
+import java.util.Random;
 import org.apache.hadoop.metrics2.lib.MetricMutableGaugeInt;
 import org.apache.hadoop.metrics2.lib.MetricsRegistry;
 import org.apache.hadoop.metrics2.lib.MetricMutableCounterInt;
@@ -77,10 +78,11 @@ class MetricsSinkAdapter {
     this.retryCount = retryCount;
     this.queue = new SinkQueue<MetricsBuffer>(
         Contracts.checkArg(queueCapacity, queueCapacity > 0, "queue capacity"));
-    latency = registry.newStat(name +"_latency", "End to end latency",
-                               "ops", "time");
-    dropped = registry.newCounter(name +"_dropped", "Dropped updates", 0);
-    qsize = registry.newGauge(name + "_qsize", "Queue size", 0);
+    latency = registry.newStat("sink."+ name +".latency",
+                               "Sink end to end latency", "ops", "time");
+    dropped = registry.newCounter("sink."+ name +".dropped",
+                                  "Dropped updates per sink", 0);
+    qsize = registry.newGauge("sink."+ name + ".qsize", "Queue size", 0);
 
     sinkThread = new Thread() {
       @Override public void run() {
@@ -88,6 +90,7 @@ class MetricsSinkAdapter {
       }
     };
     sinkThread.setName(name);
+    sinkThread.setDaemon(true);
   }
 
   boolean putMetrics(MetricsBuffer buffer, long logicalTime) {
@@ -103,6 +106,8 @@ class MetricsSinkAdapter {
   void publishMetricsFromQueue() {
     int retryDelay = firstRetryDelay;
     int n = retryCount;
+    int minDelay = Math.min(500, retryDelay * 1000); // millis
+    Random rng = new Random(System.nanoTime());
     while (!stopping) {
       try {
         queue.consumeAll(consumer);
@@ -115,11 +120,12 @@ class MetricsSinkAdapter {
       }
       catch (Exception e) {
         if (n > 0) {
+          int awhile = rng.nextInt(retryDelay * 1000 - minDelay) + minDelay;
           if (!inError) {
-            LOG.error("Got sink exception, retry in "+ retryDelay +"s", e);
+            LOG.error("Got sink exception, retry in "+ awhile +"ms", e);
           }
           retryDelay *= retryBackoff;
-          try { Thread.sleep(retryDelay * 1000); }
+          try { Thread.sleep(awhile); }
           catch (InterruptedException e2) {
             LOG.info(name +" thread interrupted while waiting for retry", e2);
           }
@@ -127,7 +133,8 @@ class MetricsSinkAdapter {
         }
         else {
           if (!inError) {
-            LOG.error("Got sink exception and over retry limit!", e);
+            LOG.error("Got sink exception and over retry limit, "+
+                      "suppressing further error messages", e);
           }
           queue.clear();
           inError = true; // Don't keep complaining ad infinitum
@@ -139,6 +146,7 @@ class MetricsSinkAdapter {
   void publishMetrics(MetricsBuffer buffer) {
     long ts = 0;
     for (MetricsBuffer.Entry entry : buffer) {
+      LOG.debug("sourceFilter="+ sourceFilter);
       if (sourceFilter == null || sourceFilter.accepts(entry.name())) {
         for (MetricsRecordImpl record : entry.records()) {
           if ((context == null || context.equals(record.context())) &&
@@ -186,7 +194,7 @@ class MetricsSinkAdapter {
     return description;
   }
 
-  void sample(MetricsRecordBuilder rb, boolean all) {
+  void snapshot(MetricsRecordBuilder rb, boolean all) {
     registry.snapshot(rb, all);
   }
 

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java Fri Mar  4 04:42:38 2011
@@ -78,8 +78,8 @@ class MetricsSourceAdapter implements Dy
                        MetricsSource source, Iterable<MetricsTag> injectedTags,
                        int period, MetricsConfig conf) {
     this(prefix, name, description, source, injectedTags,
-        (MetricsFilter) conf.getPlugin(RECORD_FILTER_KEY),
-        (MetricsFilter) conf.getPlugin(METRIC_FILTER_KEY), period);
+        conf.getFilter(RECORD_FILTER_KEY),
+        conf.getFilter(METRIC_FILTER_KEY), period);
   }
 
   void start() {
@@ -87,7 +87,8 @@ class MetricsSourceAdapter implements Dy
       LOG.warn("MBean Source "+ name +" already initialized!");
     }
     mbeanName = MBeans.register(prefix, name, this);
-    LOG.debug("MBean for source "+ name +" registered.", new Throwable());
+    LOG.info("MBean for source "+ name +" registered.");
+    LOG.debug("Stacktrace: "+ new Throwable());
   }
 
   @Override

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java Fri Mar  4 04:42:38 2011
@@ -72,8 +72,8 @@ public class MetricsSystemImpl implement
   private final Map<String, MetricsSinkAdapter> sinks;
   private final List<Callback> callbacks;
   private final MetricsBuilderImpl metricsBuilder;
-  private final MetricMutableStat sampleStat =
-      new MetricMutableStat("sample", "sampling stats", "ops", "time", true);
+  private final MetricMutableStat snapshotStat =
+      new MetricMutableStat("snapshot", "snapshot stats", "ops", "time", true);
   private final MetricMutableStat publishStat =
       new MetricMutableStat("publish", "publishing stats", "ops", "time", true);
   private final MetricMutableCounterLong dropStat =
@@ -175,8 +175,8 @@ public class MetricsSystemImpl implement
   }
 
   @Override
-  public synchronized <T extends MetricsSource> T register(final String name,
-      final String desc, final T source) {
+  public synchronized <T extends MetricsSource>
+  T register(final String name, final String desc, final T source) {
     if (monitoring) {
       registerSource(name, desc, source);
     }
@@ -189,7 +189,6 @@ public class MetricsSystemImpl implement
       }
 
     });
-    LOG.debug("Registered source "+ name);
     return source;
   }
 
@@ -209,11 +208,12 @@ public class MetricsSystemImpl implement
           injectedTags, period, config.subset(SOURCE_KEY));
     sources.put(name, sa);
     sa.start();
+    LOG.debug("Registered source "+ name);
   }
 
   @Override
-  public synchronized <T extends MetricsSink> T register(final String name,
-      final String description, final T sink) {
+  public synchronized <T extends MetricsSink>
+  T register(final String name, final String description, final T sink) {
     if (config != null) {
       registerSink(name, description, sink);
     }
@@ -226,7 +226,6 @@ public class MetricsSystemImpl implement
       }
 
     });
-    LOG.debug("Registered sink "+ name);
     return sink;
   }
 
@@ -243,6 +242,7 @@ public class MetricsSystemImpl implement
         : newSink(name, desc, sink, config.subset(SINK_KEY));
     sinks.put(name, sa);
     sa.start();
+    LOG.debug("Registered sink "+ name);
   }
 
   @Override
@@ -265,8 +265,8 @@ public class MetricsSystemImpl implement
 
   @Override
   public synchronized void refreshMBeans() {
-    for (Entry<String, MetricsSourceAdapter> entry : sources.entrySet()) {
-      entry.getValue().refreshMBean();
+    for (MetricsSourceAdapter sa : sources.values()) {
+      sa.refreshMBean();
     }
   }
 
@@ -300,43 +300,43 @@ public class MetricsSystemImpl implement
             }
           }
         }, millis, millis);
-    LOG.info("Scheduled sampling period at "+ period +" second(s).");
+    LOG.info("Scheduled snapshot period at "+ period +" second(s).");
   }
 
   synchronized void onTimerEvent() {
     logicalTime += period;
     if (sinks.size() > 0) {
-      publishMetrics(sampleMetrics());
+      publishMetrics(snapshotMetrics());
     }
   }
 
   /**
-   * Sample all the sources for a snapshot of metrics/tags
+   * Snapshot all the sources for a snapshot of metrics/tags
    * @return  the metrics buffer containing the snapshot
    */
-  synchronized MetricsBuffer sampleMetrics() {
+  synchronized MetricsBuffer snapshotMetrics() {
     metricsBuilder.clear();
     MetricsBufferBuilder bufferBuilder = new MetricsBufferBuilder();
 
     for (Entry<String, MetricsSourceAdapter> entry : sources.entrySet()) {
       if (sourceFilter == null || sourceFilter.accepts(entry.getKey())) {
-        sampleMetrics(entry.getValue(), bufferBuilder);
+        snapshotMetrics(entry.getValue(), bufferBuilder);
       }
     }
     if (publishSelfMetrics) {
-      sampleMetrics(sysSource, bufferBuilder);
+      snapshotMetrics(sysSource, bufferBuilder);
     }
     MetricsBuffer buffer = bufferBuilder.get();
     return buffer;
   }
 
-  private void sampleMetrics(MetricsSourceAdapter sa,
-                             MetricsBufferBuilder bufferBuilder) {
+  private void snapshotMetrics(MetricsSourceAdapter sa,
+                               MetricsBufferBuilder bufferBuilder) {
     long startTime = System.currentTimeMillis();
     bufferBuilder.add(sa.name(), sa.getMetrics(metricsBuilder, false));
     metricsBuilder.clear();
-    sampleStat.add(System.currentTimeMillis() - startTime);
-    LOG.debug("Sampled source "+ sa.name());
+    snapshotStat.add(System.currentTimeMillis() - startTime);
+    LOG.debug("Snapshotted source "+ sa.name());
   }
 
   /**
@@ -345,9 +345,9 @@ public class MetricsSystemImpl implement
    */
   synchronized void publishMetrics(MetricsBuffer buffer) {
     int dropped = 0;
-    for (Entry<String, MetricsSinkAdapter> entry : sinks.entrySet()) {
+    for (MetricsSinkAdapter sa : sinks.values()) {
       long startTime = System.currentTimeMillis();
-      dropped += entry.getValue().putMetrics(buffer, logicalTime) ? 0 : 1;
+      dropped += sa.putMetrics(buffer, logicalTime) ? 0 : 1;
       publishStat.add(System.currentTimeMillis() - startTime);
     }
     dropStat.incr(dropped);
@@ -369,6 +369,7 @@ public class MetricsSystemImpl implement
                 sa.source().getClass().getName() +")");
       sa.stop();
     }
+    sysSource.stop();
     sources.clear();
   }
 
@@ -395,26 +396,23 @@ public class MetricsSystemImpl implement
   }
 
   private synchronized void configureSinks() {
-    Map<String, MetricsConfig> confs = config.getInstanceConfigs(SINK_KEY);
+    sinkConfigs = config.getInstanceConfigs(SINK_KEY);
     int confPeriod = 0;
-    for (Entry<String, MetricsConfig> entry : confs.entrySet()) {
+    for (Entry<String, MetricsConfig> entry : sinkConfigs.entrySet()) {
       MetricsConfig conf = entry.getValue();
       int sinkPeriod = conf.getInt(PERIOD_KEY, PERIOD_DEFAULT);
       confPeriod = confPeriod == 0 ? sinkPeriod
                                    : MathUtils.gcd(confPeriod, sinkPeriod);
-      String sinkName = conf.getString(NAME_KEY);
-      if (sinkName != null && !sinkName.isEmpty()) {
-        // named config is for internally registered sinks
-        sinkConfigs.put(sinkName, conf);
-      }
-      else {
-        sinkName = "sink"+ entry.getKey();
-      }
+      String sinkName = entry.getKey();
+      LOG.debug("sink "+ sinkName +" config:\n"+ conf);
       try {
         MetricsSinkAdapter sa = newSink(sinkName,
             conf.getString(DESC_KEY, sinkName), conf);
-        sa.start();
-        sinks.put(sinkName, sa);
+        // we allow config of later registered sinks
+        if (sa != null) {
+          sa.start();
+          sinks.put(sinkName, sa);
+        }
       }
       catch (Exception e) {
         LOG.warn("Error creating "+ sinkName, e);
@@ -427,9 +425,9 @@ public class MetricsSystemImpl implement
   static MetricsSinkAdapter newSink(String name, String desc, MetricsSink sink,
                                     MetricsConfig conf) {
     return new MetricsSinkAdapter(name, desc, sink, conf.getString(CONTEXT_KEY),
-        (MetricsFilter) conf.getPlugin(SOURCE_FILTER_KEY),
-        (MetricsFilter) conf.getPlugin(RECORD_FILTER_KEY),
-        (MetricsFilter) conf.getPlugin(METRIC_FILTER_KEY),
+        conf.getFilter(SOURCE_FILTER_KEY),
+        conf.getFilter(RECORD_FILTER_KEY),
+        conf.getFilter(METRIC_FILTER_KEY),
         conf.getInt(PERIOD_KEY, PERIOD_DEFAULT),
         conf.getInt(QUEUE_CAPACITY_KEY, QUEUE_CAPACITY_DEFAULT),
         conf.getInt(RETRY_DELAY_KEY, RETRY_DELAY_DEFAULT),
@@ -439,12 +437,13 @@ public class MetricsSystemImpl implement
 
   static MetricsSinkAdapter newSink(String name, String desc,
                                     MetricsConfig conf) {
-    return newSink(name, desc, (MetricsSink) conf.getPlugin(""), conf);
+    MetricsSink sink = conf.getPlugin("");
+    if (sink == null) return null;
+    return newSink(name, desc, sink, conf);
   }
 
   private void configureSources() {
-    sourceFilter =
-        (MetricsFilter) config.getPlugin(PREFIX_DEFAULT + SOURCE_FILTER_KEY);
+    sourceFilter = config.getFilter(PREFIX_DEFAULT + SOURCE_FILTER_KEY);
     Map<String, MetricsConfig> confs = config.getInstanceConfigs(SOURCE_KEY);
     for (Entry<String, MetricsConfig> entry : confs.entrySet()) {
      sourceConfigs.put(entry.getKey(), entry.getValue());
@@ -484,11 +483,11 @@ public class MetricsSystemImpl implement
             .addGauge(NUM_SOURCES_KEY, NUM_SOURCES_DESC, numSources)
             .addGauge(NUM_SINKS_KEY, NUM_SINKS_DESC, numSinks);
         synchronized(MetricsSystemImpl.this) {
-          for (Entry<String, MetricsSinkAdapter> entry : sinks.entrySet()) {
-            entry.getValue().sample(rb, all);
+          for (MetricsSinkAdapter sa : sinks.values()) {
+            sa.snapshot(rb, all);
           }
         }
-        sampleStat.snapshot(rb, all);
+        snapshotStat.snapshot(rb, all);
         publishStat.snapshot(rb, all);
         dropStat.snapshot(rb, all);
       }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/lib/MetricsRegistry.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/lib/MetricsRegistry.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/lib/MetricsRegistry.java Fri Mar  4 04:42:38 2011
@@ -287,7 +287,7 @@ public class MetricsRegistry {
   /**
    * Set the metrics context tag
    * @param name of the context
-   * @return
+   * @return the registry itself as a convenience
    */
   public MetricsRegistry setContext(String name) {
     return tag(CONTEXT_KEY, CONTEXT_DESC, name);

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/package.html
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/package.html?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/package.html (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/package.html Fri Mar  4 04:42:38 2011
@@ -165,7 +165,9 @@
       identify a particular sink instance. The asterisk (<code>*</code>) can be
       used to specify default options.
     </p>
-    <p>Consult the metrics instrumentation in jvm, rpc, hdfs and mapred, etc.
+    <p>Consult the metrics instrumentation in
+      {@link org.apache.hadoop.metrics2.source.JvmMetricsSource},
+      {@link org.apache.hadoop.ipc.metrics.RpcInstrumentation}, etc.
       for more examples.
     </p>
 
@@ -184,7 +186,7 @@
       accept metrics from context <code>foo</code> only.
     </p>
     <pre>
-    *.source.filter.class=org.apache.hadoop.metrics.filter.GlobFilter
+    *.source.filter.class=org.apache.hadoop.metrics2.filter.GlobFilter
     test.*.source.filter.include=foo
     test.*.source.filter.exclude=bar</pre>
     <p>In this example, we specify a source filter that includes source
@@ -217,9 +219,9 @@
       final MetricMutaleStat stat0 =
           registry.newStat("myStat", "my stat description", "ops", "time");
 
-      @Override public void setGauge0(int value) { gauge0.set(value); }
-      @Override public void incrCounter0() { counter0.incr(); }
-      @Override public void addStat0(long elapsed) { stat0.add(elapsed); }
+      &#064;Override public void setGauge0(int value) { gauge0.set(value); }
+      &#064;Override public void incrCounter0() { counter0.incr(); }
+      &#064;Override public void addStat0(long elapsed) { stat0.add(elapsed); }
 
       public void getMetrics(MetricsBuilder builder, boolean all) {
         registry.snapshot(builder.addRecord(registry.name()), all);
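
To complement the configuration walkthrough in this package.html, a minimal sketch
(not part of this commit) of wiring a source into the metrics system programmatically,
mirroring what TestMetricsSystemImpl below does. MyCounterSource and the "test" prefix
are made up for illustration.

    import org.apache.hadoop.metrics2.MetricsBuilder;
    import org.apache.hadoop.metrics2.MetricsSource;
    import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
    import org.apache.hadoop.metrics2.lib.MetricMutableCounterInt;
    import org.apache.hadoop.metrics2.lib.MetricsRegistry;

    public class MetricsWiringSketch {

      // A made-up source backed by the lib registry, as in the package.html example
      static class MyCounterSource implements MetricsSource {
        final MetricsRegistry registry = new MetricsRegistry("mySource");
        final MetricMutableCounterInt c0 =
            registry.newCounter("c0", "a demo counter", 0);

        public void getMetrics(MetricsBuilder builder, boolean all) {
          registry.snapshot(builder.addRecord(registry.name()), all);
        }
      }

      public static void main(String[] args) {
        // "test" is the prefix that hadoop-metrics2.properties entries would use
        MetricsSystemImpl ms = new MetricsSystemImpl("test");
        ms.start();
        MyCounterSource source =
            ms.register("mySource", "demo source", new MyCounterSource());
        source.c0.incr();
        ms.stop();
      }
    }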

Added: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/util/MetricsCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/util/MetricsCache.java?rev=1077672&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/util/MetricsCache.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/metrics2/util/MetricsCache.java Fri Mar  4 04:42:38 2011
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.util;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.metrics2.Metric;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsTag;
+
+/**
+ * A metrics cache for sinks that don't support sparse updates.
+ */
+public class MetricsCache {
+
+  private static final long serialVersionUID = 1L;
+  private final Map<String, RecMap> map = new HashMap<String, RecMap>();
+
+  static class RecMap extends HashMap<Collection<MetricsTag>, Record> {
+    private static final long serialVersionUID = 1L;
+  }
+
+  /**
+   * Cached record
+   */
+  public static class Record {
+    final Map<String, String> tags = new LinkedHashMap<String, String>();
+    final Map<String, Number> metrics = new LinkedHashMap<String, Number>();
+
+    /**
+     * Get the tag value
+     * @param key name of the tag
+     * @return the tag value
+     */
+    public String getTag(String key) {
+      return tags.get(key);
+    }
+
+    /**
+     * Get the metric value
+     * @param key name of the metric
+     * @return the metric value
+     */
+    public Number getMetric(String key) {
+      return metrics.get(key);
+    }
+
+    /**
+     * @return entry set of metrics
+     */
+    public Set<Map.Entry<String, Number>> metrics() {
+      return metrics.entrySet();
+    }
+  }
+
+  /**
+   * Update the cache and return the cached record
+   * @param mr the update record
+   * @param includingTags cache tag values (for later lookup by name) if true
+   * @return the updated cached record
+   */
+  public Record update(MetricsRecord mr, boolean includingTags) {
+    String name = mr.name();
+    RecMap recMap = map.get(name);
+    if (recMap == null) {
+      recMap = new RecMap();
+      map.put(name, recMap);
+    }
+    Collection<MetricsTag> tags = (Collection<MetricsTag>)mr.tags();
+    Record rec = recMap.get(tags);
+    if (rec == null) {
+      rec = new Record();
+      recMap.put(tags, rec);
+    }
+    for (Metric m : mr.metrics()) {
+      rec.metrics.put(m.name(), m.value());
+    }
+    if (includingTags) {
+      // mostly for some sinks that include tags as part of a dense schema
+      for (MetricsTag t : mr.tags()) {
+        rec.tags.put(t.name(), t.value());
+      }
+    }
+    return rec;
+  }
+
+  public Record update(MetricsRecord mr) {
+    return update(mr, false);
+  }
+
+  /**
+   * Get the cached record
+   * @param name of the record
+   * @param tags of the record
+   * @return the cached record or null
+   */
+  public Record get(String name, Collection<MetricsTag> tags) {
+    RecMap tmap = map.get(name);
+    if (tmap == null) return null;
+    return tmap.get(tags);
+  }
+
+}
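
A brief usage sketch (not part of this commit) of the cache above from inside a sink
that needs dense rows; DenseSinkSketch and emit() are made-up names.

    import java.util.Map;

    import org.apache.hadoop.metrics2.MetricsRecord;
    import org.apache.hadoop.metrics2.util.MetricsCache;

    // Hypothetical fragment of a sink that cannot handle sparse updates
    public class DenseSinkSketch {

      private final MetricsCache cache = new MetricsCache();

      public void putMetrics(MetricsRecord record) {
        // merge the (possibly sparse) update and get back the full cached row
        MetricsCache.Record cached = cache.update(record, true);
        for (Map.Entry<String, Number> metric : cached.metrics()) {
          emit(record.name(), metric.getKey(), metric.getValue());
        }
      }

      // stand-in for whatever the real sink would do with a complete row
      private void emit(String recordName, String metricName, Number value) {
        System.out.println(recordName +"."+ metricName +" = "+ value);
      }
    }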

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UgiInstrumentation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UgiInstrumentation.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UgiInstrumentation.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UgiInstrumentation.java Fri Mar  4 04:42:38 2011
@@ -28,7 +28,7 @@ import org.apache.hadoop.metrics2.lib.Me
 
 class UgiInstrumentation implements MetricsSource {
 
-  final MetricsRegistry registry = new MetricsRegistry("ugi");
+  final MetricsRegistry registry = new MetricsRegistry("ugi").setContext("ugi");
   final MetricMutableStat loginSuccess = registry.newStat("loginSuccess");
   final MetricMutableStat loginFailure = registry.newStat("loginFailure");
 
@@ -37,10 +37,12 @@ class UgiInstrumentation implements Metr
     registry.snapshot(builder.addRecord(registry.name()), all);
   }
 
+  //@Override
   void addLoginSuccess(long elapsed) {
     loginSuccess.add(elapsed);
   }
 
+  //@Override
   void addLoginFailure(long elapsed) {
     loginFailure.add(elapsed);
   }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeInstrumentation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeInstrumentation.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeInstrumentation.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeInstrumentation.java Fri Mar  4 04:42:38 2011
@@ -74,10 +74,12 @@ public class DataNodeInstrumentation imp
     registry.setContext("dfs").tag("sessionId", "", sessionId);
   }
 
+  //@Override
   public void shutdown() {
     // metrics system shutdown would suffice
   }
 
+  //@Override
   public void resetAllMinMax() {
     readBlockOp.resetMinMax();
     writeBlockOp.resetMinMax();
@@ -88,82 +90,102 @@ public class DataNodeInstrumentation imp
     blockReports.resetMinMax();
   }
 
+  //@Override
   public void addHeartBeat(long latency) {
     heartbeats.add(latency);
   }
 
+  //@Override
   public void addBlockReport(long latency) {
     blockReports.add(latency);
   }
 
+  //@Override
   public void incrBlocksReplicated(int delta) {
     blocksReplicated.incr(delta);
   }
 
+  //@Override
   public void incrBlocksWritten() {
     blocksWritten.incr();
   }
 
+  //@Override
   public void incrBlocksRemoved(int delta) {
     blocksRemoved.incr(delta);
   }
 
+  //@Override
   public void incrBytesWritten(int delta) {
     bytesWritten.incr(delta);
   }
 
+  //@Override
   public void incrBlockVerificationFailures() {
     blockVerificationFailures.incr();
   }
 
+  //@Override
   public void incrBlocksVerified() {
     blocksVerified.incr();
   }
 
+  //@Override
   public void addReadBlockOp(long latency) {
     readBlockOp.add(latency);
   }
 
+  //@Override
   public void incrReadsFromLocalClient() {
     readsFromLocalClient.incr();
   }
 
+  //@Override
   public void incrReadsFromRemoteClient() {
     readsFromRemoteClient.incr();
   }
 
+  //@Override
   public void addWriteBlockOp(long latency) {
     writeBlockOp.add(latency);
   }
 
+  //@Override
   public void incrWritesFromLocalClient() {
     writesFromLocalClient.incr();
   }
 
+  //@Override
   public void incrWritesFromRemoteClient() {
     writesFromRemoteClient.incr();
   }
 
+  //@Override
   public void addReplaceBlockOp(long latency) {
     replaceBlockOp.add(latency);
   }
 
+  //@Override
   public void addCopyBlockOp(long latency) {
     copyBlockOp.add(latency);
   }
 
+  //@Override
   public void addBlockChecksumOp(long latency) {
     blockChecksumOp.add(latency);
   }
 
+  //@Override
   public void incrBytesRead(int delta) {
     bytesRead.incr(delta);
   }
 
+  //@Override
   public void incrBlocksRead() {
     blocksRead.incr();
   }
 
+  @Override
   public void getMetrics(MetricsBuilder builder, boolean all) {
     registry.snapshot(builder.addRecord(registry.name()), all);
   }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java Fri Mar  4 04:42:38 2011
@@ -24,13 +24,14 @@ package org.apache.hadoop.hdfs.server.na
  * It is also used for publishing via JMX (hence we follow the JMX naming
  * convention.)
  * 
- * Note we have not used the MetricsDynamicMBeanBase to implement this
- * because the interface for the NameNodeStateMBean is stable and should
+ * Note we have not used the metrics system to implement this MBean,
+ * because the interface for the FSNamesystemMBean is stable and should
  * be published as an interface.
  * 
  * <p>
- * Name Node runtime activity statistic  info is report in another MBean
- * @see org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeActivityMBean
+ * Name Node runtime activity statistics are reported in another MBean via the
+ * metrics system.
+ * @see org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeInstrumentation
  *
  */
 public interface FSNamesystemMBean {

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeInstrumentation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeInstrumentation.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeInstrumentation.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeInstrumentation.java Fri Mar  4 04:42:38 2011
@@ -90,94 +90,97 @@ public class NameNodeInstrumentation imp
                        new NameNodeInstrumentation(conf));
   }
 
+  //@Override
   public void shutdown() {
     // metrics system shutdown would suffice
   }
 
+  //@Override
   public final void incrNumGetBlockLocations() {
     numGetBlockLocations.incr();
   }
 
-  
+  //@Override
   public final void incrNumFilesCreated() {
     numFilesCreated.incr();
   }
 
-  
+  //@Override
   public final void incrNumCreateFileOps() {
     numCreateFileOps.incr();
   }
 
-  
+  //@Override
   public final void incrNumFilesAppended() {
     numFilesAppended.incr();
   }
 
-  
+  //@Override
   public final void incrNumAddBlockOps() {
     numAddBlockOps.incr();
   }
 
-  
+  //@Override
   public final void incrNumFilesRenamed() {
     numFilesRenamed.incr();
   }
 
-  
+  //@Override
   public void incrFilesDeleted(int delta) {
     numFilesDeleted.incr(delta);
   }
 
-  
+  //@Override
   public final void incrNumDeleteFileOps() {
     numDeleteFileOps.incr();
   }
 
-  
+  //@Override
   public final void incrNumGetListingOps() {
     numGetListingOps.incr();
   }
 
-  
+  //@Override
   public final void incrNumFilesInGetListingOps(int delta) {
     numFilesInGetListingOps.incr(delta);
   }
 
-  
+  //@Override
   public final void incrNumFileInfoOps() {
     numFileInfoOps.incr();
   }
 
-  
+  //@Override
   public final void addTransaction(long latency) {
     transactions.add(latency);
   }
 
-  
+  //@Override
   public final void incrTransactionsBatchedInSync() {
     transactionsBatchedInSync.incr();
   }
 
-  
+  //@Override
   public final void addSync(long elapsed) {
     syncs.add(elapsed);
   }
 
-  
+  //@Override
   public final void setFsImageLoadTime(long elapsed) {
     fsImageLoadTime.set((int) elapsed);
   }
 
-  
+  //@Override
   public final void addBlockReport(long latency) {
     blockReport.add(latency);
   }
 
-  
+  //@Override
   public final void setSafeModeTime(long elapsed) {
     safeModeTime.set((int) elapsed);
   }
 
+  @Override
   public void getMetrics(MetricsBuilder builder, boolean all) {
     registry.snapshot(builder.addRecord(registry.name()), all);
   }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskRunner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskRunner.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskRunner.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskRunner.java Fri Mar  4 04:42:38 2011
@@ -61,7 +61,8 @@ abstract class TaskRunner extends Thread
     "mapreduce.admin.reduce.child.java.opts";
 
   static final String DEFAULT_MAPRED_ADMIN_JAVA_OPTS =
-    "-Djava.net.preferIPv4Stack=true";
+    "-Djava.net.preferIPv4Stack=true " +
+    "-Dhadoop.metrics.log.level=WARN ";
 
   static final String MAPRED_ADMIN_USER_SHELL =
     "mapreduce.admin.user.shell";

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java Fri Mar  4 04:42:38 2011
@@ -672,7 +672,7 @@ public class TaskTracker implements MRCo
     }
     try {
       java.lang.reflect.Constructor<? extends TaskTrackerInstrumentation> c =
-        metricsInst.getConstructor(new Class[] {TaskTracker.class} );
+        metricsInst.getConstructor(new Class<?>[] {TaskTracker.class} );
       this.myInstrumentation = c.newInstance(this);
     } catch(Exception e) {
       //Reflection can throw lots of exceptions -- handle them all by

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTrackerMetricsSource.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTrackerMetricsSource.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTrackerMetricsSource.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTrackerMetricsSource.java Fri Mar  4 04:42:38 2011
@@ -55,6 +55,7 @@ public class TaskTrackerMetricsSource ex
     registry.setContext("mapred").tag("sessionId", "", sessionId);
   }
 
+  @Override
   public void getMetrics(MetricsBuilder builder, boolean all) {
     mapsRunning.set(tt.mapTotal);
     redsRunning.set(tt.reduceTotal);

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java?rev=1077672&r1=1077671&r2=1077672&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java Fri Mar  4 04:42:38 2011
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metrics2.impl;
 
+import org.apache.commons.configuration.SubsetConfiguration;
 import org.apache.hadoop.metrics2.lib.MetricMutableCounterLong;
 import org.apache.hadoop.metrics2.lib.MetricMutableStat;
 import org.apache.hadoop.metrics2.lib.MetricMutableGaugeLong;
@@ -47,35 +48,50 @@ public class TestMetricsSystemImpl {
   private static final Log LOG = LogFactory.getLog(TestMetricsSystemImpl.class);
   @Captor private ArgumentCaptor<MetricsRecord> r1;
   @Captor private ArgumentCaptor<MetricsRecord> r2;
+  @Captor private ArgumentCaptor<MetricsRecord> r3;
   private static String hostname = MetricsSystemImpl.getHostname();
 
   @Test public void testInitFirst() throws Exception {
     ConfigBuilder cb = new ConfigBuilder().add("default.period", 8)
-        .add("Test.sink.0.class", "org.apache.hadoop.metrics.sink.FileSink")
+        .add("source.filter.class",
+             "org.apache.hadoop.metrics2.filter.GlobFilter")
+        .add("test.*.source.filter.class", "${source.filter.class}")
+        .add("test.*.source.filter.exclude", "s1*")
+        .add("test.sink.sink3.source.filter.class", "${source.filter.class}")
+        .add("test.sink.sink3.source.filter.exclude", "s2*")
         .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
     TestSource s1 = ms.register("s1", "s1 desc", new TestSource("s1rec"));
+    TestSource s2 = ms.register("s2", "s2 desc", new TestSource("s2rec"));
+    TestSource s3 = ms.register("s3", "s3 desc", new TestSource("s3rec"));
     s1.s1.add(0);
+    s2.s1.add(0);
+    s3.s1.add(0);
     MetricsSink sink1 = mock(MetricsSink.class);
     MetricsSink sink2 = mock(MetricsSink.class);
+    MetricsSink sink3 = mock(MetricsSink.class);
     ms.register("sink1", "sink1 desc", sink1);
     ms.register("sink2", "sink2 desc", sink2);
+    ms.register("sink3", "sink3 desc", sink3);
     ms.onTimerEvent();  // trigger something interesting
     ms.stop();
 
-    verify(sink1, times(2)).putMetrics(r1.capture());
+    verify(sink1, times(3)).putMetrics(r1.capture()); // 2 + 1 sys source
     List<MetricsRecord> mr1 = r1.getAllValues();
-    verify(sink2, times(2)).putMetrics(r2.capture());
+    verify(sink2, times(3)).putMetrics(r2.capture()); // ditto
     List<MetricsRecord> mr2 = r2.getAllValues();
-    checkMetricsRecords(mr1);
+    verify(sink3, times(2)).putMetrics(r3.capture()); // 1 + 1 (s1, s2 filtered)
+    List<MetricsRecord> mr3 = r3.getAllValues();
+    checkMetricsRecords(mr1, "s2rec");
     assertEquals("output", mr1, mr2);
+    checkMetricsRecords(mr3, "s3rec");
   }
 
-  static void checkMetricsRecords(List<MetricsRecord> recs) {
+  static void checkMetricsRecords(List<MetricsRecord> recs, String expected) {
     LOG.debug(recs);
     MetricsRecord r = recs.get(0);
-    assertEquals("name", "s1rec", r.name());
+    assertEquals("name", expected, r.name());
     assertEquals("tags", new MetricsTag[] {
       new MetricsTag("context", "Metrics context", "test"),
       new MetricsTag("hostName", "Local hostname", hostname)}, r.tags());

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/metrics2/util/TestMetricsCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/metrics2/util/TestMetricsCache.java?rev=1077672&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/metrics2/util/TestMetricsCache.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/metrics2/util/TestMetricsCache.java Fri Mar  4 04:42:38 2011
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.util;
+
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.metrics2.Metric;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsTag;
+
+public class TestMetricsCache {
+  private static final Log LOG = LogFactory.getLog(TestMetricsCache.class);
+
+  @Test public void testUpdate() {
+    MetricsCache cache = new MetricsCache();
+    MetricsRecord mr = makeRecord("r",
+        Arrays.asList(makeTag("t", "tv")),
+        Arrays.asList(makeMetric("m", 0), makeMetric("m1", 1)));
+
+    MetricsCache.Record cr = cache.update(mr);
+    verify(mr).name();
+    verify(mr).tags();
+    verify(mr).metrics();
+    assertEquals("same record size", cr.metrics.size(),
+                 ((Collection<Metric>)mr.metrics()).size());
+    assertEquals("same metric value", 0, cr.getMetric("m"));
+
+    MetricsRecord mr2 = makeRecord("r",
+        Arrays.asList(makeTag("t", "tv")),
+        Arrays.asList(makeMetric("m", 2), makeMetric("m2", 42)));
+    cr = cache.update(mr2);
+    assertEquals("contains 3 metric", 3, cr.metrics.size());
+    assertEquals("updated metric value", 2, cr.getMetric("m"));
+    assertEquals("old metric value", 1, cr.getMetric("m1"));
+    assertEquals("new metric value", 42, cr.getMetric("m2"));
+
+    MetricsRecord mr3 = makeRecord("r",
+        Arrays.asList(makeTag("t", "tv3")), // different tag value
+        Arrays.asList(makeMetric("m3", 3)));
+    cr = cache.update(mr3); // should get a new record
+    assertEquals("contains 1 metric", 1, cr.metrics.size());
+    assertEquals("updated metric value", 3, cr.getMetric("m3"));
+    // tags cache should be empty so far
+    assertEquals("no tags", 0, cr.tags.size());
+    // until now
+    cr = cache.update(mr3, true);
+    assertEquals("Got 1 tag", 1, cr.tags.size());
+    assertEquals("Tag value", "tv3", cr.getTag("t"));
+    assertEquals("Metric value", 3, cr.getMetric("m3"));
+  }
+
+  @Test public void testGet() {
+    MetricsCache cache = new MetricsCache();
+    assertNull("empty", cache.get("r", Arrays.asList(makeTag("t", "t"))));
+    MetricsRecord mr = makeRecord("r",
+        Arrays.asList(makeTag("t", "t")),
+        Arrays.asList(makeMetric("m", 1)));
+    cache.update(mr);
+    MetricsCache.Record cr = cache.get("r", (Collection<MetricsTag>)mr.tags());
+    LOG.debug("tags="+ (Collection<MetricsTag>)mr.tags() +" cr="+ cr);
+
+    assertNotNull("Got record", cr);
+    assertEquals("contains 1 metric", 1, cr.metrics.size());
+    assertEquals("new metric value", 1, cr.getMetric("m"));
+  }
+
+  private MetricsRecord makeRecord(String name, Collection<MetricsTag> tags,
+                                   Collection<Metric> metrics) {
+    MetricsRecord mr = mock(MetricsRecord.class);
+    when(mr.name()).thenReturn(name);
+    when(mr.tags()).thenReturn(tags);
+    when(mr.metrics()).thenReturn(metrics);
+    return mr;
+  }
+
+  private MetricsTag makeTag(String name, String value) {
+    return new MetricsTag(name, "", value);
+  }
+
+  private Metric makeMetric(String name, Number value) {
+    Metric metric = mock(Metric.class);
+    when(metric.name()).thenReturn(name);
+    when(metric.value()).thenReturn(value);
+    return metric;
+  }
+}