Posted to common-commits@hadoop.apache.org by ac...@apache.org on 2011/05/06 09:28:46 UTC

svn commit: r1100113 [4/5] - in /hadoop/common/trunk: ./ conf/ ivy/ src/java/org/apache/hadoop/metrics/file/ src/java/org/apache/hadoop/metrics2/ src/java/org/apache/hadoop/metrics2/annotation/ src/java/org/apache/hadoop/metrics2/filter/ src/java/org/a...

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/sink/FileSink.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/sink/FileSink.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/sink/FileSink.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/sink/FileSink.java Fri May  6 07:28:43 2011
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.sink;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.PrintWriter;
+
+import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics2.AbstractMetric;
+import org.apache.hadoop.metrics2.MetricsException;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsSink;
+import org.apache.hadoop.metrics2.MetricsTag;
+
+/**
+ * A metrics sink that writes to a file
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class FileSink implements MetricsSink {
+  private static final String FILENAME_KEY = "filename";
+  private PrintWriter writer;
+
+  @Override
+  public void init(SubsetConfiguration conf) {
+    String filename = conf.getString(FILENAME_KEY);
+    try {
+      writer = filename == null
+          ? new PrintWriter(System.out)
+          : new PrintWriter(new FileWriter(new File(filename), true));
+    }
+    catch (Exception e) {
+      throw new MetricsException("Error creating "+ filename, e);
+    }
+  }
+
+  @Override
+  public void putMetrics(MetricsRecord record) {
+    writer.print(record.timestamp());
+    writer.print(" ");
+    writer.print(record.context());
+    writer.print(".");
+    writer.print(record.name());
+    String separator = ": ";
+    for (MetricsTag tag : record.tags()) {
+      writer.print(separator);
+      separator = ", ";
+      writer.print(tag.name());
+      writer.print("=");
+      writer.print(tag.value());
+    }
+    for (AbstractMetric metric : record.metrics()) {
+      writer.print(separator);
+      separator = ", ";
+      writer.print(metric.name());
+      writer.print("=");
+      writer.print(metric.value());
+    }
+    writer.println();
+  }
+
+  @Override
+  public void flush() {
+    writer.flush();
+  }
+}
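
FileSink reads its output path from the "filename" key of its sink configuration and
falls back to standard output when the key is absent. A minimal hadoop-metrics2.properties
sketch wiring it up could look like the following; the "namenode" prefix and the "file"
instance name are illustrative only:

    *.period=10
    namenode.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
    namenode.sink.file.filename=namenode-metrics.out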

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/sink/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/sink/package-info.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/sink/package-info.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/sink/package-info.java Fri May  6 07:28:43 2011
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Builtin metrics sinks
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+package org.apache.hadoop.metrics2.sink;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
\ No newline at end of file

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetrics.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetrics.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/source/JvmMetrics.java Fri May  6 07:28:43 2011
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.source;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * JVM related metrics. Mostly used by various servers as part of the metrics
+ * they export.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class JvmMetrics {
+  // placeholder for javadoc to prevent broken links, until
+  // HADOOP-6920
+}

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/Contracts.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/Contracts.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/Contracts.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/Contracts.java Fri May  6 07:28:43 2011
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Additional helpers (besides guava Preconditions) for programming by contract
+ */
+@InterfaceAudience.Private
+public class Contracts {
+
+  private Contracts() {}
+
+  /**
+   * Check an argument for false conditions
+   * @param <T> type of the argument
+   * @param arg the argument to check
+   * @param expression  the boolean expression for the condition
+   * @param msg the error message if {@code expression} is false
+   * @return the argument for convenience
+   */
+  public static <T> T checkArg(T arg, boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
+    }
+    return arg;
+  }
+
+  /**
+   * Check an argument for false conditions
+   * @param arg the argument to check
+   * @param expression  the boolean expression for the condition
+   * @param msg the error message if {@code expression} is false
+   * @return the argument for convenience
+   */
+  public static int checkArg(int arg, boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
+    }
+    return arg;
+  }
+
+  /**
+   * Check an argument for false conditions
+   * @param arg the argument to check
+   * @param expression  the boolean expression for the condition
+   * @param msg the error message if {@code expression} is false
+   * @return the argument for convenience
+   */
+  public static long checkArg(long arg, boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
+    }
+    return arg;
+  }
+
+  /**
+   * Check an argument for false conditions
+   * @param arg the argument to check
+   * @param expression  the boolean expression for the condition
+   * @param msg the error message if {@code expression} is false
+   * @return the argument for convenience
+   */
+  public static float checkArg(float arg, boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
+    }
+    return arg;
+  }
+
+  /**
+   * Check an argument for false conditions
+   * @param arg the argument to check
+   * @param expression  the boolean expression for the condition
+   * @param msg the error message if {@code expression} is false
+   * @return the argument for convenience
+   */
+  public static double checkArg(double arg, boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
+    }
+    return arg;
+  }
+}
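
The checkArg overloads return the checked value, so they slot directly into field
assignments. A small sketch of that pattern, using a made-up class and argument:

    // hypothetical constructor validating its argument via Contracts.checkArg
    MyPeriodicTask(int intervalSecs) {
      this.intervalSecs = Contracts.checkArg(intervalSecs, intervalSecs > 0,
                                             "interval should be positive");
    }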

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/MBeans.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/MBeans.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/MBeans.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/MBeans.java Fri May  6 07:28:43 2011
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.util;
+
+import java.lang.management.ManagementFactory;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+
+/**
+ * This util class provides a method to register an MBean using
+ * our standard naming convention as described in the doc
+ * for {@link #register(String, String, Object)}.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class MBeans {
+  private static final Log LOG = LogFactory.getLog(MBeans.class);
+
+  /**
+   * Register the MBean using our standard MBeanName format
+   * "Hadoop:service=<serviceName>,name=<nameName>",
+   * where <serviceName> and <nameName> are the supplied parameters.
+   *
+   * @param serviceName the service part of the MBean name
+   * @param nameName  the name part of the MBean name
+   * @param theMbean  the MBean to register
+   * @return the name used to register the MBean
+   */
+  static public ObjectName register(String serviceName, String nameName,
+                                    Object theMbean) {
+    final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+    ObjectName name = getMBeanName(serviceName, nameName);
+    try {
+      mbs.registerMBean(theMbean, name);
+      LOG.debug("Registered "+ name);
+      return name;
+    } catch (Exception e) {
+      LOG.warn("Error registering "+ name, e);
+    }
+    return null;
+  }
+
+  static public void unregister(ObjectName mbeanName) {
+    LOG.debug("Unregistering "+ mbeanName);
+    final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+    if (mbeanName == null) {
+      LOG.debug("Stacktrace: ", new Throwable());
+      return;
+    }
+    try {
+      mbs.unregisterMBean(mbeanName);
+    } catch (Exception e) {
+      LOG.warn("Error unregistering "+ mbeanName, e);
+    }
+  }
+
+  static private ObjectName getMBeanName(String serviceName, String nameName) {
+    ObjectName name = null;
+    String nameStr = "Hadoop:service="+ serviceName +",name="+ nameName;
+    try {
+      name = DefaultMetricsSystem.newMBeanName(nameStr);
+    } catch (Exception e) {
+      LOG.warn("Error creating MBean object name: "+ nameStr, e);
+    }
+    return name;
+  }
+}
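
A typical caller registers a bean at startup and unregisters it on shutdown, roughly
as below; the service and bean names here are placeholders, not names used elsewhere
in this patch:

    // registers under "Hadoop:service=MyService,name=MyServiceInfo"
    ObjectName beanName = MBeans.register("MyService", "MyServiceInfo", mbeanImpl);
    // ... later, during shutdown:
    MBeans.unregister(beanName);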

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/MetricsCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/MetricsCache.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/MetricsCache.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/MetricsCache.java Fri May  6 07:28:43 2011
@@ -0,0 +1,175 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.util;
+
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import com.google.common.base.Objects;
+import com.google.common.collect.Maps;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics2.AbstractMetric;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsTag;
+
+/**
+ * A metrics cache for sinks that don't support sparse updates.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class MetricsCache {
+  static final Log LOG = LogFactory.getLog(MetricsCache.class);
+  static final int MAX_RECS_PER_NAME_DEFAULT = 1000;
+
+  private final Map<String, RecordCache> map = Maps.newHashMap();
+  private final int maxRecsPerName;
+
+  class RecordCache
+      extends LinkedHashMap<Collection<MetricsTag>, Record> {
+    private static final long serialVersionUID = 1L;
+    private boolean gotOverflow = false;
+
+    @Override
+    protected boolean removeEldestEntry(Map.Entry<Collection<MetricsTag>,
+                                                  Record> eldest) {
+      boolean overflow = size() > maxRecsPerName;
+      if (overflow && !gotOverflow) {
+        LOG.warn("Metrics cache overflow at "+ size() +" for "+ eldest);
+        gotOverflow = true;
+      }
+      return overflow;
+    }
+  }
+
+  /**
+   * Cached record
+   */
+  public static class Record {
+    final Map<String, String> tags = Maps.newHashMap();
+    final Map<String, Number> metrics = Maps.newHashMap();
+
+    /**
+     * Lookup a tag value
+     * @param key name of the tag
+     * @return the tag value
+     */
+    public String getTag(String key) {
+      return tags.get(key);
+    }
+
+    /**
+     * Lookup a metric value
+     * @param key name of the metric
+     * @return the metric value
+     */
+    public Number getMetric(String key) {
+      return metrics.get(key);
+    }
+
+    /**
+     * @return the entry set of the tags of the record
+     */
+    public Set<Map.Entry<String, String>> tags() {
+      return tags.entrySet();
+    }
+
+    /**
+     * @return entry set of the metrics of the record
+     */
+    public Set<Map.Entry<String, Number>> metrics() {
+      return metrics.entrySet();
+    }
+
+    @Override public String toString() {
+      return Objects.toStringHelper(this)
+          .add("tags", tags).add("metrics", metrics)
+          .toString();
+    }
+  }
+
+  public MetricsCache() {
+    this(MAX_RECS_PER_NAME_DEFAULT);
+  }
+
+  /**
+   * Construct a metrics cache
+   * @param maxRecsPerName  limit on the number of records per record name
+   */
+  public MetricsCache(int maxRecsPerName) {
+    this.maxRecsPerName = maxRecsPerName;
+  }
+
+  /**
+   * Update the cache and return the current cached record
+   * @param mr the update record
+   * @param includingTags cache tag values (for later lookup by name) if true
+   * @return the updated cache record
+   */
+  public Record update(MetricsRecord mr, boolean includingTags) {
+    String name = mr.name();
+    RecordCache recordCache = map.get(name);
+    if (recordCache == null) {
+      recordCache = new RecordCache();
+      map.put(name, recordCache);
+    }
+    Collection<MetricsTag> tags = mr.tags();
+    Record record = recordCache.get(tags);
+    if (record == null) {
+      record = new Record();
+      recordCache.put(tags, record);
+    }
+    for (AbstractMetric m : mr.metrics()) {
+      record.metrics.put(m.name(), m.value());
+    }
+    if (includingTags) {
+      // mostly for some sinks that include tags as part of a dense schema
+      for (MetricsTag t : mr.tags()) {
+        record.tags.put(t.name(), t.value());
+      }
+    }
+    return record;
+  }
+
+  /**
+   * Update the cache and return the current cache record
+   * @param mr the update record
+   * @return the updated cache record
+   */
+  public Record update(MetricsRecord mr) {
+    return update(mr, false);
+  }
+
+  /**
+   * Get the cached record
+   * @param name of the record
+   * @param tags of the record
+   * @return the cached record or null
+   */
+  public Record get(String name, Collection<MetricsTag> tags) {
+    RecordCache rc = map.get(name);
+    if (rc == null) return null;
+    return rc.get(tags);
+  }
+}
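
A sink that needs a dense view of each record would typically call update() from
putMetrics() and read values back by name; a rough sketch, with an invented metric
name ("CallCount"):

    private final MetricsCache cache = new MetricsCache();

    @Override public void putMetrics(MetricsRecord record) {
      // merge the sparse update into the cached record, keeping tag values as well
      MetricsCache.Record current = cache.update(record, true);
      Number callCount = current.getMetric("CallCount");  // null if never reported
      // ... emit the dense record to the backend
    }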

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/SampleStat.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/SampleStat.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/SampleStat.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/SampleStat.java Fri May  6 07:28:43 2011
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Helper to compute running sample stats
+ */
+@InterfaceAudience.Private
+public class SampleStat {
+  private final MinMax minmax = new MinMax();
+  private long numSamples = 0;
+  private double a0, a1, s0, s1;
+
+  /**
+   * Construct a new running sample stat
+   */
+  public SampleStat() {
+    a0 = s0 = 0.0;
+  }
+
+  public void reset() {
+    numSamples = 0;
+    a0 = s0 = 0.0;
+    minmax.reset();
+  }
+
+  // We want to reuse the object, sometimes.
+  void reset(long numSamples, double a0, double a1, double s0, double s1,
+             MinMax minmax) {
+    this.numSamples = numSamples;
+    this.a0 = a0;
+    this.a1 = a1;
+    this.s0 = s0;
+    this.s1 = s1;
+    this.minmax.reset(minmax);
+  }
+
+  /**
+   * Copy the values to {@code other} (saves object creation and GC).
+   * @param other the destination to hold our values
+   */
+  public void copyTo(SampleStat other) {
+    other.reset(numSamples, a0, a1, s0, s1, minmax);
+  }
+
+  /**
+   * Add a sample to the running stat.
+   * @param x the sample value
+   * @return  self
+   */
+  public SampleStat add(double x) {
+    minmax.add(x);
+    return add(1, x);
+  }
+
+  /**
+   * Add a number of samples and a partial sum to the running stat.
+   * Note, min/max is not evaluated using this method.
+   * @param nSamples  number of samples
+   * @param x the partial sum
+   * @return  self
+   */
+  public SampleStat add(long nSamples, double x) {
+    numSamples += nSamples;
+
+    if (numSamples == 1) {
+      a0 = a1 = x;
+      s0 = 0.0;
+    }
+    else {
+      // The Welford method for numerical stability
+      a1 = a0 + (x - a0) / numSamples;
+      s1 = s0 + (x - a0) * (x - a1);
+      a0 = a1;
+      s0 = s1;
+    }
+    return this;
+  }
+
+  /**
+   * @return  the total number of samples
+   */
+  public long numSamples() {
+    return numSamples;
+  }
+
+  /**
+   * @return  the arithmetic mean of the samples
+   */
+  public double mean() {
+    return numSamples > 0 ? a1 : 0.0;
+  }
+
+  /**
+   * @return  the variance of the samples
+   */
+  public double variance() {
+    return numSamples > 1 ? s1 / (numSamples - 1) : 0.0;
+  }
+
+  /**
+   * @return  the standard deviation of the samples
+   */
+  public double stddev() {
+    return Math.sqrt(variance());
+  }
+
+  /**
+   * @return  the minimum value of the samples
+   */
+  public double min() {
+    return minmax.min();
+  }
+
+  /**
+   * @return  the maximum value of the samples
+   */
+  public double max() {
+    return minmax.max();
+  }
+
+  /**
+   * Helper to keep running min/max
+   */
+  @SuppressWarnings("PublicInnerClass")
+  public static class MinMax {
+
+    private double min = Double.MAX_VALUE;
+    private double max = Double.MIN_VALUE;
+
+    public void add(double value) {
+      if (value > max) max = value;
+      if (value < min) min = value;
+    }
+
+    public double min() { return min; }
+    public double max() { return max; }
+
+    public void reset() {
+      min = Double.MAX_VALUE;
+      max = Double.MIN_VALUE;
+    }
+
+    public void reset(MinMax other) {
+      min = other.min();
+      max = other.max();
+    }
+  }
+}
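
As a quick sanity check of the Welford update above: adding the samples 3, 5 and 7
gives mean 5, variance (4 + 0 + 4) / 2 = 4 and hence stddev 2.

    SampleStat stat = new SampleStat();
    stat.add(3).add(5).add(7);
    // stat.mean() == 5.0, stat.variance() == 4.0, stat.stddev() == 2.0
    // stat.min() == 3.0, stat.max() == 7.0, stat.numSamples() == 3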

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/Servers.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/Servers.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/Servers.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/Servers.java Fri May  6 07:28:43 2011
@@ -0,0 +1,72 @@
+/*
+ * Servers.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.hadoop.metrics2.util;
+
+import java.net.InetSocketAddress;
+import java.util.List;
+
+import com.google.common.collect.Lists;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Helpers to handle server addresses
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class Servers {
+  /**
+   * This class is not intended to be instantiated
+   */
+  private Servers() {}
+
+  /**
+   * Parses a space and/or comma separated sequence of server specifications
+   * of the form <i>hostname</i> or <i>hostname:port</i>.  If
+   * the specs string is null, defaults to localhost:defaultPort.
+   *
+   * @param specs   server specs (see description)
+   * @param defaultPort the default port if not specified
+   * @return a list of InetSocketAddress objects.
+   */
+  public static List<InetSocketAddress> parse(String specs, int defaultPort) {
+    List<InetSocketAddress> result = Lists.newArrayList();
+    if (specs == null) {
+      result.add(new InetSocketAddress("localhost", defaultPort));
+    }
+    else {
+      String[] specStrings = specs.split("[ ,]+");
+      for (String specString : specStrings) {
+        int colon = specString.indexOf(':');
+        if (colon < 0 || colon == specString.length() - 1) {
+          result.add(new InetSocketAddress(specString, defaultPort));
+        } else {
+          String hostname = specString.substring(0, colon);
+          int port = Integer.parseInt(specString.substring(colon+1));
+          result.add(new InetSocketAddress(hostname, port));
+        }
+      }
+    }
+    return result;
+  }
+}
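
For example, a spec string naming two collectors (the hostnames are made up) parses
as follows, and a null spec falls back to localhost on the default port:

    List<InetSocketAddress> addrs = Servers.parse("collector1:8649, collector2", 8649);
    // -> [collector1:8649, collector2:8649]
    // Servers.parse(null, 8649) -> [localhost:8649]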

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/package-info.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/package-info.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics2/util/package-info.java Fri May  6 07:28:43 2011
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * General helpers for implementing source and sinks
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+package org.apache.hadoop.metrics2.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/filter/TestPatternFilter.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/filter/TestPatternFilter.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/filter/TestPatternFilter.java Fri May  6 07:28:43 2011
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.filter;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.configuration.SubsetConfiguration;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.metrics2.MetricsTag;
+import org.apache.hadoop.metrics2.impl.ConfigBuilder;
+import static org.apache.hadoop.metrics2.lib.Interns.*;
+
+public class TestPatternFilter {
+
+  /**
+   * Filters should default to accept
+   */
+  @Test public void emptyConfigShouldAccept() {
+    SubsetConfiguration empty = new ConfigBuilder().subset("");
+    shouldAccept(empty, "anything");
+    shouldAccept(empty, Arrays.asList(tag("key", "desc", "value")));
+  }
+
+  /**
+   * Filters should handle white-listing correctly
+   */
+  @Test public void includeOnlyShouldOnlyIncludeMatched() {
+    SubsetConfiguration wl = new ConfigBuilder()
+        .add("p.include", "foo")
+        .add("p.include.tags", "foo:f").subset("p");
+    shouldAccept(wl, "foo");
+    shouldAccept(wl, Arrays.asList(tag("bar", "", ""),
+                                   tag("foo", "", "f")));
+    shouldReject(wl, "bar");
+    shouldReject(wl, Arrays.asList(tag("bar", "", "")));
+    shouldReject(wl, Arrays.asList(tag("foo", "", "boo")));
+  }
+
+  /**
+   * Filters should handle black-listing correctly
+   */
+  @Test public void excludeOnlyShouldOnlyExcludeMatched() {
+    SubsetConfiguration bl = new ConfigBuilder()
+        .add("p.exclude", "foo")
+        .add("p.exclude.tags", "foo:f").subset("p");
+    shouldAccept(bl, "bar");
+    shouldAccept(bl, Arrays.asList(tag("bar", "", "")));
+    shouldReject(bl, "foo");
+    shouldReject(bl, Arrays.asList(tag("bar", "", ""),
+                                   tag("foo", "", "f")));
+  }
+
+  /**
+   * Filters should accept unmatched items when both include and
+   * exclude patterns are present.
+   */
+  @Test public void shouldAcceptUnmatchedWhenBothAreConfigured() {
+    SubsetConfiguration c = new ConfigBuilder()
+        .add("p.include", "foo")
+        .add("p.include.tags", "foo:f")
+        .add("p.exclude", "bar")
+        .add("p.exclude.tags", "bar:b").subset("p");
+    shouldAccept(c, "foo");
+    shouldAccept(c, Arrays.asList(tag("foo", "", "f")));
+    shouldReject(c, "bar");
+    shouldReject(c, Arrays.asList(tag("bar", "", "b")));
+    shouldAccept(c, "foobar");
+    shouldAccept(c, Arrays.asList(tag("foobar", "", "")));
+  }
+
+  /**
+   * Include patterns should take precedence over exclude patterns
+   */
+  @Test public void includeShouldOverrideExclude() {
+    SubsetConfiguration c = new ConfigBuilder()
+        .add("p.include", "foo")
+        .add("p.include.tags", "foo:f")
+        .add("p.exclude", "foo")
+        .add("p.exclude.tags", "foo:f").subset("p");
+    shouldAccept(c, "foo");
+    shouldAccept(c, Arrays.asList(tag("foo", "", "f")));
+  }
+
+  static void shouldAccept(SubsetConfiguration conf, String s) {
+    assertTrue("accepts "+ s, newGlobFilter(conf).accepts(s));
+    assertTrue("accepts "+ s, newRegexFilter(conf).accepts(s));
+  }
+
+  static void shouldAccept(SubsetConfiguration conf, List<MetricsTag> tags) {
+    assertTrue("accepts "+ tags, newGlobFilter(conf).accepts(tags));
+    assertTrue("accepts "+ tags, newRegexFilter(conf).accepts(tags));
+  }
+
+  static void shouldReject(SubsetConfiguration conf, String s) {
+    assertTrue("rejects "+ s, !newGlobFilter(conf).accepts(s));
+    assertTrue("rejects "+ s, !newRegexFilter(conf).accepts(s));
+  }
+
+  static void shouldReject(SubsetConfiguration conf, List<MetricsTag> tags) {
+    assertTrue("rejects "+ tags, !newGlobFilter(conf).accepts(tags));
+    assertTrue("rejects "+ tags, !newRegexFilter(conf).accepts(tags));
+  }
+
+  /**
+   * Create a new glob filter with a config object
+   * @param conf  the config object
+   * @return the filter
+   */
+  public static GlobFilter newGlobFilter(SubsetConfiguration conf) {
+    GlobFilter f = new GlobFilter();
+    f.init(conf);
+    return f;
+  }
+
+  /**
+   * Create a new regex filter with a config object
+   * @param conf  the config object
+   * @return the filter
+   */
+  public static RegexFilter newRegexFilter(SubsetConfiguration conf) {
+    RegexFilter f = new RegexFilter();
+    f.init(conf);
+    return f;
+  }
+}

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/ConfigBuilder.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/ConfigBuilder.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/ConfigBuilder.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/ConfigBuilder.java Fri May  6 07:28:43 2011
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.configuration.SubsetConfiguration;
+
+/**
+ * Helper class for building configs, mostly used in tests
+ */
+public class ConfigBuilder {
+  /** The built config */
+  public final PropertiesConfiguration config;
+
+  /**
+   * Default constructor
+   */
+  public ConfigBuilder() {
+    config = new PropertiesConfiguration();
+  }
+
+  /**
+   * Add a property to the config
+   * @param key of the property
+   * @param value of the property
+   * @return self
+   */
+  public ConfigBuilder add(String key, Object value) {
+    config.addProperty(key, value);
+    return this;
+  }
+
+  /**
+   * Save the config to a file
+   * @param filename  to save
+   * @return self
+   * @throws RuntimeException if the config cannot be saved
+   */
+  public ConfigBuilder save(String filename) {
+    try {
+      config.save(filename);
+    }
+    catch (Exception e) {
+      throw new RuntimeException("Error saving config", e);
+    }
+    return this;
+  }
+
+  /**
+   * Return a subset configuration (so getParent() can be used.)
+   * @param prefix  of the subset
+   * @return the subset config
+   */
+  public SubsetConfiguration subset(String prefix) {
+    return new SubsetConfiguration(config, prefix, ".");
+  }
+}
+
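
The tests below use the builder heavily; as a standalone sketch, a sink configuration
could be assembled and narrowed like this (the property names follow the FileSink
example above):

    SubsetConfiguration sinkConf = new ConfigBuilder()
        .add("test.sink.file.class", FileSink.class.getName())
        .add("test.sink.file.filename", "test-metrics.out")
        .subset("test.sink.file");
    // sinkConf.getString("filename") == "test-metrics.out"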

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/ConfigUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/ConfigUtil.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/ConfigUtil.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/ConfigUtil.java Fri May  6 07:28:43 2011
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.io.PrintStream;
+import java.util.Iterator;
+
+import static org.junit.Assert.*;
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.PropertiesConfiguration;
+
+/**
+ * Helpers for config tests and debugging
+ */
+class ConfigUtil {
+
+  static void dump(Configuration c) {
+    dump(null, c, System.out);
+  }
+
+  static void dump(String header, Configuration c) {
+    dump(header, c, System.out);
+  }
+
+  static void dump(String header, Configuration c, PrintStream out) {
+    PropertiesConfiguration p = new PropertiesConfiguration();
+    p.copy(c);
+    if (header != null) {
+      out.println(header);
+    }
+    try { p.save(out); }
+    catch (Exception e) {
+      throw new RuntimeException("Error saving config", e);
+    }
+  }
+
+  static void assertEq(Configuration expected, Configuration actual) {
+    // Check that the actual config contains all the properties of the expected
+    for (Iterator<?> it = expected.getKeys(); it.hasNext();) {
+      String key = (String) it.next();
+      assertTrue("actual should contain "+ key, actual.containsKey(key));
+      assertEquals("value of "+ key, expected.getProperty(key),
+                                     actual.getProperty(key));
+    }
+    // Check that the actual config has no extra properties
+    for (Iterator<?> it = actual.getKeys(); it.hasNext();) {
+      String key = (String) it.next();
+      assertTrue("expected should contain "+ key, expected.containsKey(key));
+    }
+  }
+}

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/MetricsLists.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/MetricsLists.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/MetricsLists.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/MetricsLists.java Fri May  6 07:28:43 2011
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+/**
+ * Helper to create metrics list for testing
+ */
+class MetricsLists {
+
+  static MetricsRecordBuilderImpl builder(String name) {
+    return new MetricsCollectorImpl().addRecord(name);
+  }
+}

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsCollectorImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsCollectorImpl.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsCollectorImpl.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsCollectorImpl.java Fri May  6 07:28:43 2011
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+import org.apache.commons.configuration.SubsetConfiguration;
+import static org.apache.hadoop.metrics2.filter.TestPatternFilter.*;
+import static org.apache.hadoop.metrics2.lib.Interns.*;
+
+public class TestMetricsCollectorImpl {
+
+  @Test public void recordBuilderShouldNoOpIfFiltered() {
+    SubsetConfiguration fc = new ConfigBuilder()
+        .add("p.exclude", "foo").subset("p");
+    MetricsCollectorImpl mb = new MetricsCollectorImpl();
+    mb.setRecordFilter(newGlobFilter(fc));
+    MetricsRecordBuilderImpl rb = mb.addRecord("foo");
+    rb.tag(info("foo", ""), "value").addGauge(info("g0", ""), 1);
+    assertEquals("no tags", 0, rb.tags().size());
+    assertEquals("no metrics", 0, rb.metrics().size());
+    assertNull("null record", rb.getRecord());
+    assertEquals("no records", 0, mb.getRecords().size());
+  }
+
+  @Test public void testPerMetricFiltering() {
+    SubsetConfiguration fc = new ConfigBuilder()
+        .add("p.exclude", "foo").subset("p");
+    MetricsCollectorImpl mb = new MetricsCollectorImpl();
+    mb.setMetricFilter(newGlobFilter(fc));
+    MetricsRecordBuilderImpl rb = mb.addRecord("foo");
+    rb.tag(info("foo", ""), "").addCounter(info("c0", ""), 0)
+      .addGauge(info("foo", ""), 1);
+    assertEquals("1 tag", 1, rb.tags().size());
+    assertEquals("1 metric", 1, rb.metrics().size());
+    assertEquals("expect foo tag", "foo", rb.tags().get(0).name());
+    assertEquals("expect c0", "c0", rb.metrics().get(0).name());
+  }
+}

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java Fri May  6 07:28:43 2011
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.Map;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+import static org.apache.hadoop.metrics2.impl.ConfigUtil.*;
+
+/**
+ * Test metrics configuration
+ */
+public class TestMetricsConfig {
+  static final Log LOG = LogFactory.getLog(TestMetricsConfig.class);
+
+  /**
+   * Common use cases
+   * @throws Exception
+   */
+  @Test public void testCommon() throws Exception {
+    String filename = getTestFilename("test-metrics2");
+    new ConfigBuilder()
+        .add("*.foo", "default foo")
+        .add("p1.*.bar", "p1 default bar")
+        .add("p1.t1.*.bar", "p1.t1 default bar")
+        .add("p1.t1.i1.name", "p1.t1.i1.name")
+        .add("p1.t1.42.bar", "p1.t1.42.bar")
+        .add("p1.t2.i1.foo", "p1.t2.i1.foo")
+        .add("p2.*.foo", "p2 default foo")
+        .save(filename);
+
+    MetricsConfig mc = MetricsConfig.create("p1", filename);
+    LOG.debug("mc:"+ mc);
+
+    Configuration expected = new ConfigBuilder()
+        .add("*.bar", "p1 default bar")
+        .add("t1.*.bar", "p1.t1 default bar")
+        .add("t1.i1.name", "p1.t1.i1.name")
+        .add("t1.42.bar", "p1.t1.42.bar")
+        .add("t2.i1.foo", "p1.t2.i1.foo")
+        .config;
+
+    assertEq(expected, mc);
+
+    testInstances(mc);
+  }
+
+  private void testInstances(MetricsConfig c) throws Exception {
+    Map<String, MetricsConfig> map = c.getInstanceConfigs("t1");
+    Map<String, MetricsConfig> map2 = c.getInstanceConfigs("t2");
+
+    assertEquals("number of t1 instances", 2, map.size());
+    assertEquals("number of t2 instances", 1, map2.size());
+    assertTrue("contains t1 instance i1", map.containsKey("i1"));
+    assertTrue("contains t1 instance 42", map.containsKey("42"));
+    assertTrue("contains t2 instance i1", map2.containsKey("i1"));
+
+    MetricsConfig t1i1 = map.get("i1");
+    MetricsConfig t1i42 = map.get("42");
+    MetricsConfig t2i1 = map2.get("i1");
+    LOG.debug("--- t1 instance i1:"+ t1i1);
+    LOG.debug("--- t1 instance 42:"+ t1i42);
+    LOG.debug("--- t2 instance i1:"+ t2i1);
+
+    Configuration t1expected1 = new ConfigBuilder()
+        .add("name", "p1.t1.i1.name").config;
+    Configuration t1expected42 = new ConfigBuilder()
+         .add("bar", "p1.t1.42.bar").config;
+    Configuration t2expected1 = new ConfigBuilder()
+        .add("foo", "p1.t2.i1.foo").config;
+
+    assertEq(t1expected1, t1i1);
+    assertEq(t1expected42, t1i42);
+    assertEq(t2expected1, t2i1);
+
+    LOG.debug("asserting foo == default foo");
+    // Check default lookups
+    assertEquals("value of foo in t1 instance i1", "default foo",
+                 t1i1.getString("foo"));
+    assertEquals("value of bar in t1 instance i1", "p1.t1 default bar",
+                 t1i1.getString("bar"));
+    assertEquals("value of foo in t1 instance 42", "default foo",
+                 t1i42.getString("foo"));
+    assertEquals("value of foo in t2 instance i1", "p1.t2.i1.foo",
+                 t2i1.getString("foo"));
+    assertEquals("value of bar in t2 instance i1", "p1 default bar",
+                 t2i1.getString("bar"));
+  }
+
+  /**
+   * Should throw if missing config files
+   */
+  @Test public void testMissingFiles() {
+    try {
+      MetricsConfig.create("JobTracker", "non-existent.properties");
+    }
+    catch (MetricsConfigException e) {
+      assertTrue("expected the 'cannot locate configuration' exception",
+                 e.getMessage().startsWith("Cannot locate configuration"));
+      return;
+    }
+    fail("should've thrown");
+  }
+
+  /**
+   * Test the config file load order
+   * @throws Exception
+   */
+  @Test public void testLoadFirst() throws Exception {
+    String filename = getTestFilename("hadoop-metrics2-p1");
+    new ConfigBuilder().add("p1.foo", "p1foo").save(filename);
+
+    MetricsConfig mc = MetricsConfig.create("p1");
+    MetricsConfig mc2 = MetricsConfig.create("p1", "na1", "na2", filename);
+    Configuration expected = new ConfigBuilder().add("foo", "p1foo").config;
+
+    assertEq(expected, mc);
+    assertEq(expected, mc2);
+  }
+
+  /**
+   * Return a test filename in the class path
+   * @param basename
+   * @return the filename
+   */
+  public static String getTestFilename(String basename) {
+    return "build/classes/"+ basename +".properties";
+  }
+}

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java Fri May  6 07:28:43 2011
@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.List;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.runners.MockitoJUnitRunner;
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
+
+import com.google.common.collect.Iterables;
+
+import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics2.MetricsException;
+import static org.apache.hadoop.test.MoreAsserts.*;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsSink;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.MetricsTag;
+import org.apache.hadoop.metrics2.annotation.*;
+import static org.apache.hadoop.metrics2.lib.Interns.*;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MutableRate;
+import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
+
+/**
+ * Test the MetricsSystemImpl class
+ */
+@RunWith(MockitoJUnitRunner.class)
+public class TestMetricsSystemImpl {
+  private static final Log LOG = LogFactory.getLog(TestMetricsSystemImpl.class);
+  @Captor private ArgumentCaptor<MetricsRecord> r1;
+  @Captor private ArgumentCaptor<MetricsRecord> r2;
+  private static String hostname = MetricsSystemImpl.getHostname();
+
+  public static class TestSink implements MetricsSink {
+
+    @Override public void putMetrics(MetricsRecord record) {
+      LOG.debug(record);
+    }
+
+    @Override public void flush() {}
+
+    @Override public void init(SubsetConfiguration conf) {
+      LOG.debug(MetricsConfig.toString(conf));
+    }
+  }
+
+  @Test public void testInitFirst() throws Exception {
+    ConfigBuilder cb = new ConfigBuilder().add("*.period", 8)
+        //.add("test.sink.plugin.urls", getPluginUrlsAsString())
+        .add("test.sink.test.class", TestSink.class.getName())
+        .add("test.*.source.filter.exclude", "s0")
+        .add("test.source.s1.metric.filter.exclude", "X*")
+        .add("test.sink.sink1.metric.filter.exclude", "Y*")
+        .add("test.sink.sink2.metric.filter.exclude", "Y*")
+        .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
+    MetricsSystemImpl ms = new MetricsSystemImpl("Test");
+    ms.start();
+    ms.register("s0", "s0 desc", new TestSource("s0rec"));
+    TestSource s1 = ms.register("s1", "s1 desc", new TestSource("s1rec"));
+    s1.c1.incr();
+    s1.xxx.incr();
+    s1.g1.set(2);
+    s1.yyy.incr(2);
+    s1.s1.add(0);
+    MetricsSink sink1 = mock(MetricsSink.class);
+    MetricsSink sink2 = mock(MetricsSink.class);
+    ms.registerSink("sink1", "sink1 desc", sink1);
+    ms.registerSink("sink2", "sink2 desc", sink2);
+    ms.onTimerEvent();  // trigger something interesting
+    ms.stop();
+
+    verify(sink1, times(2)).putMetrics(r1.capture());
+    List<MetricsRecord> mr1 = r1.getAllValues();
+    verify(sink2, times(2)).putMetrics(r2.capture());
+    List<MetricsRecord> mr2 = r2.getAllValues();
+    checkMetricsRecords(mr1);
+    assertEquals("output", mr1, mr2);
+  }
+
+  @Test public void testRegisterDups() {
+    MetricsSystem ms = new MetricsSystemImpl();
+    TestSource ts1 = new TestSource("ts1");
+    TestSource ts2 = new TestSource("ts2");
+    ms.register("ts1", "", ts1);
+    MetricsSource s1 = ms.getSource("ts1");
+    assertNotNull(s1);
+    // should work when metrics system is not started
+    ms.register("ts1", "", ts2);
+    MetricsSource s2 = ms.getSource("ts1");
+    assertNotNull(s2);
+    assertNotSame(s1, s2);
+  }
+
+  @Test(expected=MetricsException.class) public void testRegisterDupError() {
+    MetricsSystem ms = new MetricsSystemImpl("test");
+    TestSource ts = new TestSource("ts");
+    ms.register(ts);
+    ms.register(ts);
+  }
+
+
+  private void checkMetricsRecords(List<MetricsRecord> recs) {
+    LOG.debug(recs);
+    MetricsRecord r = recs.get(0);
+    assertEquals("name", "s1rec", r.name());
+    assertEquals("tags", new MetricsTag[] {
+      tag(MsInfo.Context, "test"),
+      tag(MsInfo.Hostname, hostname)}, r.tags());
+    assertEquals("metrics", MetricsLists.builder("")
+      .addCounter(info("C1", "C1 desc"), 1L)
+      .addGauge(info("G1", "G1 desc"), 2L)
+      .addCounter(info("S1NumOps", "Number of ops for s1"), 1L)
+      .addGauge(info("S1AvgTime", "Average time for s1"), 0.0)
+      .metrics(), r.metrics());
+
+    r = recs.get(1);
+    assertTrue("NumActiveSinks should be 3", Iterables.contains(r.metrics(),
+               new MetricGaugeInt(MsInfo.NumActiveSinks, 3)));
+  }
+
+  @Metrics(context="test")
+  private static class TestSource {
+    @Metric("C1 desc") MutableCounterLong c1;
+    @Metric("XXX desc") MutableCounterLong xxx;
+    @Metric("G1 desc") MutableGaugeLong g1;
+    @Metric("YYY desc") MutableGaugeLong yyy;
+    @Metric MutableRate s1;
+    final MetricsRegistry registry;
+
+    TestSource(String recName) {
+      registry = new MetricsRegistry(recName);
+    }
+  }
+
+  private static String getPluginUrlsAsString() {
+    return "file:metrics2-test-plugin.jar";
+  }
+}
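
The ConfigBuilder chain in testInitFirst assembles a metrics2 configuration in memory and saves it under the "hadoop-metrics2-test" name. For orientation only, here is how the same keys would presumably read in the usual prefix.[source|sink].instance.option properties layout; this is an illustrative sketch, not part of the commit, and the period unit (seconds) is an assumption:

    # snapshot/publish period for every prefix (presumably seconds)
    *.period=8
    # instantiate the sink class under the "test" prefix as sink instance "test"
    test.sink.test.class=<value of TestSink.class.getName()>
    # drop the s0 source entirely for the "test" prefix
    test.*.source.filter.exclude=s0
    # drop metrics matching X* from source s1, and Y* at sinks sink1 and sink2
    test.source.s1.metric.filter.exclude=X*
    test.sink.sink1.metric.filter.exclude=Y*
    test.sink.sink2.metric.filter.exclude=Y*

The verify calls that follow (two records per sink and identical output on sink1 and sink2) are what exercise these filters.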

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsVisitor.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsVisitor.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsVisitor.java Fri May  6 07:28:43 2011
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.List;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+import org.junit.runner.RunWith;
+
+import static org.mockito.Mockito.*;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import org.apache.hadoop.metrics2.MetricsVisitor;
+import org.apache.hadoop.metrics2.AbstractMetric;
+import org.apache.hadoop.metrics2.MetricsInfo;
+import static org.apache.hadoop.metrics2.lib.Interns.*;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+
+/**
+ * Test the metric visitor interface
+ */
+@RunWith(MockitoJUnitRunner.class)
+public class TestMetricsVisitor {
+  @Captor private ArgumentCaptor<MetricsInfo> c1;
+  @Captor private ArgumentCaptor<MetricsInfo> c2;
+  @Captor private ArgumentCaptor<MetricsInfo> g1;
+  @Captor private ArgumentCaptor<MetricsInfo> g2;
+  @Captor private ArgumentCaptor<MetricsInfo> g3;
+  @Captor private ArgumentCaptor<MetricsInfo> g4;
+
+  /**
+   * Test the common use cases
+   */
+  @Test public void testCommon() {
+    MetricsVisitor visitor = mock(MetricsVisitor.class);
+    MetricsRegistry registry = new MetricsRegistry("test");
+    List<AbstractMetric> metrics = MetricsLists.builder("test")
+        .addCounter(info("c1", "int counter"), 1)
+        .addCounter(info("c2", "long counter"), 2L)
+        .addGauge(info("g1", "int gauge"), 5)
+        .addGauge(info("g2", "long gauge"), 6L)
+        .addGauge(info("g3", "float gauge"), 7f)
+        .addGauge(info("g4", "double gauge"), 8d)
+        .metrics();
+
+    for (AbstractMetric metric : metrics) {
+      metric.visit(visitor);
+    }
+
+    verify(visitor).counter(c1.capture(), eq(1));
+    assertEquals("c1 name", "c1", c1.getValue().name());
+    assertEquals("c1 description", "int counter", c1.getValue().description());
+    verify(visitor).counter(c2.capture(), eq(2L));
+    assertEquals("c2 name", "c2", c2.getValue().name());
+    assertEquals("c2 description", "long counter", c2.getValue().description());
+    verify(visitor).gauge(g1.capture(), eq(5));
+    assertEquals("g1 name", "g1", g1.getValue().name());
+    assertEquals("g1 description", "int gauge", g1.getValue().description());
+    verify(visitor).gauge(g2.capture(), eq(6L));
+    assertEquals("g2 name", "g2", g2.getValue().name());
+    assertEquals("g2 description", "long gauge", g2.getValue().description());
+    verify(visitor).gauge(g3.capture(), eq(7f));
+    assertEquals("g3 name", "g3", g3.getValue().name());
+    assertEquals("g3 description", "float gauge", g3.getValue().description());
+    verify(visitor).gauge(g4.capture(), eq(8d));
+    assertEquals("g4 name", "g4", g4.getValue().name());
+    assertEquals("g4 description", "double gauge", g4.getValue().description());
+  }
+
+}
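
TestMetricsVisitor only checks dispatch through a Mockito mock. As a rough illustration of what a concrete visitor looks like, here is a minimal sketch that prints each metric by kind. It assumes the six overloads exercised above (int and long counters; int, long, float and double gauges) make up the whole MetricsVisitor interface, and the class name is made up:

    import org.apache.hadoop.metrics2.AbstractMetric;
    import org.apache.hadoop.metrics2.MetricsInfo;
    import org.apache.hadoop.metrics2.MetricsVisitor;

    // Illustrative only: prints every metric with its kind, name and description.
    class PrintingVisitor implements MetricsVisitor {
      @Override public void counter(MetricsInfo info, int value) { print("counter", info, value); }
      @Override public void counter(MetricsInfo info, long value) { print("counter", info, value); }
      @Override public void gauge(MetricsInfo info, int value) { print("gauge", info, value); }
      @Override public void gauge(MetricsInfo info, long value) { print("gauge", info, value); }
      @Override public void gauge(MetricsInfo info, float value) { print("gauge", info, value); }
      @Override public void gauge(MetricsInfo info, double value) { print("gauge", info, value); }

      private void print(String kind, MetricsInfo info, Object value) {
        System.out.println(kind +" "+ info.name() +" ("+ info.description() +") = "+ value);
      }

      // Dispatch works the same way the test drives the mock: metric.visit(visitor).
      static void dump(Iterable<AbstractMetric> metrics) {
        MetricsVisitor v = new PrintingVisitor();
        for (AbstractMetric metric : metrics) {
          metric.visit(v);
        }
      }
    }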

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestSinkQueue.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestSinkQueue.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestSinkQueue.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/impl/TestSinkQueue.java Fri May  6 07:28:43 2011
@@ -0,0 +1,267 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.ConcurrentModificationException;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import static org.apache.hadoop.metrics2.impl.SinkQueue.*;
+
+/**
+ * Test the half-blocking metrics sink queue
+ */
+public class TestSinkQueue {
+  private final Log LOG = LogFactory.getLog(TestSinkQueue.class);
+
+  /**
+   * Test common use case
+   * @throws Exception
+   */
+  @Test public void testCommon() throws Exception {
+    final SinkQueue<Integer> q = new SinkQueue<Integer>(2);
+    q.enqueue(1);
+    assertEquals("queue front", 1, (int) q.front());
+    assertEquals("queue back", 1, (int) q.back());
+    assertEquals("element", 1, (int) q.dequeue());
+
+    assertTrue("should enqueue", q.enqueue(2));
+    q.consume(new Consumer<Integer>() {
+      @Override public void consume(Integer e) {
+        assertEquals("element", 2, (int) e);
+      }
+    });
+    assertTrue("should enqueue", q.enqueue(3));
+    assertEquals("element", 3, (int) q.dequeue());
+    assertEquals("queue size", 0, q.size());
+    assertEquals("queue front", null, q.front());
+    assertEquals("queue back", null, q.back());
+  }
+
+  /**
+   * Test blocking when queue is empty
+   * @throws Exception
+   */
+  @Test public void testEmptyBlocking() throws Exception {
+    final SinkQueue<Integer> q = new SinkQueue<Integer>(2);
+    final Runnable trigger = mock(Runnable.class);
+    // try consuming from an empty queue, which should block until elements arrive
+    Thread t = new Thread() {
+      @Override public void run() {
+        try {
+          assertEquals("element", 1, (int) q.dequeue());
+          q.consume(new Consumer<Integer>() {
+            @Override public void consume(Integer e) {
+              assertEquals("element", 2, (int) e);
+              trigger.run();
+            }
+          });
+        }
+        catch (InterruptedException e) {
+          LOG.warn("Interrupted", e);
+        }
+      }
+    };
+    t.start();
+    Thread.yield(); // Let the other thread block
+    q.enqueue(1);
+    q.enqueue(2);
+    t.join();
+    verify(trigger).run();
+  }
+
+  /**
+   * Test nonblocking enqueue when queue is full
+   * @throws Exception
+   */
+  @Test public void testFull() throws Exception {
+    final SinkQueue<Integer> q = new SinkQueue<Integer>(1);
+    q.enqueue(1);
+
+    assertTrue("should drop", !q.enqueue(2));
+    assertEquals("element", 1, (int) q.dequeue());
+
+    q.enqueue(3);
+    q.consume(new Consumer<Integer>() {
+      @Override public void consume(Integer e) {
+        assertEquals("element", 3, (int) e);
+      }
+    });
+    assertEquals("queue size", 0, q.size());
+  }
+
+  /**
+   * Test the consumeAll method
+   * @throws Exception
+   */
+  @Test public void testConsumeAll() throws Exception {
+    final int capacity = 64;  // arbitrary
+    final SinkQueue<Integer> q = new SinkQueue<Integer>(capacity);
+
+    for (int i = 0; i < capacity; ++i) {
+      assertTrue("should enqueue", q.enqueue(i));
+    }
+    assertTrue("should not enqueue", !q.enqueue(capacity));
+
+    final Runnable trigger = mock(Runnable.class);
+    q.consumeAll(new Consumer<Integer>() {
+      private int expected = 0;
+      @Override public void consume(Integer e) {
+        assertEquals("element", expected++, (int) e);
+        trigger.run();
+      }
+    });
+
+    verify(trigger, times(capacity)).run();
+  }
+
+  /**
+   * Test the consumer throwing exceptions
+   * @throws Exception
+   */
+  @Test public void testConsumerException() throws Exception {
+    final SinkQueue<Integer> q = new SinkQueue<Integer>(1);
+    final RuntimeException ex = new RuntimeException("expected");
+    q.enqueue(1);
+
+    try {
+      q.consume(new Consumer<Integer>() {
+        @Override public void consume(Integer e) {
+          throw ex;
+        }
+      });
+    }
+    catch (Exception expected) {
+      assertSame("consumer exception", ex, expected);
+    }
+    // The queue should be in a consistent state after the exception
+    assertEquals("queue size", 1, q.size());
+    assertEquals("element", 1, (int) q.dequeue());
+  }
+
+  /**
+   * Test the clear method
+   */
+  @Test public void testClear() {
+    final SinkQueue<Integer> q = new SinkQueue<Integer>(128);
+    for (int i = 0; i < q.capacity() + 97; ++i) {
+      q.enqueue(i);
+    }
+    assertEquals("queue size", q.capacity(), q.size());
+    q.clear();
+    assertEquals("queue size", 0, q.size());
+  }
+
+  /**
+   * Test consumers that take their time.
+   * @throws Exception
+   */
+  @Test public void testHangingConsumer() throws Exception {
+    SinkQueue<Integer> q = newSleepingConsumerQueue(2, 1, 2);
+    assertEquals("queue back", 2, (int) q.back());
+    assertTrue("should drop", !q.enqueue(3)); // should not block
+    assertEquals("queue size", 2, q.size());
+    assertEquals("queue head", 1, (int) q.front());
+    assertEquals("queue back", 2, (int) q.back());
+  }
+
+  /**
+   * Test concurrent consumer access, which is illegal
+   * @throws Exception
+   */
+  @Test public void testConcurrentConsumers() throws Exception {
+    final SinkQueue<Integer> q = newSleepingConsumerQueue(2, 1);
+    assertTrue("should enqueue", q.enqueue(2));
+    assertEquals("queue back", 2, (int) q.back());
+    assertTrue("should drop", !q.enqueue(3)); // should not block
+    shouldThrowCME(new Fun() {
+      @Override public void run() {
+        q.clear();
+      }
+    });
+    shouldThrowCME(new Fun() {
+      @Override public void run() throws Exception {
+        q.consume(null);
+      }
+    });
+    shouldThrowCME(new Fun() {
+      @Override public void run() throws Exception {
+        q.consumeAll(null);
+      }
+    });
+    shouldThrowCME(new Fun() {
+      @Override public void run() throws Exception {
+        q.dequeue();
+      }
+    });
+    // The queue should still be in a consistent state after all the exceptions
+    assertEquals("queue size", 2, q.size());
+    assertEquals("queue front", 1, (int) q.front());
+    assertEquals("queue back", 2, (int) q.back());
+  }
+
+  private void shouldThrowCME(Fun callback) throws Exception {
+    try {
+      callback.run();
+    }
+    catch (ConcurrentModificationException e) {
+      LOG.info(e);
+      return;
+    }
+    fail("should've thrown");
+  }
+
+  private SinkQueue<Integer> newSleepingConsumerQueue(int capacity,
+                                                      int... values) {
+    final SinkQueue<Integer> q = new SinkQueue<Integer>(capacity);
+    for (int i : values) {
+      q.enqueue(i);
+    }
+    Thread t = new Thread() {
+      @Override public void run() {
+        try {
+          q.consume(new Consumer<Integer>() {
+            @Override
+            public void consume(Integer e) throws InterruptedException {
+              LOG.info("sleeping");
+              Thread.sleep(1000 * 86400); // a long time
+            }
+          });
+        }
+        catch (InterruptedException ex) {
+          LOG.warn("Interrupted", ex);
+        }
+      }
+    };
+    t.setName("Sleeping consumer");
+    t.setDaemon(true);  // so jvm can exit
+    t.start();
+    Thread.yield(); // Let the consumer consume
+    LOG.debug("Returning new sleeping consumer queue");
+    return q;
+  }
+
+  static interface Fun {
+    void run() throws Exception;
+  }
+}
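
Taken together, the tests above pin down the queue's contract: enqueue never blocks and returns false (dropping the element) when the buffer is full, consume blocks until an element is available, the element stays in the queue if the consumer throws, and only one consumer thread may drain it at a time. A minimal producer/consumer sketch along those lines, using only the calls exercised above; the demo class is made up, and Consumer is assumed to be a nested interface of SinkQueue, as the test's static import suggests:

    package org.apache.hadoop.metrics2.impl;

    import static org.apache.hadoop.metrics2.impl.SinkQueue.*;

    // Illustrative sketch of the intended usage pattern, not part of the commit.
    class SinkQueueDemo {
      public static void main(String[] args) {
        final SinkQueue<Integer> queue = new SinkQueue<Integer>(128);

        // Consumer side: a single dedicated thread drains the queue; consume()
        // blocks while the queue is empty (see testEmptyBlocking above).
        Thread drainer = new Thread() {
          @Override public void run() {
            try {
              while (true) {
                queue.consume(new Consumer<Integer>() {
                  @Override public void consume(Integer e) {
                    System.out.println("got "+ e);
                  }
                });
              }
            }
            catch (InterruptedException e) {
              // exit on interrupt
            }
          }
        };
        drainer.setDaemon(true);
        drainer.start();

        // Producer side: enqueue() never blocks; false means the element was
        // dropped because the queue was full (see testFull above).
        if (!queue.enqueue(42)) {
          System.out.println("queue full, dropped the update");
        }
      }
    }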

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestInterns.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestInterns.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestInterns.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestInterns.java Fri May  6 07:28:43 2011
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.metrics2.MetricsInfo;
+import org.apache.hadoop.metrics2.MetricsTag;
+import static org.apache.hadoop.metrics2.lib.Interns.*;
+
+public class TestInterns {
+
+  @Test public void testInfo() {
+    MetricsInfo info = info("m", "m desc");
+    assertSame("same info", info, info("m", "m desc"));
+  }
+
+  @Test public void testTag() {
+    MetricsTag tag = tag("t", "t desc", "t value");
+    assertSame("same tag", tag, tag("t", "t desc", "t value"));
+  }
+
+  @Test public void testInfoOverflow() {
+    MetricsInfo i0 = info("m0", "m desc");
+    for (int i = 0; i < MAX_INFO_NAMES + 1; ++i) {
+      info("m"+ i, "m desc");
+      if (i < MAX_INFO_NAMES) {
+        assertSame("m0 is still there", i0, info("m0", "m desc"));
+      }
+    }
+    assertNotSame("m0 is gone", i0, info("m0", "m desc"));
+
+    MetricsInfo i1 = info("m1", "m desc");
+    for (int i = 0; i < MAX_INFO_DESCS; ++i) {
+      info("m1", "m desc"+ i);
+      if (i < MAX_INFO_DESCS - 1) {
+        assertSame("i1 is still there", i1, info("m1", "m desc"));
+      }
+    }
+    assertNotSame("i1 is gone", i1,  info("m1", "m desc"));
+  }
+
+  @Test public void testTagOverflow() {
+    MetricsTag t0 = tag("t0", "t desc", "t value");
+    for (int i = 0; i < MAX_TAG_NAMES + 1; ++i) {
+      tag("t"+ i, "t desc", "t value");
+      if (i < MAX_TAG_NAMES) {
+        assertSame("t0 still there", t0, tag("t0", "t desc", "t value"));
+      }
+    }
+    assertNotSame("t0 is gone", t0, tag("t0", "t desc", "t value"));
+
+    MetricsTag t1 = tag("t1", "t desc", "t value");
+    for (int i = 0; i < MAX_TAG_VALUES; ++i) {
+      tag("t1", "t desc", "t value"+ i);
+      if (i < MAX_TAG_VALUES - 1) {
+        assertSame("t1 is still there", t1, tag("t1", "t desc", "t value"));
+      }
+    }
+    assertNotSame("t1 is gone", t1, tag("t1", "t desc", "t value"));
+  }
+}
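
The overflow tests are the important caveat here: the interning caches are bounded by MAX_INFO_NAMES, MAX_INFO_DESCS, MAX_TAG_NAMES and MAX_TAG_VALUES, so identity of the returned objects is an optimization rather than a guarantee. A small illustrative use, with made-up metric and tag names:

    import org.apache.hadoop.metrics2.MetricsInfo;
    import org.apache.hadoop.metrics2.MetricsTag;
    import static org.apache.hadoop.metrics2.lib.Interns.*;

    // Illustrative only: repeated lookups with the same arguments normally
    // return the same interned instance, which keeps per-snapshot allocation
    // down, but the bounded caches mean identity can be lost under churn.
    class InternsDemo {
      public static void main(String[] args) {
        MetricsInfo numOps = info("NumOps", "Number of operations");
        MetricsTag host = tag("Hostname", "Host name", "node-1");

        System.out.println(numOps == info("NumOps", "Number of operations")); // usually true
        System.out.println(host == tag("Hostname", "Host name", "node-1"));   // usually true
      }
    }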

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java Fri May  6 07:28:43 2011
@@ -0,0 +1,202 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
+import static org.apache.hadoop.test.MockitoMaker.*;
+
+import org.apache.hadoop.metrics2.MetricsCollector;
+import org.apache.hadoop.metrics2.MetricsException;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metric.*;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.impl.MsInfo;
+import static org.apache.hadoop.metrics2.lib.Interns.*;
+import static org.apache.hadoop.test.MetricsAsserts.*;
+
+public class TestMetricsAnnotations {
+
+  static class MyMetrics {
+    @Metric MutableCounterInt c1;
+    @Metric({"Counter2", "Counter2 desc"}) MutableCounterLong c2;
+    @Metric MutableGaugeInt g1, g2;
+    @Metric("g3 desc") MutableGaugeLong g3;
+    @Metric MutableRate r1;
+    @Metric MutableStat s1;
+    @Metric MutableRates rs1;
+  }
+
+  @Test public void testFields() {
+    MyMetrics metrics = new MyMetrics();
+    MetricsSource source = MetricsAnnotations.makeSource(metrics);
+
+    metrics.c1.incr();
+    metrics.c2.incr();
+    metrics.g1.incr();
+    metrics.g2.incr();
+    metrics.g3.incr();
+    metrics.r1.add(1);
+    metrics.s1.add(1);
+    metrics.rs1.add("rs1", 1);
+
+    MetricsRecordBuilder rb = getMetrics(source);
+
+    verify(rb).addCounter(info("C1", "C1"), 1);
+    verify(rb).addCounter(info("Counter2", "Counter2 desc"), 1L);
+    verify(rb).addGauge(info("G1", "G1"), 1);
+    verify(rb).addGauge(info("G2", "G2"), 1);
+    verify(rb).addGauge(info("G3", "g3 desc"), 1L);
+    verify(rb).addCounter(info("R1NumOps", "Number of ops for r1"), 1L);
+    verify(rb).addGauge(info("R1AvgTime", "Average time for r1"), 1.0);
+    verify(rb).addCounter(info("S1NumOps", "Number of ops for s1"), 1L);
+    verify(rb).addGauge(info("S1AvgTime", "Average time for s1"), 1.0);
+    verify(rb).addCounter(info("Rs1NumOps", "Number of ops for rs1"), 1L);
+    verify(rb).addGauge(info("Rs1AvgTime", "Average time for rs1"), 1.0);
+  }
+
+  static class BadMetrics {
+    @Metric Integer i0;
+  }
+
+  @Test(expected=MetricsException.class) public void testBadFields() {
+    MetricsAnnotations.makeSource(new BadMetrics());
+  }
+
+  static class MyMetrics2 {
+    @Metric int getG1() { return 1; }
+    @Metric long getG2() { return 2; }
+    @Metric float getG3() { return 3; }
+    @Metric double getG4() { return 4; }
+    @Metric(type=Type.COUNTER) int getC1() { return 1; }
+    @Metric(type=Type.COUNTER) long getC2() { return 2; }
+    @Metric(type=Type.TAG) String getT1() { return "t1"; }
+  }
+
+  @Test public void testMethods() {
+    MyMetrics2 metrics = new MyMetrics2();
+    MetricsSource source = MetricsAnnotations.makeSource(metrics);
+    MetricsRecordBuilder rb = getMetrics(source);
+
+    verify(rb).addGauge(info("G1", "G1"), 1);
+    verify(rb).addGauge(info("G2", "G2"), 2L);
+    verify(rb).addGauge(info("G3", "G3"), 3.0f);
+    verify(rb).addGauge(info("G4", "G4"), 4.0);
+    verify(rb).addCounter(info("C1", "C1"), 1);
+    verify(rb).addCounter(info("C2", "C2"), 2L);
+    verify(rb).tag(info("T1", "T1"), "t1");
+  }
+
+  static class BadMetrics2 {
+    @Metric int foo(int i) { return i; }
+  }
+
+  @Test(expected=IllegalArgumentException.class)
+  public void testBadMethodWithArgs() {
+    MetricsAnnotations.makeSource(new BadMetrics2());
+  }
+
+  static class BadMetrics3 {
+    @Metric boolean foo() { return true; }
+  }
+
+  @Test(expected=MetricsException.class)
+  public void testBadMethodReturnType() {
+    MetricsAnnotations.makeSource(new BadMetrics3());
+  }
+
+  @Metrics(about="My metrics", context="foo")
+  static class MyMetrics3 {
+    @Metric int getG1() { return 1; }
+  }
+
+  @Test public void testClasses() {
+    MetricsRecordBuilder rb = getMetrics(
+        MetricsAnnotations.makeSource(new MyMetrics3()));
+    MetricsCollector collector = rb.parent();
+
+    verify(collector).addRecord(info("MyMetrics3", "My metrics"));
+    verify(rb).add(tag(MsInfo.Context, "foo"));
+  }
+
+  static class HybridMetrics implements MetricsSource {
+    final MetricsRegistry registry = new MetricsRegistry("HybridMetrics")
+        .setContext("hybrid");
+    @Metric("C0 desc") MutableCounterInt C0;
+
+    @Metric int getG0() { return 0; }
+
+    public void getMetrics(MetricsCollector collector, boolean all) {
+      collector.addRecord("foo")
+                  .setContext("foocontext")
+                  .addCounter(info("C1", "C1 desc"), 1)
+                  .endRecord()
+               .addRecord("bar")
+                  .setContext("barcontext")
+                  .addGauge(info("G1", "G1 desc"), 1);
+      registry.snapshot(collector.addRecord(registry.info()), all);
+    }
+  }
+
+  @Test public void testHybrid() {
+    HybridMetrics metrics = new HybridMetrics();
+    MetricsSource source = MetricsAnnotations.makeSource(metrics);
+
+    assertSame(metrics, source);
+    metrics.C0.incr();
+    MetricsRecordBuilder rb = getMetrics(source);
+    MetricsCollector collector = rb.parent();
+
+    verify(collector).addRecord("foo");
+    verify(collector).addRecord("bar");
+    verify(collector).addRecord(info("HybridMetrics", "HybridMetrics"));
+    verify(rb).setContext("foocontext");
+    verify(rb).addCounter(info("C1", "C1 desc"), 1);
+    verify(rb).setContext("barcontext");
+    verify(rb).addGauge(info("G1", "G1 desc"), 1);
+    verify(rb).add(tag(MsInfo.Context, "hybrid"));
+    verify(rb).addCounter(info("C0", "C0 desc"), 1);
+    verify(rb).addGauge(info("G0", "G0"), 0);
+  }
+
+  @Metrics(context="hybrid")
+  static class BadHybridMetrics implements MetricsSource {
+
+    @Metric MutableCounterInt c1;
+
+    public void getMetrics(MetricsCollector collector, boolean all) {
+      collector.addRecord("foo");
+    }
+  }
+
+  @Test(expected=MetricsException.class) public void testBadHybrid() {
+    MetricsAnnotations.makeSource(new BadHybridMetrics());
+  }
+
+  static class EmptyMetrics {
+    int foo;
+  }
+
+  @Test(expected=MetricsException.class) public void testEmptyMetrics() {
+    MetricsAnnotations.makeSource(new EmptyMetrics());
+  }
+}
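
The annotation tests above spell out the translation rules: field and method names are capitalized, an @Metric with no description gets its name as the description, a rate or stat named r1 expands into R1NumOps and R1AvgTime, and a class with no metrics at all is rejected. As a rough application-side sketch, with made-up class and metric names, and assuming a plain annotated object can be handed to MetricsSystem.register the way TestMetricsSystemImpl registers TestSource:

    import org.apache.hadoop.metrics2.annotation.Metric;
    import org.apache.hadoop.metrics2.annotation.Metrics;
    import org.apache.hadoop.metrics2.lib.MutableCounterLong;
    import org.apache.hadoop.metrics2.lib.MutableRate;

    // Illustrative only: a typical annotated source for application code.
    @Metrics(about="Request handling metrics", context="myapp")
    class RequestMetrics {
      @Metric("Requests served") MutableCounterLong requestsServed;
      @Metric MutableRate requestLatency;  // becomes RequestLatencyNumOps / RequestLatencyAvgTime

      void onRequest(long elapsedMillis) {
        requestsServed.incr();
        requestLatency.add(elapsedMillis);
      }
    }

Hooking such a class into a running system would then mirror TestMetricsSystemImpl above: start the system and call register("RequestMetrics", "Request handling metrics", new RequestMetrics()). Note that the annotated fields are populated when the object is turned into a source; the tests call MetricsAnnotations.makeSource before incrementing anything.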

Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java?rev=1100113&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java (added)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java Fri May  6 07:28:43 2011
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
+
+import org.apache.hadoop.metrics2.MetricsException;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import static org.apache.hadoop.metrics2.lib.Interns.*;
+import static org.apache.hadoop.test.MetricsAsserts.*;
+
+/**
+ * Test the metric registry class
+ */
+public class TestMetricsRegistry {
+
+  /**
+   * Test various factory methods
+   */
+  @Test public void testNewMetrics() {
+    final MetricsRegistry r = new MetricsRegistry("test");
+    r.newCounter("c1", "c1 desc", 1);
+    r.newCounter("c2", "c2 desc", 2L);
+    r.newGauge("g1", "g1 desc", 3);
+    r.newGauge("g2", "g2 desc", 4L);
+    r.newStat("s1", "s1 desc", "ops", "time");
+
+    assertEquals("num metrics in registry", 5, r.metrics().size());
+    assertTrue("c1 found", r.get("c1") instanceof MutableCounterInt);
+    assertTrue("c2 found", r.get("c2") instanceof MutableCounterLong);
+    assertTrue("g1 found", r.get("g1") instanceof MutableGaugeInt);
+    assertTrue("g2 found", r.get("g2") instanceof MutableGaugeLong);
+    assertTrue("s1 found", r.get("s1") instanceof MutableStat);
+
+    expectMetricsException("Metric name c1 already exists", new Runnable() {
+      public void run() { r.newCounter("c1", "test dup", 0); }
+    });
+  }
+
+  /**
+   * Test the add by name method
+   */
+  @Test public void testAddByName() {
+    MetricsRecordBuilder rb = mockMetricsRecordBuilder();
+    final MetricsRegistry r = new MetricsRegistry("test");
+    r.add("s1", 42);
+    r.get("s1").snapshot(rb);
+    verify(rb).addCounter(info("S1NumOps", "Number of ops for s1"), 1L);
+    verify(rb).addGauge(info("S1AvgTime", "Average time for s1"), 42.0);
+
+    r.newCounter("c1", "test add", 1);
+    r.newGauge("g1", "test add", 1);
+
+    expectMetricsException("Unsupported add", new Runnable() {
+      public void run() { r.add("c1", 42); }
+    });
+
+    expectMetricsException("Unsupported add", new Runnable() {
+      public void run() { r.add("g1", 42); }
+    });
+  }
+
+  private void expectMetricsException(String prefix, Runnable fun) {
+    try {
+      fun.run();
+    }
+    catch (MetricsException e) {
+      assertTrue("expected exception", e.getMessage().startsWith(prefix));
+      return;
+    }
+    fail("should've thrown '"+ prefix +"...'");
+  }
+}
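
Beyond the factory and add-by-name paths tested here, the registry's typical job is to snapshot everything it holds into one record, as HybridMetrics does above with registry.snapshot(collector.addRecord(registry.info()), all). Below is a small hand-rolled source built directly on MetricsRegistry, with made-up names, and assuming the newCounter/newStat factories return the mutables they create (the tests above only retrieve them via get()):

    import org.apache.hadoop.metrics2.MetricsCollector;
    import org.apache.hadoop.metrics2.MetricsSource;
    import org.apache.hadoop.metrics2.lib.MetricsRegistry;
    import org.apache.hadoop.metrics2.lib.MutableCounterLong;
    import org.apache.hadoop.metrics2.lib.MutableStat;

    // Illustrative only: the non-annotation way to build a source.
    class QueueMetricsSource implements MetricsSource {
      private final MetricsRegistry registry = new MetricsRegistry("QueueMetrics");
      private final MutableCounterLong enqueued =
          registry.newCounter("Enqueued", "Items enqueued", 0L);
      private final MutableStat waitTime =
          registry.newStat("WaitTime", "Time spent queued", "ops", "time");

      void onEnqueue() { enqueued.incr(); }
      void onDequeue(long waitedMillis) { waitTime.add(waitedMillis); }

      public void getMetrics(MetricsCollector collector, boolean all) {
        // Snapshot every registered mutable into a single record,
        // mirroring HybridMetrics in TestMetricsAnnotations.
        registry.snapshot(collector.addRecord(registry.info()), all);
      }
    }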