Posted to commits@hbase.apache.org by an...@apache.org on 2015/12/15 21:12:58 UTC

hbase git commit: HBASE-14534 Bump yammer/coda/dropwizard metrics dependency version

Repository: hbase
Updated Branches:
  refs/heads/master 3e2606316 -> abe30b52a


HBASE-14534 Bump yammer/coda/dropwizard metrics dependency version


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/abe30b52
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/abe30b52
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/abe30b52

Branch: refs/heads/master
Commit: abe30b52a8036078f833dc5b3d2b03daa2e93dfc
Parents: 3e26063
Author: Mikhail Antonov <an...@apache.org>
Authored: Tue Dec 15 12:11:27 2015 -0800
Committer: Mikhail Antonov <an...@apache.org>
Committed: Tue Dec 15 12:11:27 2015 -0800

----------------------------------------------------------------------
 hbase-client/pom.xml                            |   2 +-
 .../hadoop/hbase/client/MetricsConnection.java  |  87 ++++-----
 .../hbase/client/TestMetricsConnection.java     |   6 +-
 hbase-hadoop2-compat/pom.xml                    |   2 +-
 .../hadoop/metrics2/lib/MutableHistogram.java   |  16 +-
 hbase-it/pom.xml                                |   2 +-
 .../hbase/IntegrationTestRegionReplicaPerf.java |   6 +-
 .../src/main/resources/supplemental-models.xml  |   2 +-
 hbase-server/pom.xml                            |   2 +-
 .../tmpl/regionserver/BlockCacheViewTmpl.jamon  |   2 +-
 .../tmpl/regionserver/ServerMetricsTmpl.jamon   |   2 +-
 .../hadoop/hbase/io/hfile/AgeSnapshot.java      |  14 +-
 .../hadoop/hbase/io/hfile/BlockCacheUtil.java   |  22 ++-
 .../hadoop/hbase/io/hfile/CacheStats.java       |  10 +-
 .../hbase/io/hfile/HFilePrettyPrinter.java      | 189 +++++++++++++++----
 .../hbase/mapreduce/TableMapReduceUtil.java     |   6 +-
 .../hadoop/hbase/util/YammerHistogramUtils.java |  28 +--
 .../hadoop/hbase/PerformanceEvaluation.java     |  22 +--
 .../hadoop/hbase/TestPerformanceEvaluation.java |  16 +-
 .../hadoop/hbase/client/TestClientPushback.java |  11 +-
 .../hbase/wal/WALPerformanceEvaluation.java     |  38 ++--
 hbase-shaded/pom.xml                            |   4 +-
 hbase-shell/pom.xml                             |   2 +-
 pom.xml                                         |   6 +-
 24 files changed, 311 insertions(+), 186 deletions(-)
----------------------------------------------------------------------
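At a high level this commit replaces the Yammer Metrics 2.2.0 API (com.yammer.metrics)
with Dropwizard Metrics 3.1.2 (com.codahale.metrics). A minimal, self-contained sketch
of the recurring pattern in the hunks below; the class and metric names are illustrative
only, not taken from HBase:

    import com.codahale.metrics.Histogram;
    import com.codahale.metrics.MetricRegistry;
    import static com.codahale.metrics.MetricRegistry.name;

    public class MetricsMigrationSketch {
      public static void main(String[] args) {
        // 3.1.2: metrics are looked up by a dotted string name built with name()
        MetricRegistry registry = new MetricRegistry();
        Histogram latency = registry.histogram(
            name(MetricsMigrationSketch.class, "latency", "scope"));
        latency.update(42);
        // 2.2.0 equivalent: registry.newHistogram(MetricsMigrationSketch.class, "latency", "scope"),
        // with latency.mean() and latency.count() read directly off the histogram
        System.out.println("mean=" + latency.getSnapshot().getMean()
            + " count=" + latency.getCount());
      }
    }

The same rename applies to counters (newCounter becomes counter(name(...))) and timers
(newTimer becomes timer(name(...))), which is most of what the diff below does.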


http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-client/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index 401e28e..ed20a68 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -190,7 +190,7 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
index 3863c37..4fdc587 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.client;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.Descriptors.MethodDescriptor;
 import com.google.protobuf.Message;
-import com.yammer.metrics.core.Counter;
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.MetricsRegistry;
-import com.yammer.metrics.core.Timer;
-import com.yammer.metrics.reporting.JmxReporter;
-import com.yammer.metrics.util.RatioGauge;
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.Timer;
+import com.codahale.metrics.JmxReporter;
+import com.codahale.metrics.RatioGauge;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
@@ -40,11 +40,13 @@ import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
+import static com.codahale.metrics.MetricRegistry.name;
+
 /**
  * This class is for maintaining the various connection statistics and publishing them through
  * the metrics interfaces.
  *
- * This class manages its own {@link MetricsRegistry} and {@link JmxReporter} so as to not
+ * This class manages its own {@link MetricRegistry} and {@link JmxReporter} so as to not
  * conflict with other uses of Yammer Metrics within the client application. Instantiating
  * this class implicitly creates and "starts" instances of these classes; be sure to call
  * {@link #shutdown()} to terminate the thread pools they allocate.
@@ -109,18 +111,21 @@ public class MetricsConnection {
     @VisibleForTesting final Histogram reqHist;
     @VisibleForTesting final Histogram respHist;
 
-    private CallTracker(MetricsRegistry registry, String name, String subName, String scope) {
+    private CallTracker(MetricRegistry registry, String name, String subName, String scope) {
       StringBuilder sb = new StringBuilder(CLIENT_SVC).append("_").append(name);
       if (subName != null) {
         sb.append("(").append(subName).append(")");
       }
       this.name = sb.toString();
-      this.callTimer = registry.newTimer(MetricsConnection.class, DRTN_BASE + this.name, scope);
-      this.reqHist = registry.newHistogram(MetricsConnection.class, REQ_BASE + this.name, scope);
-      this.respHist = registry.newHistogram(MetricsConnection.class, RESP_BASE + this.name, scope);
+      this.callTimer = registry.timer(name(MetricsConnection.class,
+        DRTN_BASE + this.name, scope));
+      this.reqHist = registry.histogram(name(MetricsConnection.class,
+        REQ_BASE + this.name, scope));
+      this.respHist = registry.histogram(name(MetricsConnection.class,
+        RESP_BASE + this.name, scope));
     }
 
-    private CallTracker(MetricsRegistry registry, String name, String scope) {
+    private CallTracker(MetricRegistry registry, String name, String scope) {
       this(registry, name, null, scope);
     }
 
@@ -141,12 +146,12 @@ public class MetricsConnection {
     final Histogram memstoreLoadHist;
     final Histogram heapOccupancyHist;
 
-    public RegionStats(MetricsRegistry registry, String name) {
+    public RegionStats(MetricRegistry registry, String name) {
       this.name = name;
-      this.memstoreLoadHist = registry.newHistogram(MetricsConnection.class,
-          MEMLOAD_BASE + this.name);
-      this.heapOccupancyHist = registry.newHistogram(MetricsConnection.class,
-          HEAP_BASE + this.name);
+      this.memstoreLoadHist = registry.histogram(name(MetricsConnection.class,
+          MEMLOAD_BASE + this.name));
+      this.heapOccupancyHist = registry.histogram(name(MetricsConnection.class,
+          HEAP_BASE + this.name));
     }
 
     public void update(ClientProtos.RegionLoadStats regionStatistics) {
@@ -161,10 +166,13 @@ public class MetricsConnection {
     final Counter delayRunners;
     final Histogram delayIntevalHist;
 
-    public RunnerStats(MetricsRegistry registry) {
-      this.normalRunners = registry.newCounter(MetricsConnection.class, "normalRunnersCount");
-      this.delayRunners = registry.newCounter(MetricsConnection.class, "delayRunnersCount");
-      this.delayIntevalHist = registry.newHistogram(MetricsConnection.class, "delayIntervalHist");
+    public RunnerStats(MetricRegistry registry) {
+      this.normalRunners = registry.counter(
+        name(MetricsConnection.class, "normalRunnersCount"));
+      this.delayRunners = registry.counter(
+        name(MetricsConnection.class, "delayRunnersCount"));
+      this.delayIntevalHist = registry.histogram(
+        name(MetricsConnection.class, "delayIntervalHist"));
     }
 
     public void incrNormalRunners() {
@@ -233,19 +241,19 @@ public class MetricsConnection {
    */
   private static final int CONCURRENCY_LEVEL = 256;
 
-  private final MetricsRegistry registry;
+  private final MetricRegistry registry;
   private final JmxReporter reporter;
   private final String scope;
 
   private final NewMetric<Timer> timerFactory = new NewMetric<Timer>() {
     @Override public Timer newMetric(Class<?> clazz, String name, String scope) {
-      return registry.newTimer(clazz, name, scope);
+      return registry.timer(name(clazz, name, scope));
     }
   };
 
   private final NewMetric<Histogram> histogramFactory = new NewMetric<Histogram>() {
     @Override public Histogram newMetric(Class<?> clazz, String name, String scope) {
-      return registry.newHistogram(clazz, name, scope);
+      return registry.histogram(name(clazz, name, scope));
     }
   };
 
@@ -275,30 +283,26 @@ public class MetricsConnection {
 
   public MetricsConnection(final ConnectionImplementation conn) {
     this.scope = conn.toString();
-    this.registry = new MetricsRegistry();
+    this.registry = new MetricRegistry();
     final ThreadPoolExecutor batchPool = (ThreadPoolExecutor) conn.getCurrentBatchPool();
     final ThreadPoolExecutor metaPool = (ThreadPoolExecutor) conn.getCurrentMetaLookupPool();
 
-    this.registry.newGauge(this.getClass(), "executorPoolActiveThreads", scope,
+    this.registry.register(name(this.getClass(), "executorPoolActiveThreads", scope),
         new RatioGauge() {
-          @Override protected double getNumerator() {
-            return batchPool.getActiveCount();
-          }
-          @Override protected double getDenominator() {
-            return batchPool.getMaximumPoolSize();
+          @Override
+          protected Ratio getRatio() {
+            return Ratio.of(batchPool.getActiveCount(), batchPool.getMaximumPoolSize());
           }
         });
-    this.registry.newGauge(this.getClass(), "metaPoolActiveThreads", scope,
+    this.registry.register(name(this.getClass(), "metaPoolActiveThreads", scope),
         new RatioGauge() {
-          @Override protected double getNumerator() {
-            return metaPool.getActiveCount();
-          }
-          @Override protected double getDenominator() {
-            return metaPool.getMaximumPoolSize();
+          @Override
+          protected Ratio getRatio() {
+            return Ratio.of(metaPool.getActiveCount(), metaPool.getMaximumPoolSize());
           }
         });
-    this.metaCacheHits = registry.newCounter(this.getClass(), "metaCacheHits", scope);
-    this.metaCacheMisses = registry.newCounter(this.getClass(), "metaCacheMisses", scope);
+    this.metaCacheHits = registry.counter(name(this.getClass(), "metaCacheHits", scope));
+    this.metaCacheMisses = registry.counter(name(this.getClass(), "metaCacheMisses", scope));
     this.getTracker = new CallTracker(this.registry, "Get", scope);
     this.scanTracker = new CallTracker(this.registry, "Scan", scope);
     this.appendTracker = new CallTracker(this.registry, "Mutate", "Append", scope);
@@ -308,13 +312,12 @@ public class MetricsConnection {
     this.multiTracker = new CallTracker(this.registry, "Multi", scope);
     this.runnerStats = new RunnerStats(this.registry);
 
-    this.reporter = new JmxReporter(this.registry);
+    this.reporter = JmxReporter.forRegistry(this.registry).build();
     this.reporter.start();
   }
 
   public void shutdown() {
-    this.reporter.shutdown();
-    this.registry.shutdown();
+    this.reporter.stop();
   }
 
   /** Produce an instance of {@link CallStats} for clients to attach to RPCs. */
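The RatioGauge contract changed as well: 2.2.0 subclasses override getNumerator() and
getDenominator(), while 3.x subclasses override getRatio() and return a Ratio, and
JmxReporter is now obtained from a builder and stopped with stop() rather than
shutdown(). A standalone sketch of the 3.1.2 style; the pool and metric names here are
invented for illustration:

    import java.util.concurrent.Executors;
    import java.util.concurrent.ThreadPoolExecutor;
    import com.codahale.metrics.JmxReporter;
    import com.codahale.metrics.MetricRegistry;
    import com.codahale.metrics.RatioGauge;
    import static com.codahale.metrics.MetricRegistry.name;

    public class PoolGaugeSketch {
      public static void main(String[] args) {
        MetricRegistry registry = new MetricRegistry();
        final ThreadPoolExecutor pool =
            (ThreadPoolExecutor) Executors.newFixedThreadPool(4);
        // 3.x RatioGauge: a single getRatio() override instead of
        // getNumerator()/getDenominator() as in 2.2.0
        registry.register(name(PoolGaugeSketch.class, "poolActiveThreads"),
            new RatioGauge() {
              @Override
              protected Ratio getRatio() {
                return Ratio.of(pool.getActiveCount(), pool.getMaximumPoolSize());
              }
            });
        // JmxReporter is built from the registry and stopped, not shut down
        JmxReporter reporter = JmxReporter.forRegistry(registry).build();
        reporter.start();
        // ... application work ...
        reporter.stop();
        pool.shutdown();
      }
    }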

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java
index 88a653e..5191880 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java
@@ -112,9 +112,9 @@ public class TestMetricsConnection {
         METRICS.getTracker, METRICS.scanTracker, METRICS.multiTracker, METRICS.appendTracker,
         METRICS.deleteTracker, METRICS.incrementTracker, METRICS.putTracker
     }) {
-      Assert.assertEquals("Failed to invoke callTimer on " + t, loop, t.callTimer.count());
-      Assert.assertEquals("Failed to invoke reqHist on " + t, loop, t.reqHist.count());
-      Assert.assertEquals("Failed to invoke respHist on " + t, loop, t.respHist.count());
+      Assert.assertEquals("Failed to invoke callTimer on " + t, loop, t.callTimer.getCount());
+      Assert.assertEquals("Failed to invoke reqHist on " + t, loop, t.reqHist.getCount());
+      Assert.assertEquals("Failed to invoke respHist on " + t, loop, t.respHist.getCount());
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-hadoop2-compat/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml
index bae47d9..100a297 100644
--- a/hbase-hadoop2-compat/pom.xml
+++ b/hbase-hadoop2-compat/pom.xml
@@ -182,7 +182,7 @@ limitations under the License.
       <version>${hadoop-two.version}</version>
     </dependency>
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableHistogram.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableHistogram.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableHistogram.java
index de29940..c7ff940 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableHistogram.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableHistogram.java
@@ -26,9 +26,9 @@ import org.apache.hadoop.metrics2.MetricHistogram;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 
-import com.yammer.metrics.stats.ExponentiallyDecayingSample;
-import com.yammer.metrics.stats.Sample;
-import com.yammer.metrics.stats.Snapshot;
+import com.codahale.metrics.ExponentiallyDecayingReservoir;
+import com.codahale.metrics.Reservoir;
+import com.codahale.metrics.Snapshot;
 
 /**
  * A histogram implementation that runs in constant space, and exports to hadoop2's metrics2 system.
@@ -43,7 +43,7 @@ public class MutableHistogram extends MutableMetric implements MetricHistogram {
 
   protected final String name;
   protected final String desc;
-  private final Sample sample;
+  private final Reservoir reservoir;
   private final AtomicLong min;
   private final AtomicLong max;
   private final AtomicLong sum;
@@ -56,7 +56,7 @@ public class MutableHistogram extends MutableMetric implements MetricHistogram {
   public MutableHistogram(String name, String description) {
     this.name = StringUtils.capitalize(name);
     this.desc = StringUtils.uncapitalize(description);
-    sample = new ExponentiallyDecayingSample(DEFAULT_SAMPLE_SIZE, DEFAULT_ALPHA);
+    reservoir = new ExponentiallyDecayingReservoir(DEFAULT_SAMPLE_SIZE, DEFAULT_ALPHA);
     count = new AtomicLong();
     min = new AtomicLong(Long.MAX_VALUE);
     max = new AtomicLong(Long.MIN_VALUE);
@@ -66,7 +66,7 @@ public class MutableHistogram extends MutableMetric implements MetricHistogram {
   public void add(final long val) {
     setChanged();
     count.incrementAndGet();
-    sample.update(val);
+    reservoir.update(val);
     setMax(val);
     setMin(val);
     sum.getAndAdd(val);
@@ -119,9 +119,9 @@ public class MutableHistogram extends MutableMetric implements MetricHistogram {
       updateSnapshotMetrics(metricsRecordBuilder);
     }
   }
-  
+
   public void updateSnapshotMetrics(MetricsRecordBuilder metricsRecordBuilder) {
-      final Snapshot s = sample.getSnapshot();
+      final Snapshot s = reservoir.getSnapshot();
       metricsRecordBuilder.addCounter(Interns.info(name + NUM_OPS_METRIC_NAME, desc), count.get());
 
       metricsRecordBuilder.addGauge(Interns.info(name + MIN_METRIC_NAME, desc), getMin());
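The sampling classes were renamed too: com.yammer.metrics.stats.Sample and
ExponentiallyDecayingSample become Reservoir and ExponentiallyDecayingReservoir, with
statistics read from a Snapshot. A small sketch of the 3.1.2 types; the size and alpha
values here are just for illustration (they match the library's own defaults), not
necessarily what HBase configures:

    import com.codahale.metrics.ExponentiallyDecayingReservoir;
    import com.codahale.metrics.Reservoir;
    import com.codahale.metrics.Snapshot;

    public class ReservoirSketch {
      public static void main(String[] args) {
        // 2.2.0 equivalent: new ExponentiallyDecayingSample(1028, 0.015)
        Reservoir reservoir = new ExponentiallyDecayingReservoir(1028, 0.015);
        for (long v = 0; v < 1000; v++) {
          reservoir.update(v);
        }
        Snapshot s = reservoir.getSnapshot();
        System.out.printf("median=%.2f p99=%.2f%n",
            s.getMedian(), s.get99thPercentile());
      }
    }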

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-it/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index c36be34..92243f0 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -234,7 +234,7 @@
       <version>${jersey.version}</version>
     </dependency>
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
index d05e039..db52c78 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 
 import com.google.common.base.Objects;
 import com.google.common.collect.Sets;
-import com.yammer.metrics.core.Histogram;
+import com.codahale.metrics.Histogram;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -73,12 +73,12 @@ public class IntegrationTestRegionReplicaPerf extends IntegrationTestBase {
   private static final String NUM_RS_KEY = "numRs";
   private static final String NUM_RS_DEFAULT = "" + 3;
 
-  /** Extract a descriptive statistic from a {@link com.yammer.metrics.core.Histogram}. */
+  /** Extract a descriptive statistic from a {@link com.codahale.metrics.Histogram}. */
   private enum Stat {
     STDEV {
       @Override
       double apply(Histogram hist) {
-        return hist.stdDev();
+        return hist.getSnapshot().getStdDev();
       }
     },
     FOUR_9S {

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-resource-bundle/src/main/resources/supplemental-models.xml
----------------------------------------------------------------------
diff --git a/hbase-resource-bundle/src/main/resources/supplemental-models.xml b/hbase-resource-bundle/src/main/resources/supplemental-models.xml
index d01f188..2f94226 100644
--- a/hbase-resource-bundle/src/main/resources/supplemental-models.xml
+++ b/hbase-resource-bundle/src/main/resources/supplemental-models.xml
@@ -219,7 +219,7 @@ under the License.
   </supplement>
   <supplement>
     <project>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
 
       <licenses>

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 94f0b72..26aad71 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -435,7 +435,7 @@
        <optional>true</optional>
     </dependency>
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon
index 523d1b9..c23cf75 100644
--- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon
+++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon
@@ -37,7 +37,7 @@ org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
 org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator;
 org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator.Bucket;
 org.apache.hadoop.util.StringUtils;
-com.yammer.metrics.stats.Snapshot;
+com.codahale.metrics.Snapshot;
 </%import>
 <%java>
   BlockCache bc = cacheConfig == null ? null : cacheConfig.getBlockCache();

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/ServerMetricsTmpl.jamon
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/ServerMetricsTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/ServerMetricsTmpl.jamon
index 113bee1..13ccc3b 100644
--- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/ServerMetricsTmpl.jamon
+++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/ServerMetricsTmpl.jamon
@@ -32,7 +32,7 @@ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo;
 org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
 org.apache.hadoop.hbase.util.DirectMemoryUtils;
 org.apache.hadoop.util.StringUtils;
-com.yammer.metrics.stats.Snapshot;
+com.codahale.metrics.Snapshot;
 java.lang.management.ManagementFactory;
 </%import>
 <div class="tabbable">

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
index 24a4e32..79acec0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.io.hfile;
 
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.stats.Snapshot;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Snapshot;
 
 /**
  * Snapshot of block cache age in cache.
@@ -28,11 +28,9 @@ import com.yammer.metrics.stats.Snapshot;
  */
 @JsonIgnoreProperties({"ageHistogram", "snapshot"})
 public class AgeSnapshot {
-  private final Histogram ageHistogram;
   private final Snapshot snapshot;
 
   AgeSnapshot(final Histogram ageHistogram) {
-    this.ageHistogram = ageHistogram;
     this.snapshot = ageHistogram.getSnapshot();
   }
 
@@ -57,18 +55,18 @@ public class AgeSnapshot {
   }
 
   public double getMean() {
-    return this.ageHistogram.mean();
+    return this.snapshot.getMean();
   }
 
   public double getMax() {
-    return ageHistogram.max();
+    return snapshot.getMax();
   }
 
   public double getMin() {
-    return ageHistogram.min();
+    return snapshot.getMin();
   }
 
   public double getStdDev() {
-    return ageHistogram.stdDev();
+    return snapshot.getStdDev();
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
index 94638da..d81871f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
@@ -31,9 +31,11 @@ import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.map.SerializationConfig;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.MetricsRegistry;
-import com.yammer.metrics.stats.Snapshot;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.Snapshot;
+
+import static com.codahale.metrics.MetricRegistry.name;
 
 /**
  * Utilty for aggregating counts in CachedBlocks and toString/toJSON CachedBlocks and BlockCaches.
@@ -44,7 +46,7 @@ public class BlockCacheUtil {
   /**
    * Needed making histograms.
    */
-  private static final MetricsRegistry METRICS = new MetricsRegistry();
+  private static final MetricRegistry METRICS = new MetricRegistry();
 
   /**
    * Needed generating JSON.
@@ -189,7 +191,7 @@ public class BlockCacheUtil {
     private final long now = System.nanoTime();
     private final int max;
     public static final int DEFAULT_MAX = 100000;
- 
+
     CachedBlocksByFile() {
       this(null);
     }
@@ -204,7 +206,7 @@ public class BlockCacheUtil {
      */
     private NavigableMap<String, NavigableSet<CachedBlock>> cachedBlockByFile =
       new ConcurrentSkipListMap<String, NavigableSet<CachedBlock>>();
-    Histogram age = METRICS.newHistogram(CachedBlocksByFile.class, "age");
+    Histogram age = METRICS.histogram(name(CachedBlocksByFile.class, "age"));
 
     /**
      * @param cb
@@ -274,11 +276,11 @@ public class BlockCacheUtil {
 
     @Override
     public String toString() {
-      Snapshot snapshot = this.age.getSnapshot();
-      return "count=" + count + ", dataBlockCount=" + this.dataBlockCount + ", size=" + size +
+      Snapshot snapshot = age.getSnapshot();
+      return "count=" + count + ", dataBlockCount=" + dataBlockCount + ", size=" + size +
           ", dataSize=" + getDataSize() +
-          ", mean age=" + this.age.mean() + ", stddev age=" + this.age.stdDev() +
-          ", min age=" + this.age.min() + ", max age=" + this.age.max() +
+          ", mean age=" + snapshot.getMean() + ", stddev age=" + snapshot.getStdDev() +
+          ", min age=" + snapshot.getMin() + ", max age=" + snapshot.getMax() +
           ", 95th percentile age=" + snapshot.get95thPercentile() +
           ", 99th percentile age=" + snapshot.get99thPercentile();
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java
index fff6585..50e8bbb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java
@@ -22,8 +22,10 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.MetricsRegistry;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.MetricRegistry;
+
+import static com.codahale.metrics.MetricRegistry.name;
 
 /**
  * Class that implements cache metrics.
@@ -33,7 +35,7 @@ public class CacheStats {
   /**
    * Needed making histograms.
    */
-  private static final MetricsRegistry METRICS = new MetricsRegistry();
+  private static final MetricRegistry METRICS = new MetricRegistry();
 
   /** Sliding window statistics. The number of metric periods to include in
    * sliding window hit ratio calculations.
@@ -113,7 +115,7 @@ public class CacheStats {
     this.hitCachingCounts = initializeZeros(numPeriodsInWindow);
     this.requestCounts = initializeZeros(numPeriodsInWindow);
     this.requestCachingCounts = initializeZeros(numPeriodsInWindow);
-    this.ageAtEviction = METRICS.newHistogram(CacheStats.class, name + ".ageAtEviction");
+    this.ageAtEviction = METRICS.histogram(name(CacheStats.class, name + ".ageAtEviction"));
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
index 2818d88..86d183b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
@@ -23,6 +23,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.text.DateFormat;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -32,6 +33,8 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.SortedMap;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -41,6 +44,7 @@ import org.apache.commons.cli.OptionGroup;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -73,12 +77,18 @@ import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.Metric;
-import com.yammer.metrics.core.MetricName;
-import com.yammer.metrics.core.MetricPredicate;
-import com.yammer.metrics.core.MetricsRegistry;
-import com.yammer.metrics.reporting.ConsoleReporter;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricFilter;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.ConsoleReporter;
+import com.codahale.metrics.ScheduledReporter;
+import com.codahale.metrics.Snapshot;
+import com.codahale.metrics.Timer;
+
+import static com.codahale.metrics.MetricRegistry.name;
 
 /**
  * Implements pretty-printing functionality for {@link HFile}s.
@@ -544,13 +554,17 @@ public class HFilePrettyPrinter extends Configured implements Tool {
   }
 
   private static class KeyValueStatsCollector {
-    private final MetricsRegistry metricsRegistry = new MetricsRegistry();
+    private final MetricRegistry metricsRegistry = new MetricRegistry();
     private final ByteArrayOutputStream metricsOutput = new ByteArrayOutputStream();
-    private final SimpleReporter simpleReporter = new SimpleReporter(metricsRegistry, new PrintStream(metricsOutput));
-    Histogram keyLen = metricsRegistry.newHistogram(HFilePrettyPrinter.class, "Key length");
-    Histogram valLen = metricsRegistry.newHistogram(HFilePrettyPrinter.class, "Val length");
-    Histogram rowSizeBytes = metricsRegistry.newHistogram(HFilePrettyPrinter.class, "Row size (bytes)");
-    Histogram rowSizeCols = metricsRegistry.newHistogram(HFilePrettyPrinter.class, "Row size (columns)");
+    private final SimpleReporter simpleReporter = SimpleReporter.forRegistry(metricsRegistry).
+        outputTo(new PrintStream(metricsOutput)).filter(MetricFilter.ALL).build();
+
+    Histogram keyLen = metricsRegistry.histogram(name(HFilePrettyPrinter.class, "Key length"));
+    Histogram valLen = metricsRegistry.histogram(name(HFilePrettyPrinter.class, "Val length"));
+    Histogram rowSizeBytes = metricsRegistry.histogram(
+      name(HFilePrettyPrinter.class, "Row size (bytes)"));
+    Histogram rowSizeCols = metricsRegistry.histogram(
+      name(HFilePrettyPrinter.class, "Row size (columns)"));
 
     long curRowBytes = 0;
     long curRowCols = 0;
@@ -600,9 +614,8 @@ public class HFilePrettyPrinter extends Configured implements Tool {
         return "no data available for statistics";
 
       // Dump the metrics to the output stream
-      simpleReporter.shutdown();
-      simpleReporter.run();
-      metricsRegistry.shutdown();
+      simpleReporter.stop();
+      simpleReporter.report();
 
       return
               metricsOutput.toString() +
@@ -610,35 +623,137 @@ public class HFilePrettyPrinter extends Configured implements Tool {
     }
   }
 
-  private static class SimpleReporter extends ConsoleReporter {
-    private final PrintStream out;
+  /**
+   * Almost identical to ConsoleReporter, but extending ScheduledReporter,
+   * as extending ConsoleReporter in this version of dropwizard is now too much trouble.
+   */
+  private static class SimpleReporter extends ScheduledReporter {
+    /**
+     * Returns a new {@link Builder} for {@link ConsoleReporter}.
+     *
+     * @param registry the registry to report
+     * @return a {@link Builder} instance for a {@link ConsoleReporter}
+     */
+    public static Builder forRegistry(MetricRegistry registry) {
+      return new Builder(registry);
+    }
+
+    /**
+     * A builder for {@link SimpleReporter} instances. Defaults to using the default locale and
+     * time zone, writing to {@code System.out}, converting rates to events/second, converting
+     * durations to milliseconds, and not filtering metrics.
+     */
+    public static class Builder {
+      private final MetricRegistry registry;
+      private PrintStream output;
+      private Locale locale;
+      private TimeZone timeZone;
+      private TimeUnit rateUnit;
+      private TimeUnit durationUnit;
+      private MetricFilter filter;
+
+      private Builder(MetricRegistry registry) {
+        this.registry = registry;
+        this.output = System.out;
+        this.locale = Locale.getDefault();
+        this.timeZone = TimeZone.getDefault();
+        this.rateUnit = TimeUnit.SECONDS;
+        this.durationUnit = TimeUnit.MILLISECONDS;
+        this.filter = MetricFilter.ALL;
+      }
+
+      /**
+       * Write to the given {@link PrintStream}.
+       *
+       * @param output a {@link PrintStream} instance.
+       * @return {@code this}
+       */
+      public Builder outputTo(PrintStream output) {
+        this.output = output;
+        return this;
+      }
+
+      /**
+       * Only report metrics which match the given filter.
+       *
+       * @param filter a {@link MetricFilter}
+       * @return {@code this}
+       */
+      public Builder filter(MetricFilter filter) {
+        this.filter = filter;
+        return this;
+      }
+
+      /**
+       * Builds a {@link ConsoleReporter} with the given properties.
+       *
+       * @return a {@link ConsoleReporter}
+       */
+      public SimpleReporter build() {
+        return new SimpleReporter(registry,
+            output,
+            locale,
+            timeZone,
+            rateUnit,
+            durationUnit,
+            filter);
+      }
+    }
+
+    private final PrintStream output;
+    private final Locale locale;
+    private final DateFormat dateFormat;
+
+    private SimpleReporter(MetricRegistry registry,
+                            PrintStream output,
+                            Locale locale,
+                            TimeZone timeZone,
+                            TimeUnit rateUnit,
+                            TimeUnit durationUnit,
+                            MetricFilter filter) {
+      super(registry, "simple-reporter", filter, rateUnit, durationUnit);
+      this.output = output;
+      this.locale = locale;
 
-    public SimpleReporter(MetricsRegistry metricsRegistry, PrintStream out) {
-      super(metricsRegistry, out, MetricPredicate.ALL);
-      this.out = out;
+      this.dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT,
+          DateFormat.MEDIUM,
+          locale);
+      dateFormat.setTimeZone(timeZone);
     }
 
     @Override
-    public void run() {
-      for (Map.Entry<String, SortedMap<MetricName, Metric>> entry : getMetricsRegistry().groupedMetrics(
-              MetricPredicate.ALL).entrySet()) {
-        try {
-          for (Map.Entry<MetricName, Metric> subEntry : entry.getValue().entrySet()) {
-            out.print("   " + subEntry.getKey().getName());
-            out.println(':');
-
-            subEntry.getValue().processWith(this, subEntry.getKey(), out);
-          }
-        } catch (Exception e) {
-          e.printStackTrace(out);
+    public void report(SortedMap<String, Gauge> gauges,
+                       SortedMap<String, Counter> counters,
+                       SortedMap<String, Histogram> histograms,
+                       SortedMap<String, Meter> meters,
+                       SortedMap<String, Timer> timers) {
+      // we know we only have histograms
+      if (!histograms.isEmpty()) {
+        for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
+          output.print("   " + StringUtils.substringAfterLast(entry.getKey(), "."));
+          output.println(':');
+          printHistogram(entry.getValue());
         }
+        output.println();
       }
-    }
 
-    @Override
-    public void processHistogram(MetricName name, Histogram histogram, PrintStream stream) {
-      super.processHistogram(name, histogram, stream);
-      stream.printf(Locale.getDefault(), "             count = %d%n", histogram.count());
+      output.println();
+      output.flush();
+    }
+
+    private void printHistogram(Histogram histogram) {
+      Snapshot snapshot = histogram.getSnapshot();
+      output.printf(locale, "               min = %d%n", snapshot.getMin());
+      output.printf(locale, "               max = %d%n", snapshot.getMax());
+      output.printf(locale, "              mean = %2.2f%n", snapshot.getMean());
+      output.printf(locale, "            stddev = %2.2f%n", snapshot.getStdDev());
+      output.printf(locale, "            median = %2.2f%n", snapshot.getMedian());
+      output.printf(locale, "              75%% <= %2.2f%n", snapshot.get75thPercentile());
+      output.printf(locale, "              95%% <= %2.2f%n", snapshot.get95thPercentile());
+      output.printf(locale, "              98%% <= %2.2f%n", snapshot.get98thPercentile());
+      output.printf(locale, "              99%% <= %2.2f%n", snapshot.get99thPercentile());
+      output.printf(locale, "            99.9%% <= %2.2f%n", snapshot.get999thPercentile());
+      output.printf(locale, "             count = %d%n", histogram.getCount());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index a48871f..cf5f7ac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import com.google.protobuf.InvalidProtocolBufferException;
-import com.yammer.metrics.core.MetricsRegistry;
+import com.codahale.metrics.MetricRegistry;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -339,7 +339,7 @@ public class TableMapReduceUtil {
 
     if (addDependencyJars) {
       addDependencyJars(job);
-      addDependencyJars(job.getConfiguration(), MetricsRegistry.class);
+      addDependencyJars(job.getConfiguration(), MetricRegistry.class);
     }
 
     resetCacheConfig(job.getConfiguration());
@@ -785,7 +785,7 @@ public class TableMapReduceUtil {
       com.google.protobuf.Message.class,
       com.google.common.collect.Lists.class,
       org.apache.htrace.Trace.class,
-      com.yammer.metrics.core.MetricsRegistry.class);
+      com.codahale.metrics.MetricRegistry.class);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/main/java/org/apache/hadoop/hbase/util/YammerHistogramUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/YammerHistogramUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/YammerHistogramUtils.java
index 120f170..201bdf2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/YammerHistogramUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/YammerHistogramUtils.java
@@ -18,9 +18,9 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.stats.Sample;
-import com.yammer.metrics.stats.Snapshot;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Reservoir;
+import com.codahale.metrics.Snapshot;
 
 import java.lang.reflect.Constructor;
 import java.text.DecimalFormat;
@@ -37,13 +37,13 @@ public final class YammerHistogramUtils {
   private static DecimalFormat DOUBLE_FORMAT = new DecimalFormat("#0.00");
 
   /**
-   * Create a new {@link com.yammer.metrics.core.Histogram} instance. These constructors are
+   * Create a new {@link com.codahale.metrics.Histogram} instance. These constructors are
    * not public in 2.2.0, so we use reflection to find them.
    */
-  public static Histogram newHistogram(Sample sample) {
+  public static Histogram newHistogram(Reservoir sample) {
     try {
       Constructor<?> ctor =
-          Histogram.class.getDeclaredConstructor(Sample.class);
+          Histogram.class.getDeclaredConstructor(Reservoir.class);
       ctor.setAccessible(true);
       return (Histogram) ctor.newInstance(sample);
     } catch (Exception e) {
@@ -54,10 +54,10 @@ public final class YammerHistogramUtils {
   /** @return an abbreviated summary of {@code hist}. */
   public static String getShortHistogramReport(final Histogram hist) {
     Snapshot sn = hist.getSnapshot();
-    return "mean=" + DOUBLE_FORMAT.format(hist.mean()) +
-        ", min=" + DOUBLE_FORMAT.format(hist.min()) +
-        ", max=" + DOUBLE_FORMAT.format(hist.max()) +
-        ", stdDev=" + DOUBLE_FORMAT.format(hist.stdDev()) +
+    return "mean=" + DOUBLE_FORMAT.format(sn.getMean()) +
+        ", min=" + DOUBLE_FORMAT.format(sn.getMin()) +
+        ", max=" + DOUBLE_FORMAT.format(sn.getMax()) +
+        ", stdDev=" + DOUBLE_FORMAT.format(sn.getStdDev()) +
         ", 95th=" + DOUBLE_FORMAT.format(sn.get95thPercentile()) +
         ", 99th=" + DOUBLE_FORMAT.format(sn.get99thPercentile());
   }
@@ -65,10 +65,10 @@ public final class YammerHistogramUtils {
   /** @return a summary of {@code hist}. */
   public static String getHistogramReport(final Histogram hist) {
     Snapshot sn = hist.getSnapshot();
-    return ", mean=" + DOUBLE_FORMAT.format(hist.mean()) +
-        ", min=" + DOUBLE_FORMAT.format(hist.min()) +
-        ", max=" + DOUBLE_FORMAT.format(hist.max()) +
-        ", stdDev=" + DOUBLE_FORMAT.format(hist.stdDev()) +
+    return ", mean=" + DOUBLE_FORMAT.format(sn.getMean()) +
+        ", min=" + DOUBLE_FORMAT.format(sn.getMin()) +
+        ", max=" + DOUBLE_FORMAT.format(sn.getMax()) +
+        ", stdDev=" + DOUBLE_FORMAT.format(sn.getStdDev()) +
         ", 50th=" + DOUBLE_FORMAT.format(sn.getMedian()) +
         ", 75th=" + DOUBLE_FORMAT.format(sn.get75thPercentile()) +
         ", 95th=" + DOUBLE_FORMAT.format(sn.get95thPercentile()) +

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 33b50d4..30629a3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -93,9 +93,9 @@ import org.apache.htrace.impl.ProbabilitySampler;
 
 import com.google.common.base.Objects;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.stats.Snapshot;
-import com.yammer.metrics.stats.UniformSample;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Snapshot;
+import com.codahale.metrics.UniformReservoir;
 
 /**
  * Script used evaluating HBase performance and scalability.  Runs a HBase
@@ -1054,8 +1054,8 @@ public class PerformanceEvaluation extends Configured implements Tool {
         this.connection = ConnectionFactory.createConnection(conf);
       }
       onStartup();
-      latency = YammerHistogramUtils.newHistogram(new UniformSample(1024 * 500));
-      valueSize = YammerHistogramUtils.newHistogram(new UniformSample(1024 * 500));
+      latency = YammerHistogramUtils.newHistogram(new UniformReservoir(1024 * 500));
+      valueSize = YammerHistogramUtils.newHistogram(new UniformReservoir(1024 * 500));
     }
 
     abstract void onStartup() throws IOException;
@@ -1121,21 +1121,21 @@ public class PerformanceEvaluation extends Configured implements Tool {
      */
     private void reportLatency() throws IOException {
       status.setStatus(testName + " latency log (microseconds), on " +
-          latency.count() + " measures");
+          latency.getCount() + " measures");
       reportHistogram(this.latency);
     }
 
     private void reportValueSize() throws IOException {
       status.setStatus(testName + " valueSize after " +
-          valueSize.count() + " measures");
+          valueSize.getCount() + " measures");
       reportHistogram(this.valueSize);
     }
 
     private void reportHistogram(final Histogram h) throws IOException {
       Snapshot sn = h.getSnapshot();
-      status.setStatus(testName + " Min      = " + h.min());
-      status.setStatus(testName + " Avg      = " + h.mean());
-      status.setStatus(testName + " StdDev   = " + h.stdDev());
+      status.setStatus(testName + " Min      = " + sn.getMin());
+      status.setStatus(testName + " Avg      = " + sn.getMean());
+      status.setStatus(testName + " StdDev   = " + sn.getStdDev());
       status.setStatus(testName + " 50th     = " + sn.getMedian());
       status.setStatus(testName + " 75th     = " + sn.get75thPercentile());
       status.setStatus(testName + " 95th     = " + sn.get95thPercentile());
@@ -1143,7 +1143,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
       status.setStatus(testName + " 99.9th   = " + sn.get999thPercentile());
       status.setStatus(testName + " 99.99th  = " + sn.getValue(0.9999));
       status.setStatus(testName + " 99.999th = " + sn.getValue(0.99999));
-      status.setStatus(testName + " Max      = " + h.max());
+      status.setStatus(testName + " Max      = " + sn.getMax());
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
index e35fc08..fd33fe3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
@@ -40,9 +40,9 @@ import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.stats.Snapshot;
-import com.yammer.metrics.stats.UniformSample;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Snapshot;
+import com.codahale.metrics.UniformReservoir;
 
 @Category({MiscTests.class, SmallTests.class})
 public class TestPerformanceEvaluation {
@@ -125,16 +125,16 @@ public class TestPerformanceEvaluation {
     opts.setValueSize(valueSize);
     RandomReadTest rrt = new RandomReadTest(null, opts, null);
     Constructor<?> ctor =
-      Histogram.class.getDeclaredConstructor(com.yammer.metrics.stats.Sample.class);
+      Histogram.class.getDeclaredConstructor(com.codahale.metrics.Reservoir.class);
     ctor.setAccessible(true);
-    Histogram histogram = (Histogram)ctor.newInstance(new UniformSample(1024 * 500));
+    Histogram histogram = (Histogram)ctor.newInstance(new UniformReservoir(1024 * 500));
     for (int i = 0; i < 100; i++) {
       histogram.update(rrt.getValueLength(null));
     }
-    double stddev = histogram.stdDev();
-    assertTrue(stddev != 0 && stddev != 1.0);
-    assertTrue(histogram.stdDev() != 0);
     Snapshot snapshot = histogram.getSnapshot();
+    double stddev = snapshot.getStdDev();
+    assertTrue(stddev != 0 && stddev != 1.0);
+    assertTrue(snapshot.getStdDev() != 0);
     double median = snapshot.getMedian();
     assertTrue(median != 0 && median != 1 && median != valueSize);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
index 1efbe05..bba38f77 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
@@ -150,16 +150,17 @@ public class TestClientPushback {
     MetricsConnection.RegionStats rsStats = conn.getConnectionMetrics().
             serverStats.get(server).get(regionName);
     assertEquals(name, rsStats.name);
-    assertEquals(rsStats.heapOccupancyHist.mean(),
+    assertEquals(rsStats.heapOccupancyHist.getSnapshot().getMean(),
         (double)regionStats.getHeapOccupancyPercent(), 0.1 );
-    assertEquals(rsStats.memstoreLoadHist.mean(),
+    assertEquals(rsStats.memstoreLoadHist.getSnapshot().getMean(),
         (double)regionStats.getMemstoreLoadPercent(), 0.1);
 
     MetricsConnection.RunnerStats runnerStats = conn.getConnectionMetrics().runnerStats;
 
-    assertEquals(runnerStats.delayRunners.count(), 1);
-    assertEquals(runnerStats.normalRunners.count(), 1);
-    assertEquals("", runnerStats.delayIntevalHist.mean(), (double)backoffTime, 0.1);
+    assertEquals(runnerStats.delayRunners.getCount(), 1);
+    assertEquals(runnerStats.normalRunners.getCount(), 1);
+    assertEquals("", runnerStats.delayIntevalHist.getSnapshot().getMean(),
+      (double)backoffTime, 0.1);
 
     latch.await(backoffTime * 2, TimeUnit.MILLISECONDS);
     assertNotEquals("AsyncProcess did not submit the work time", endTime.get(), 0);

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
index 7996c17..e138174 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
@@ -62,10 +62,11 @@ import org.apache.htrace.Trace;
 import org.apache.htrace.TraceScope;
 import org.apache.htrace.impl.ProbabilitySampler;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.Meter;
-import com.yammer.metrics.core.MetricsRegistry;
-import com.yammer.metrics.reporting.ConsoleReporter;
+import com.codahale.metrics.ConsoleReporter;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.MetricFilter;
 
 // imports for things that haven't moved from regionserver.wal yet.
 import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
@@ -73,6 +74,8 @@ import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 
+import static com.codahale.metrics.MetricRegistry.name;
+
 /**
  * This class runs performance benchmarks for {@link WAL}.
  * See usage for this tool by running:
@@ -81,20 +84,18 @@ import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 @InterfaceAudience.Private
 public final class WALPerformanceEvaluation extends Configured implements Tool {
   private static final Log LOG = LogFactory.getLog(WALPerformanceEvaluation.class.getName());
-  private final MetricsRegistry metrics = new MetricsRegistry();
+  private final MetricRegistry metrics = new MetricRegistry();
   private final Meter syncMeter =
-    metrics.newMeter(WALPerformanceEvaluation.class, "syncMeter", "syncs", TimeUnit.MILLISECONDS);
-  private final Histogram syncHistogram =
-    metrics.newHistogram(WALPerformanceEvaluation.class, "syncHistogram", "nanos-between-syncs",
-      true);
-  private final Histogram syncCountHistogram =
-      metrics.newHistogram(WALPerformanceEvaluation.class, "syncCountHistogram", "countPerSync",
-        true);
-  private final Meter appendMeter =
-    metrics.newMeter(WALPerformanceEvaluation.class, "appendMeter", "bytes",
-      TimeUnit.MILLISECONDS);
+    metrics.meter(name(WALPerformanceEvaluation.class, "syncMeter", "syncs"));
+
+  private final Histogram syncHistogram = metrics.histogram(
+    name(WALPerformanceEvaluation.class, "syncHistogram", "nanos-between-syncs"));
+  private final Histogram syncCountHistogram = metrics.histogram(
+    name(WALPerformanceEvaluation.class, "syncCountHistogram", "countPerSync"));
+  private final Meter appendMeter = metrics.meter(
+    name(WALPerformanceEvaluation.class, "appendMeter", "bytes"));
   private final Histogram latencyHistogram =
-    metrics.newHistogram(WALPerformanceEvaluation.class, "latencyHistogram", "nanos", true);
+    metrics.histogram(name(WALPerformanceEvaluation.class, "latencyHistogram", "nanos"));
 
   private final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
 
@@ -333,7 +334,10 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
           benchmarks[i] = Trace.wrap(new WALPutBenchmark(regions[i], htd, numIterations, noSync,
               syncInterval, traceFreq));
         }
-        ConsoleReporter.enable(this.metrics, 30, TimeUnit.SECONDS);
+        ConsoleReporter reporter = ConsoleReporter.forRegistry(metrics).
+          outputTo(System.out).convertRatesTo(TimeUnit.SECONDS).filter(MetricFilter.ALL).build();
+        reporter.start(30, TimeUnit.SECONDS);
+
         long putTime = runBenchmark(benchmarks, numThreads);
         logBenchmarkResult("Summary: threads=" + numThreads + ", iterations=" + numIterations +
           ", syncInterval=" + syncInterval, numIterations * numThreads, putTime);

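Finally, periodic console reporting moved from the static
ConsoleReporter.enable(registry, period, unit) call to a builder plus start(), as in the
hunk above. A standalone sketch of that lifecycle; the meter name is made up for
illustration:

    import java.util.concurrent.TimeUnit;
    import com.codahale.metrics.ConsoleReporter;
    import com.codahale.metrics.Meter;
    import com.codahale.metrics.MetricFilter;
    import com.codahale.metrics.MetricRegistry;

    public class ConsoleReporterSketch {
      public static void main(String[] args) throws InterruptedException {
        MetricRegistry registry = new MetricRegistry();
        Meter appends = registry.meter("appendMeter");
        ConsoleReporter reporter = ConsoleReporter.forRegistry(registry)
            .outputTo(System.out)
            .convertRatesTo(TimeUnit.SECONDS)
            .filter(MetricFilter.ALL)
            .build();
        reporter.start(30, TimeUnit.SECONDS);  // periodic reporting, replaces enable()
        appends.mark(100);
        Thread.sleep(1000);
        reporter.report();                     // one on-demand dump
        reporter.stop();
      }
    }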
http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-shaded/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml
index 4e38c88..10edf42 100644
--- a/hbase-shaded/pom.xml
+++ b/hbase-shaded/pom.xml
@@ -154,8 +154,8 @@
                                         <shadedPattern>org.apache.hadoop.hbase.shaded.com.lmax</shadedPattern>
                                     </relocation>
                                     <relocation>
-                                        <pattern>com.yammer</pattern>
-                                        <shadedPattern>org.apache.hadoop.hbase.shaded.com.yammer</shadedPattern>
+                                        <pattern>com.dropwizard</pattern>
+                                        <shadedPattern>org.apache.hadoop.hbase.shaded.com.dropwizard</shadedPattern>
                                     </relocation>
 
                                     <!-- top level io -->

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/hbase-shell/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index f80858b..7c3754e 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -235,7 +235,7 @@
     </dependency>
     <!-- General dependencies -->
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/abe30b52/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 0999120..d865b0c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1181,7 +1181,7 @@
     <!-- Do not use versions earlier than 3.2.2 due to a security vulnerability -->
     <collections.version>3.2.2</collections.version>
     <httpclient.version>3.1</httpclient.version>
-    <metrics-core.version>2.2.0</metrics-core.version>
+    <metrics-core.version>3.1.2</metrics-core.version>
     <guava.version>12.0.1</guava.version>
     <jsr305.version>1.3.9</jsr305.version>
     <jackson.version>1.9.13</jackson.version>
@@ -1429,14 +1429,14 @@
         <version>${log4j.version}</version>
       </dependency>
       <!--This is not used by hbase directly.  Used by thrift,
-          yammer and zk.-->
+          dropwizard and zk.-->
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
         <version>${slf4j.version}</version>
       </dependency>
       <dependency>
-        <groupId>com.yammer.metrics</groupId>
+        <groupId>io.dropwizard.metrics</groupId>
         <artifactId>metrics-core</artifactId>
         <version>${metrics-core.version}</version>
       </dependency>