Posted to commits@hbase.apache.org by ns...@apache.org on 2011/10/11 21:13:10 UTC

svn commit: r1182034 [2/2] - in /hbase/branches/0.89/src: main/java/org/apache/hadoop/hbase/io/hfile/ main/java/org/apache/hadoop/hbase/regionserver/ main/java/org/apache/hadoop/hbase/regionserver/metrics/ test/java/org/apache/hadoop/hbase/io/hfile/ te...

Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java?rev=1182034&r1=1182033&r2=1182034&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java Tue Oct 11 19:13:09 2011
@@ -41,11 +41,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
-import org.apache.hadoop.hbase.io.hfile.ColumnFamilyMetrics.BlockMetricType;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexChunk;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;

Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java?rev=1182034&r1=1182033&r2=1182034&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java Tue Oct 11 19:13:09 2011
@@ -28,6 +28,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
 import org.apache.hadoop.hbase.util.Bytes;
 
 import org.junit.After;
@@ -49,14 +50,15 @@ public class TestHFileReaderV1 {
 
   @Before
   public void setUp() throws IOException {
-    startingMetrics = ColumnFamilyMetrics.getMetricsSnapshot();
+    startingMetrics = SchemaMetrics.getMetricsSnapshot();
     conf = TEST_UTIL.getConfiguration();
     fs = FileSystem.get(conf);
+    SchemaMetrics.configureGlobally(conf);
   }
 
   @After
   public void tearDown() throws Exception {
-    ColumnFamilyMetrics.validateMetricChanges(startingMetrics);
+    SchemaMetrics.validateMetricChanges(startingMetrics);
   }
 
   @Test

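(For context: the snapshot-then-validate pattern this hunk introduces, recording all schema metrics before the test body and validating the deltas afterwards, is the same pattern most of the test changes in this commit follow. A minimal, self-contained sketch using only the SchemaMetrics calls visible in the diff; the class name is hypothetical:)

    import java.io.IOException;
    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
    import org.junit.After;
    import org.junit.Before;

    public class MetricsSnapshotSketch {
      private static final HBaseTestingUtility TEST_UTIL =
          new HBaseTestingUtility();
      private Map<String, Long> startingMetrics;

      @Before
      public void setUp() throws IOException {
        // Record the value of every known metric before the test runs.
        startingMetrics = SchemaMetrics.getMetricsSnapshot();
        // Pick up metric-naming options (e.g. table-name prefixes)
        // from the test configuration.
        Configuration conf = TEST_UTIL.getConfiguration();
        SchemaMetrics.configureGlobally(conf);
      }

      @After
      public void tearDown() throws Exception {
        // Fails the test if the per-metric deltas since setUp()
        // are inconsistent.
        SchemaMetrics.validateMetricChanges(startingMetrics);
      }
    }
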
Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java?rev=1182034&r1=1182033&r2=1182034&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java Tue Oct 11 19:13:09 2011
@@ -19,13 +19,22 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
+import java.util.Collection;
 import java.util.Map;
 import java.util.Random;
 
 import org.apache.hadoop.hbase.io.HeapSize;
+import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
+import org.apache.hadoop.hbase.regionserver.metrics.TestSchemaMetrics;
 import org.apache.hadoop.hbase.util.ClassSize;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.*;
 
 /**
  * Tests the concurrent LruBlockCache.<p>
@@ -34,22 +43,31 @@ import junit.framework.TestCase;
  * evictions run when they're supposed to and do what they should,
  * and that cached blocks are accessible when expected to be.
  */
-public class TestLruBlockCache extends TestCase {
+@RunWith(Parameterized.class)
+public class TestLruBlockCache {
 
   private Map<String, Long> startingMetrics;
 
-  @Override
+  public TestLruBlockCache(boolean useTableName) {
+    SchemaMetrics.setUseTableNameInTest(useTableName);
+  }
+
+  @Parameters
+  public static Collection<Object[]> parameters() {
+    return TestSchemaMetrics.parameters();
+  }
+
+  @Before
   public void setUp() throws Exception {
-    startingMetrics = ColumnFamilyMetrics.getMetricsSnapshot();
-    super.setUp();
+    startingMetrics = SchemaMetrics.getMetricsSnapshot();
   }
 
-  @Override
+  @After
   public void tearDown() throws Exception {
-    super.tearDown();
-    ColumnFamilyMetrics.validateMetricChanges(startingMetrics);
+    SchemaMetrics.validateMetricChanges(startingMetrics);
   }
 
+  @Test
   public void testBackgroundEvictionThread() throws Exception {
 
     long maxSize = 100000;
@@ -77,6 +95,7 @@ public class TestLruBlockCache extends T
     assertEquals(cache.getEvictionCount(), 1);
   }
 
+  @Test
   public void testCacheSimple() throws Exception {
 
     long maxSize = 1000000;
@@ -133,6 +152,7 @@ public class TestLruBlockCache extends T
     assertEquals(0, cache.getEvictionCount());
   }
 
+  @Test
   public void testCacheEvictionSimple() throws Exception {
 
     long maxSize = 100000;
@@ -173,6 +193,7 @@ public class TestLruBlockCache extends T
     }
   }
 
+  @Test
   public void testCacheEvictionTwoPriorities() throws Exception {
 
     long maxSize = 100000;
@@ -231,6 +252,7 @@ public class TestLruBlockCache extends T
     }
   }
 
+  @Test
   public void testCacheEvictionThreePriorities() throws Exception {
 
     long maxSize = 100000;
@@ -354,6 +376,7 @@ public class TestLruBlockCache extends T
   }
 
   // test scan resistance
+  @Test
   public void testScanResistance() throws Exception {
 
     long maxSize = 100000;
@@ -416,6 +439,7 @@ public class TestLruBlockCache extends T
   }
 
   // test setMaxSize
+  @Test
   public void testResizeBlockCache() throws Exception {
 
     long maxSize = 300000;
@@ -525,7 +549,7 @@ public class TestLruBlockCache extends T
         LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
   }
 
-  private static class CachedItem implements HeapSize {
+  private static class CachedItem implements Cacheable {
     String blockName;
     int size;
 
@@ -546,5 +570,15 @@ public class TestLruBlockCache extends T
           + ClassSize.align(2 * blockName.length())
           + ClassSize.align(size);
     }
+
+    @Override
+    public BlockType getBlockType() {
+      return BlockType.DATA;
+    }
+
+    @Override
+    public SchemaMetrics getSchemaMetrics() {
+      return SchemaMetrics.getUnknownInstanceForTest();
+    }
   }
 }

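(For context: TestLruBlockCache moves from a JUnit 3 TestCase to JUnit 4's Parameterized runner, so each test now runs twice, once with table names included in metric keys and once without, reusing TestSchemaMetrics.parameters(). A minimal sketch of that wiring, with a hypothetical class name:)

    import java.util.Collection;

    import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
    import org.apache.hadoop.hbase.regionserver.metrics.TestSchemaMetrics;
    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;
    import org.junit.runners.Parameterized.Parameters;

    @RunWith(Parameterized.class)
    public class ParameterizedMetricsTestSketch {

      // The runner instantiates the class once per parameter set,
      // here {false} and then {true}.
      public ParameterizedMetricsTestSketch(boolean useTableName) {
        SchemaMetrics.setUseTableNameInTest(useTableName);
      }

      @Parameters
      public static Collection<Object[]> parameters() {
        // Reuse the shared {false}, {true} parameter sets.
        return TestSchemaMetrics.parameters();
      }

      @Test
      public void testSomething() {
        // Body sees metric names with or without the "tab.<table>."
        // prefix, depending on the current parameter.
      }
    }
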
Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java?rev=1182034&r1=1182033&r2=1182034&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java Tue Oct 11 19:13:09 2011
@@ -57,13 +57,13 @@ import org.apache.hadoop.hbase.HRegionIn
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.hfile.AbstractHFileReader;
-import org.apache.hadoop.hbase.io.hfile.ColumnFamilyMetrics;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileBlock;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
 import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter;
+import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.MD5Hash;
 import org.apache.hadoop.util.StringUtils;
@@ -617,9 +617,9 @@ public class HFileReadWriteTest {
       isCompaction = workload == Workload.MERGE;
       for (HFile.Reader reader : readers) {
         fsBlockReadMetrics.add(
-            ColumnFamilyMetrics.ALL_CF_METRICS.getBlockMetricName(
+            SchemaMetrics.ALL_SCHEMA_METRICS.getBlockMetricName(
                 BlockType.BlockCategory.ALL_CATEGORIES, isCompaction,
-                ColumnFamilyMetrics.BlockMetricType.READ_COUNT));
+                SchemaMetrics.BlockMetricType.READ_COUNT));
       }
 
       LOG.info("Using the following metrics for the number of data blocks " +

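(For context: SchemaMetrics.ALL_SCHEMA_METRICS, the renamed counterpart of ColumnFamilyMetrics.ALL_CF_METRICS, is the aggregate instance with no table or column-family prefix, and getBlockMetricName composes a key from a block category, a compaction flag, and a metric type. A small sketch; going by the assertions in the new TestSchemaMetrics below, this prints "fsBlockReadCnt". The class name is hypothetical:)

    import org.apache.hadoop.hbase.io.hfile.BlockType;
    import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;

    public class MetricNameSketch {
      public static void main(String[] args) {
        // Aggregate, non-compaction read count across all block categories.
        String name = SchemaMetrics.ALL_SCHEMA_METRICS.getBlockMetricName(
            BlockType.BlockCategory.ALL_CATEGORIES, false,
            SchemaMetrics.BlockMetricType.READ_COUNT);
        System.out.println(name);  // "fsBlockReadCnt" per TestSchemaMetrics
      }
    }
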
Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java?rev=1182034&r1=1182033&r2=1182034&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java Tue Oct 11 19:13:09 2011
@@ -11,13 +11,13 @@ import org.apache.hadoop.hbase.HBaseConf
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
 
@@ -132,13 +132,6 @@ public class TestBlocksRead extends HBas
     region.delete(del, null, true);
   }
 
-  private void deleteFamily(byte[] cf, String row, String column, long version)
-    throws IOException {
-    Delete del = new Delete(Bytes.toBytes(row));
-    del.deleteColumns(cf, Bytes.toBytes(column), version);
-    region.delete(del, null, true);
-  }
-
   private static void verifyData(KeyValue kv, String expectedRow,
                                  String expectedCol, long expectedVersion) {
     assertEquals("RowCheck", expectedRow, Bytes.toString(kv.getRow()));
@@ -150,8 +143,9 @@ public class TestBlocksRead extends HBas
   }
 
   private static long getBlkAccessCount(byte[] cf) {
-    return HRegion.getNumericMetric("cf." + Bytes.toString(cf)  + "."
-        + "bt.Data.fsBlockReadCnt");
+    return HRegion.getNumericMetric(SchemaMetrics.CF_PREFIX
+        + Bytes.toString(cf) + "." + SchemaMetrics.BLOCK_TYPE_PREFIX
+        + "Data.fsBlockReadCnt");
   }
 
   /**

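(For context: judging by the hard-coded string this hunk replaces, SchemaMetrics.CF_PREFIX is "cf." and SchemaMetrics.BLOCK_TYPE_PREFIX is "bt.", so for a family "f" the assembled key is "cf.f.bt.Data.fsBlockReadCnt". A sketch of the same construction under that assumption:)

    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
    import org.apache.hadoop.hbase.util.Bytes;

    public class BlockReadCountSketch {
      static long getBlkAccessCount(byte[] cf) {
        // For cf = "f" this reads the metric "cf.f.bt.Data.fsBlockReadCnt",
        // assuming CF_PREFIX == "cf." and BLOCK_TYPE_PREFIX == "bt."
        // (inferred from the literal the new code replaces).
        return HRegion.getNumericMetric(SchemaMetrics.CF_PREFIX
            + Bytes.toString(cf) + "." + SchemaMetrics.BLOCK_TYPE_PREFIX
            + "Data.fsBlockReadCnt");
      }
    }
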
Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=1182034&r1=1182033&r2=1182034&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java Tue Oct 11 19:13:09 2011
@@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.filter.Fi
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdge;
@@ -81,6 +82,8 @@ import java.util.concurrent.atomic.Atomi
 public class TestHRegion extends HBaseTestCase {
   static final Log LOG = LogFactory.getLog(TestHRegion.class);
 
+  private static final String COLUMN_FAMILY = "MyCF";
+
   HRegion region = null;
   private final String DIR = HBaseTestingUtility.getTestDir() +
     "/TestHRegion/";
@@ -96,11 +99,14 @@ public class TestHRegion extends HBaseTe
   protected final byte[] value2 = Bytes.toBytes("value2");
   protected final byte[] row = Bytes.toBytes("rowA");
 
+  private Map<String, Long> startingMetrics;
+
   /**
    * @see org.apache.hadoop.hbase.HBaseTestCase#setUp()
    */
   @Override
   protected void setUp() throws Exception {
+    startingMetrics = SchemaMetrics.getMetricsSnapshot();
     super.setUp();
   }
 
@@ -108,6 +114,7 @@ public class TestHRegion extends HBaseTe
   protected void tearDown() throws Exception {
     super.tearDown();
     EnvironmentEdgeManagerTestHelper.reset();
+    SchemaMetrics.validateMetricChanges(startingMetrics);
   }
 
   //////////////////////////////////////////////////////////////////////////////
@@ -326,10 +333,10 @@ public class TestHRegion extends HBaseTe
 
   public void testFamilyWithAndWithoutColon() throws Exception {
     byte [] b = Bytes.toBytes(getName());
-    byte [] cf = Bytes.toBytes("cf");
+    byte [] cf = Bytes.toBytes(COLUMN_FAMILY);
     initHRegion(b, getName(), cf);
     Put p = new Put(b);
-    byte [] cfwithcolon = Bytes.toBytes("cf:");
+    byte [] cfwithcolon = Bytes.toBytes(COLUMN_FAMILY + ":");
     p.add(cfwithcolon, cfwithcolon, cfwithcolon);
     boolean exception = false;
     try {
@@ -343,7 +350,7 @@ public class TestHRegion extends HBaseTe
   @SuppressWarnings("unchecked")
   public void testBatchPut() throws Exception {
     byte[] b = Bytes.toBytes(getName());
-    byte[] cf = Bytes.toBytes("cf");
+    byte[] cf = Bytes.toBytes(COLUMN_FAMILY);
     byte[] qual = Bytes.toBytes("qual");
     byte[] val = Bytes.toBytes("val");
     initHRegion(b, getName(), cf);

Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java?rev=1182034&r1=1182033&r2=1182034&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java Tue Oct 11 19:13:09 2011
@@ -44,12 +44,13 @@ import org.apache.hadoop.hbase.KeyValueT
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.io.hfile.ColumnFamilyMetrics;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.Compression;
-import org.apache.hadoop.hbase.io.hfile.ColumnFamilyMetrics.BlockMetricType;
 import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
+import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
+import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.BlockMetricType;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -71,8 +72,7 @@ public class TestMultiColumnScanner {
   static final byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY);
   static final int MAX_VERSIONS = 50;
 
-  private final ColumnFamilyMetrics cfMetrics =
-      ColumnFamilyMetrics.getInstance(FAMILY);
+  private SchemaMetrics schemaMetrics;
 
   /**
    * The size of the column qualifier set used. Increasing this parameter
@@ -124,6 +124,13 @@ public class TestMultiColumnScanner {
       assertTrue(TIMESTAMPS[i] < TIMESTAMPS[i + 1]);
   }
 
+  @Before
+  public void setUp() {
+    SchemaMetrics.configureGlobally(TEST_UTIL.getConfiguration());
+    schemaMetrics = SchemaMetrics.getInstance(TABLE_NAME, FAMILY);
+  }
+
+
   @Parameters
   public static final Collection<Object[]> parameters() {
     return HBaseTestingUtility.BLOOM_AND_COMPRESSION_COMBINATIONS;
@@ -136,13 +143,15 @@ public class TestMultiColumnScanner {
   }
 
   private long getBlocksRead() {
-    return HRegion.getNumericMetric(cfMetrics.getBlockMetricName(
-        BlockType.BlockCategory.ALL_CATEGORIES, false, BlockMetricType.READ_COUNT));
+    return HRegion.getNumericMetric(schemaMetrics.getBlockMetricName(
+        BlockType.BlockCategory.ALL_CATEGORIES, false,
+        BlockMetricType.READ_COUNT));
   }
 
   private long getCacheHits() {
-    return HRegion.getNumericMetric(cfMetrics.getBlockMetricName(
-        BlockType.BlockCategory.ALL_CATEGORIES, false, BlockMetricType.CACHE_HIT));
+    return HRegion.getNumericMetric(schemaMetrics.getBlockMetricName(
+        BlockType.BlockCategory.ALL_CATEGORIES, false,
+        BlockMetricType.CACHE_HIT));
   }
 
   private void saveBlockStats() {

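(For context: unlike the aggregate ALL_SCHEMA_METRICS lookup above, TestMultiColumnScanner now asks for a per-(table, column family) SchemaMetrics instance after configureGlobally, and reads the counters back through HRegion.getNumericMetric. A condensed sketch of those helpers; the class name and the "myTable"/"myCF" names are placeholders:)

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.io.hfile.BlockType;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
    import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.BlockMetricType;

    public class PerSchemaMetricsSketch {
      private final SchemaMetrics schemaMetrics;

      PerSchemaMetricsSketch(Configuration conf) {
        // Configure naming before instances are handed out, so keys are
        // built with or without the table prefix consistently.
        SchemaMetrics.configureGlobally(conf);
        schemaMetrics = SchemaMetrics.getInstance("myTable", "myCF");
      }

      long getBlocksRead() {
        // Resolves to e.g. "cf.myCF.fsBlockReadCnt", or
        // "tab.myTable.cf.myCF.fsBlockReadCnt" with table names enabled.
        return HRegion.getNumericMetric(schemaMetrics.getBlockMetricName(
            BlockType.BlockCategory.ALL_CATEGORIES, false,
            BlockMetricType.READ_COUNT));
      }
    }
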
Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java?rev=1182034&r1=1182033&r2=1182034&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java Tue Oct 11 19:13:09 2011
@@ -42,10 +42,10 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.Reference.Range;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
-import org.apache.hadoop.hbase.io.hfile.ColumnFamilyMetrics;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache.CacheStats;
+import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
 import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -65,7 +65,7 @@ public class TestStoreFile extends HBase
 
   @Override
   public void setUp() throws Exception {
-    startingMetrics = ColumnFamilyMetrics.getMetricsSnapshot();
+    startingMetrics = SchemaMetrics.getMetricsSnapshot();
     try {
       this.cluster = new MiniDFSCluster(this.conf, 2, true, (String[])null);
       // Set the hbase.rootdir to be the home directory in mini dfs.
@@ -83,7 +83,7 @@ public class TestStoreFile extends HBase
     shutdownDfs(cluster);
     // ReflectionUtils.printThreadInfo(new PrintWriter(System.out),
     //  "Temporary end-of-test thread dump debugging HADOOP-2040: " + getName());
-    ColumnFamilyMetrics.validateMetricChanges(startingMetrics);
+    SchemaMetrics.validateMetricChanges(startingMetrics);
   }
 
   /**

Added: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaMetrics.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaMetrics.java?rev=1182034&view=auto
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaMetrics.java (added)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaMetrics.java Tue Oct 11 19:13:09 2011
@@ -0,0 +1,226 @@
+/*
+ * Copyright 2011 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.metrics;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.hadoop.hbase.io.hfile.BlockType;
+import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
+import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
+import org.apache.hadoop.hbase.util.ClassSize;
+
+import static org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.
+    BOOL_VALUES;
+import static org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.
+    BlockMetricType;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import static org.junit.Assert.*;
+
+@RunWith(Parameterized.class)
+public class TestSchemaMetrics {
+
+  private final String TABLE_NAME = "myTable";
+  private final String CF_NAME = "myColumnFamily";
+
+  private final boolean useTableName;
+  private Map<String, Long> startingMetrics;
+
+  @Parameters
+  public static Collection<Object[]> parameters() {
+    List<Object[]> params = new ArrayList<Object[]>();
+    params.add(new Object[] { new Boolean(false) });
+    params.add(new Object[] { new Boolean(true) });
+    return params;
+  }
+
+  public TestSchemaMetrics(boolean useTableName) {
+    this.useTableName = useTableName;
+    SchemaMetrics.setUseTableNameInTest(useTableName);
+  }
+
+  @Before
+  public void setUp() {
+    startingMetrics = SchemaMetrics.getMetricsSnapshot();
+  }
+
+  @Test
+  public void testNaming() {
+    final String metricPrefix = (useTableName ? "tab." +
+        TABLE_NAME + "." : "") + "cf." + CF_NAME + ".";
+    SchemaMetrics schemaMetrics = SchemaMetrics.getInstance(TABLE_NAME,
+        CF_NAME);
+    SchemaMetrics ALL_CF_METRICS = SchemaMetrics.ALL_SCHEMA_METRICS;
+
+    // fsReadTimeMetric
+    assertEquals(metricPrefix + "fsRead", schemaMetrics.getBlockMetricName(
+        BlockCategory.ALL_CATEGORIES, false, BlockMetricType.READ_TIME));
+
+    // compactionReadTimeMetric
+    assertEquals(metricPrefix + "compactionRead",
+        schemaMetrics.getBlockMetricName(BlockCategory.ALL_CATEGORIES, true,
+            BlockMetricType.READ_TIME));
+
+    // fsBlockReadCntMetric
+    assertEquals(metricPrefix + "fsBlockReadCnt",
+        schemaMetrics.getBlockMetricName(BlockCategory.ALL_CATEGORIES, false,
+            BlockMetricType.READ_COUNT));
+
+    // fsBlockReadCacheHitCntMetric
+    assertEquals(metricPrefix + "fsBlockReadCacheHitCnt",
+        schemaMetrics.getBlockMetricName(BlockCategory.ALL_CATEGORIES, false,
+            BlockMetricType.CACHE_HIT));
+
+    // fsBlockReadCacheMissCntMetric
+    assertEquals(metricPrefix + "fsBlockReadCacheMissCnt",
+        schemaMetrics.getBlockMetricName(BlockCategory.ALL_CATEGORIES, false,
+            BlockMetricType.CACHE_MISS));
+
+    // compactionBlockReadCntMetric
+    assertEquals(metricPrefix + "compactionBlockReadCnt",
+        schemaMetrics.getBlockMetricName(BlockCategory.ALL_CATEGORIES, true,
+            BlockMetricType.READ_COUNT));
+
+    // compactionBlockReadCacheHitCntMetric
+    assertEquals(metricPrefix + "compactionBlockReadCacheHitCnt",
+        schemaMetrics.getBlockMetricName(BlockCategory.ALL_CATEGORIES, true,
+            BlockMetricType.CACHE_HIT));
+
+    // compactionBlockReadCacheMissCntMetric
+    assertEquals(metricPrefix + "compactionBlockReadCacheMissCnt",
+        schemaMetrics.getBlockMetricName(BlockCategory.ALL_CATEGORIES, true,
+            BlockMetricType.CACHE_MISS));
+
+    // fsMetaBlockReadCntMetric
+    assertEquals("fsMetaBlockReadCnt", ALL_CF_METRICS.getBlockMetricName(
+        BlockCategory.META, false, BlockMetricType.READ_COUNT));
+
+    // fsMetaBlockReadCacheHitCntMetric
+    assertEquals("fsMetaBlockReadCacheHitCnt",
+        ALL_CF_METRICS.getBlockMetricName(BlockCategory.META, false,
+            BlockMetricType.CACHE_HIT));
+
+    // fsMetaBlockReadCacheMissCntMetric
+    assertEquals("fsMetaBlockReadCacheMissCnt",
+        ALL_CF_METRICS.getBlockMetricName(BlockCategory.META, false,
+            BlockMetricType.CACHE_MISS));
+
+    // Per-(column family, block type) statistics.
+    assertEquals(metricPrefix + "bt.Index.fsBlockReadCnt",
+        schemaMetrics.getBlockMetricName(BlockCategory.INDEX, false,
+            BlockMetricType.READ_COUNT));
+
+    assertEquals(metricPrefix + "bt.Data.compactionBlockReadCacheHitCnt",
+        schemaMetrics.getBlockMetricName(BlockCategory.DATA, true,
+            BlockMetricType.CACHE_HIT));
+
+    // A special case for Meta blocks
+    assertEquals(metricPrefix + "compactionMetaBlockReadCacheHitCnt",
+        schemaMetrics.getBlockMetricName(BlockCategory.META, true,
+            BlockMetricType.CACHE_HIT));
+
+    // Cache metrics
+    assertEquals(metricPrefix + "blockCacheSize",
+        schemaMetrics.getBlockMetricName(BlockCategory.ALL_CATEGORIES, false,
+            BlockMetricType.CACHE_SIZE));
+
+    assertEquals(metricPrefix + "bt.Index.blockCacheNumEvicted",
+        schemaMetrics.getBlockMetricName(BlockCategory.INDEX, false,
+            BlockMetricType.EVICTED));
+
+    assertEquals("bt.Data.blockCacheNumCached",
+        ALL_CF_METRICS.getBlockMetricName(BlockCategory.DATA, false,
+            BlockMetricType.CACHED));
+
+    assertEquals("blockCacheNumCached", ALL_CF_METRICS.getBlockMetricName(
+        BlockCategory.ALL_CATEGORIES, false, BlockMetricType.CACHED));
+
+    // "Non-compaction aware" metrics
+    try {
+      ALL_CF_METRICS.getBlockMetricName(BlockCategory.ALL_CATEGORIES, true,
+          BlockMetricType.CACHE_SIZE);
+      fail("Exception expected");
+    } catch (IllegalArgumentException ex) {
+    }
+
+    // Bloom metrics
+    assertEquals("keyMaybeInBloomCnt", ALL_CF_METRICS.getBloomMetricName(true));
+    assertEquals(metricPrefix + "keyNotInBloomCnt",
+        schemaMetrics.getBloomMetricName(false));
+
+    schemaMetrics.printMetricNames();
+  }
+
+  public void checkMetrics() {
+    SchemaMetrics.validateMetricChanges(startingMetrics);
+  }
+
+  @Test
+  public void testIncrements() {
+    Random rand = new Random(23982737L);
+    for (int i = 1; i <= 3; ++i) {
+      final String tableName = "table" + i;
+      for (int j = 1; j <= 3; ++j) {
+        final String cfName = "cf" + j;
+        SchemaMetrics sm = SchemaMetrics.getInstance(tableName, cfName);
+        for (boolean isInBloom : BOOL_VALUES) {
+          sm.updateBloomMetrics(isInBloom);
+          checkMetrics();
+        }
+
+        for (BlockCategory blockCat : BlockType.BlockCategory.values()) {
+          if (blockCat == BlockCategory.ALL_CATEGORIES) {
+            continue;
+          }
+
+          for (boolean isCompaction : BOOL_VALUES) {
+            sm.updateOnCacheHit(blockCat, isCompaction);
+            checkMetrics();
+            sm.updateOnCacheMiss(blockCat, isCompaction, rand.nextInt());
+            checkMetrics();
+          }
+
+          for (boolean isEviction : BOOL_VALUES) {
+            sm.updateOnCachePutOrEvict(blockCat, (isEviction ? -1 : 1)
+                * rand.nextInt(1024 * 1024), isEviction);
+          }
+        }
+      }
+    }
+  }
+
+  @Test
+  public void testSchemaConfiguredHeapSize() {
+    SchemaConfigured sc = new SchemaConfigured(null, TABLE_NAME, CF_NAME);
+    assertEquals(ClassSize.estimateBase(SchemaConfigured.class, true),
+        sc.heapSize());
+  }
+
+}