Posted to commits@hbase.apache.org by st...@apache.org on 2012/11/16 18:29:20 UTC

svn commit: r1410496 - in /hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase: regionserver/TestStore.java util/TestCompressionTest.java

Author: stack
Date: Fri Nov 16 17:29:19 2012
New Revision: 1410496

URL: http://svn.apache.org/viewvc?rev=1410496&view=rev
Log:
HBASE-7178 Compression tests

Adds a test to TestStore verifying that Store.createWriterInTmp() honors the
column family's compression and data block encoding settings, and refactors
TestCompressionTest so the native-codec check is shared by LZO, LZ4 and
SNAPPY via a new nativeCodecTest() helper.
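
Note that CompressionTest, exercised by these tests, also doubles as a
standalone cluster check. An illustrative invocation (argument details vary
between versions, so treat this as a sketch rather than canonical usage):

    $ hbase org.apache.hadoop.hbase.util.CompressionTest hdfs://namenode/tmp/probe.txt snappy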

Modified:
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=1410496&r1=1410495&r2=1410496&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java Fri Nov 16 17:29:19 2012
@@ -51,7 +51,10 @@ import org.apache.hadoop.hbase.HTableDes
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
@@ -151,6 +154,35 @@ public class TestStore extends TestCase 
     store = new HStore(basedir, region, hcd, fs, conf);
   }
 
+  /**
+   * Verify that compression and data block encoding are respected by the
+   * Store.createWriterInTmp() method, used on store flush.
+   */
+  public void testCreateWriter() throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    FileSystem fs = FileSystem.get(conf);
+
+    HColumnDescriptor hcd = new HColumnDescriptor(family);
+    hcd.setCompressionType(Compression.Algorithm.GZ);
+    hcd.setDataBlockEncoding(DataBlockEncoding.DIFF);
+    init(getName(), conf, hcd);
+
+    // Test createWriterInTmp()
+    StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompression(), false);
+    Path path = writer.getPath();
+    writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
+    writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));
+    writer.append(new KeyValue(row2, family, qf1, Bytes.toBytes(3)));
+    writer.append(new KeyValue(row2, family, qf2, Bytes.toBytes(4)));
+    writer.close();
+
+    // Verify that compression and encoding settings are respected
+    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
+    assertEquals(hcd.getCompressionType(), reader.getCompressionAlgorithm());
+    assertEquals(hcd.getDataBlockEncoding(), reader.getEncodingOnDisk());
+    reader.close();
+  }
+
   public void testDeleteExpiredStoreFiles() throws Exception {
     int storeFileNum = 4;
     int ttl = 4;

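The settings the new testCreateWriter() pins down are the same ones a client
sets when creating a table. A minimal sketch against the client API of this
era (the table and family names are made up for illustration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.io.compress.Compression;
    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

    public class CreateCompressedTable {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HColumnDescriptor hcd = new HColumnDescriptor("f");  // hypothetical family
        hcd.setCompressionType(Compression.Algorithm.GZ);    // on-disk block compression
        hcd.setDataBlockEncoding(DataBlockEncoding.DIFF);    // key encoding within blocks
        HTableDescriptor htd = new HTableDescriptor("t");    // hypothetical table
        htd.addFamily(hcd);
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
          // Store flushes for family 'f' should now write GZ+DIFF HFiles,
          // which is exactly what testCreateWriter() asserts above.
          admin.createTable(htd);
        } finally {
          admin.close();
        }
      }
    }
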
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java?rev=1410496&r1=1410495&r2=1410496&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java Fri Nov 16 17:29:19 2012
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.hbase.util;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -38,10 +40,10 @@ import static org.junit.Assert.*;
 
 @Category(SmallTests.class)
 public class TestCompressionTest {
+  static final Log LOG = LogFactory.getLog(TestCompressionTest.class);
 
   @Test
-  public void testTestCompression() {
-
+  public void testExceptionCaching() {
     // This test will fail if you run the tests with LZO compression available.
     try {
       CompressionTest.testCompression(Compression.Algorithm.LZO);
@@ -60,73 +62,81 @@ public class TestCompressionTest {
       assertNull(e.getCause());
     }
 
-
     assertFalse(CompressionTest.testCompression("LZO"));
+  }
+
+  @Test
+  public void testTestCompression() {
     assertTrue(CompressionTest.testCompression("NONE"));
     assertTrue(CompressionTest.testCompression("GZ"));
 
-    if (isCompressionAvailable("org.apache.hadoop.io.compress.SnappyCodec")) {
-      if (NativeCodeLoader.isNativeCodeLoaded()) {
-        try {
-          System.loadLibrary("snappy");
+    if (NativeCodeLoader.isNativeCodeLoaded()) {
+      nativeCodecTest("LZO", "lzo2", "com.hadoop.compression.lzo.LzoCodec");
+      nativeCodecTest("LZ4", null, "org.apache.hadoop.io.compress.Lz4Codec");
+      nativeCodecTest("SNAPPY", "snappy", "org.apache.hadoop.io.compress.SnappyCodec");
+    } else {
+      // Hadoop nativelib is not available
+      LOG.debug("Native code not loaded");
+      assertFalse(CompressionTest.testCompression("LZO"));
+      assertFalse(CompressionTest.testCompression("LZ4"));
+      assertFalse(CompressionTest.testCompression("SNAPPY"));
+    }
+  }
+
+  private boolean isCompressionAvailable(String codecClassName) {
+    try {
+      Thread.currentThread().getContextClassLoader().loadClass(codecClassName);
+      return true;
+    } catch (Exception ex) {
+      return false;
+    }
+  }
 
-          try {
+  /**
+   * Verify CompressionTest.testCompression() on a native codec.
+   */
+  private void nativeCodecTest(String codecName, String libName, String codecClassName) {
+    if (isCompressionAvailable(codecClassName)) {
+      try {
+        if (libName != null) {
+          System.loadLibrary(libName);
+        }
+
+        try {
             Configuration conf = new Configuration();
             CompressionCodec codec = (CompressionCodec)
-              ReflectionUtils.newInstance(
-                conf.getClassByName("org.apache.hadoop.io.compress.SnappyCodec"), conf);
+              ReflectionUtils.newInstance(conf.getClassByName(codecClassName), conf);
 
             DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
-            CompressionOutputStream deflateFilter =
-              codec.createOutputStream(compressedDataBuffer);
+            CompressionOutputStream deflateFilter = codec.createOutputStream(compressedDataBuffer);
 
             byte[] data = new byte[1024];
-            DataOutputStream deflateOut = new DataOutputStream(
-              new BufferedOutputStream(deflateFilter));
+            DataOutputStream deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
             deflateOut.write(data, 0, data.length);
             deflateOut.flush();
             deflateFilter.finish();
 
-            // Snappy Codec class, Snappy nativelib and Hadoop nativelib with 
-            // Snappy JNIs are present
-            assertTrue(CompressionTest.testCompression("SNAPPY"));
-          }
-          catch (UnsatisfiedLinkError ex) {
-            // Hadoop nativelib does not have Snappy JNIs
-            
-            // cannot assert the codec here because the current logic of 
-            // CompressionTest checks only classloading, not the codec
-            // usage.
-          }
-          catch (Exception ex) {
-          }
+            // Codec class, codec nativelib and Hadoop nativelib with codec JNIs are present
+            assertTrue(CompressionTest.testCompression(codecName));
+        } catch (UnsatisfiedLinkError e) {
+          // Hadoop nativelib does not have codec JNIs.
+          // cannot assert the codec here because the current logic of
+          // CompressionTest checks only classloading, not the codec
+          // usage.
+          LOG.debug("No JNI for codec '" + codecName + "' " + e.getMessage());
+        } catch (Exception e) {
+          LOG.error(codecName, e);
         }
-        catch (UnsatisfiedLinkError ex) {
-          // Snappy nativelib is not available
-          assertFalse(CompressionTest.testCompression("SNAPPY"));
-        }
-      }
-      else {
-        // Hadoop nativelib is not available
-        assertFalse(CompressionTest.testCompression("SNAPPY"));
+      } catch (UnsatisfiedLinkError e) {
+        // nativelib is not available
+        LOG.debug("Native lib not available: " + codecName);
+        assertFalse(CompressionTest.testCompression(codecName));
       }
-    }
-    else {
-      // Snappy Codec class is not available
-      assertFalse(CompressionTest.testCompression("SNAPPY"));
-    }
-  }
-
-  private boolean isCompressionAvailable(String codecClassName) {
-    try {
-      Thread.currentThread().getContextClassLoader().loadClass(codecClassName);
-      return true;
-    }
-    catch (Exception ex) {
-      return false;
+    } else {
+      // Compression Codec class is not available
+      LOG.debug("Codec class not available: " + codecName);
+      assertFalse(CompressionTest.testCompression(codecName));
     }
   }
-
-
 }
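
As a footnote, the buffer-compression sequence inside nativeCodecTest() is
plain Hadoop and works the same for any codec. A self-contained sketch (not
part of the commit) that runs that sequence against the pure-Java GzipCodec,
so no native library is required:

    import java.io.BufferedOutputStream;
    import java.io.DataOutputStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.util.ReflectionUtils;

    public class CodecRoundTrip {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Same reflective instantiation as nativeCodecTest(), pointed at GzipCodec.
        CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(
            conf.getClassByName("org.apache.hadoop.io.compress.GzipCodec"), conf);

        DataOutputBuffer compressed = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = codec.createOutputStream(compressed);
        DataOutputStream deflateOut =
            new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.write(new byte[1024], 0, 1024);  // 1 KB of zeros, as in the test
        deflateOut.flush();
        deflateFilter.finish();  // force the codec to emit its trailer
        System.out.println("1024 bytes -> " + compressed.getLength() + " bytes compressed");
      }
    }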