Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 05:27:37 UTC

svn commit: r1077545 - in /hadoop/common/branches/branch-0.20-security-patches/src: core/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java core/org/apache/hadoop/io/compress/zlib/ZlibFactory.java test/org/apache/hadoop/io/compress/TestCodec.java

Author: omalley
Date: Fri Mar  4 04:27:37 2011
New Revision: 1077545

URL: http://svn.apache.org/viewvc?rev=1077545&view=rev
Log:
commit eb3e692be4a5e07dcb97d6a069a4a5eec1054604
Author: Chris Douglas <cd...@apache.org>
Date:   Wed Jul 14 20:08:00 2010 -0700

    HADOOP-6669 from https://issues.apache.org/jira/secure/attachment/12449530/6669-0y20.patch
    
    +++ b/YAHOO-CHANGES.txt
    +    HADOOP-6669. Respect compression configuration when creating DefaultCodec
    +    compressors. (Koji Noguchi via cdouglas)
    +

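For orientation: the change makes DefaultCodec honor the zlib compression settings carried on the job Configuration when it hands out a compressor; previously ZlibFactory built the compressor without consulting conf. Below is a minimal caller-side sketch of the code path this patch fixes, using only classes and methods named in the diffs that follow (the example class name and the BEST_SPEED choice are illustrative, not part of the patch):

    import java.io.ByteArrayOutputStream;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.Compressor;
    import org.apache.hadoop.io.compress.DefaultCodec;
    import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
    import org.apache.hadoop.io.compress.zlib.ZlibFactory;
    import org.apache.hadoop.util.ReflectionUtils;

    public class CompressionConfExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Request the fastest zlib setting; before this patch the factory
        // handed out compressors built without consulting conf.
        ZlibFactory.setCompressionLevel(conf, CompressionLevel.BEST_SPEED);

        DefaultCodec codec = ReflectionUtils.newInstance(DefaultCodec.class, conf);
        Compressor compressor = codec.createCompressor();   // now built from conf

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        CompressionOutputStream out = codec.createOutputStream(bos, compressor);
        out.write(new byte[1 << 15]);                        // trivially compressible input
        out.close();
        System.out.println("compressed size: " + bos.size());
      }
    }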
Modified:
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/compress/TestCodec.java

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java?rev=1077545&r1=1077544&r2=1077545&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java Fri Mar  4 04:27:37 2011
@@ -36,6 +36,22 @@ public class BuiltInZlibDeflater extends
     super(level, nowrap);
   }
 
+  BuiltInZlibDeflater(Configuration conf) {
+    this(null == conf
+        ? DEFAULT_COMPRESSION
+        : ZlibFactory.getCompressionLevel(conf).compressionLevel());
+    if (conf != null) {
+      final ZlibCompressor.CompressionStrategy strategy =
+        ZlibFactory.getCompressionStrategy(conf);
+      try {
+        setStrategy(strategy.compressionStrategy());
+      } catch (IllegalArgumentException ill) {
+        LOG.warn(strategy + " not supported by BuiltInZlibDeflater.");
+        setStrategy(DEFAULT_STRATEGY);
+      }
+    }
+  }
+
   public BuiltInZlibDeflater(int level) {
     super(level);
   }

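For context on the try/catch above: BuiltInZlibDeflater extends java.util.zip.Deflater, and the JDK Deflater only accepts DEFAULT_STRATEGY, FILTERED and HUFFMAN_ONLY, so zlib-specific strategies such as RLE or FIXED make setStrategy throw IllegalArgumentException, which the new constructor catches and maps to the default. A standalone pure-JDK sketch of that behavior (the class name and the hard-coded value 3, zlib's Z_RLE, are illustrative):

    import java.util.zip.Deflater;

    public class StrategyFallbackDemo {
      public static void main(String[] args) {
        Deflater d = new Deflater(Deflater.BEST_SPEED);
        try {
          // 3 corresponds to zlib's Z_RLE, which java.util.zip.Deflater rejects.
          d.setStrategy(3);
        } catch (IllegalArgumentException e) {
          // Same fallback the new BuiltInZlibDeflater constructor uses.
          d.setStrategy(Deflater.DEFAULT_STRATEGY);
        }
        d.end();
      }
    }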
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/compress/zlib/ZlibFactory.java?rev=1077545&r1=1077544&r2=1077545&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/compress/zlib/ZlibFactory.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/compress/zlib/ZlibFactory.java Fri Mar  4 04:27:37 2011
@@ -83,7 +83,8 @@ public class ZlibFactory {
    */
   public static Compressor getZlibCompressor(Configuration conf) {
     return (isNativeZlibLoaded(conf)) ? 
-      new ZlibCompressor() : new BuiltInZlibDeflater(); 
+      new ZlibCompressor(conf) :
+      new BuiltInZlibDeflater(conf);
   }
 
   /**

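With this change both branches of the factory receive the Configuration, so the configured level applies whether or not the native zlib library is loaded. A short sketch of calling the factory directly (names taken from the diffs; BEST_COMPRESSION and the instanceof check are only for illustration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.Compressor;
    import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
    import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
    import org.apache.hadoop.io.compress.zlib.ZlibFactory;

    public class ZlibFactoryExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        ZlibFactory.setCompressionLevel(conf, CompressionLevel.BEST_COMPRESSION);

        // Whichever implementation is selected here is now constructed from conf.
        Compressor c = ZlibFactory.getZlibCompressor(conf);
        System.out.println(c instanceof BuiltInZlibDeflater
            ? "pure-Java deflater, level taken from conf"
            : "native zlib compressor, level taken from conf");
      }
    }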
Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/compress/TestCodec.java?rev=1077545&r1=1077544&r2=1077545&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/compress/TestCodec.java Fri Mar  4 04:27:37 2011
@@ -144,6 +144,8 @@ public class TestCodec extends TestCase 
       RandomDatum v2 = new RandomDatum();
       k2.readFields(inflateIn);
       v2.readFields(inflateIn);
+      assertTrue("original and compressed-then-decompressed-output not equal",
+                 k1.equals(k2) && v1.equals(v2));
     }
     LOG.info("SUCCESS! Completed checking " + count + " records");
   }
@@ -197,6 +199,60 @@ public class TestCodec extends TestCase 
                outbytes.length >= b.length);
   }
 
+  private static void codecTestWithNOCompression (Configuration conf,
+                      String codecClass) throws IOException {
+    // Create a compressor with NO_COMPRESSION and make sure that
+    // output is not compressed by comparing the size with the
+    // original input
+
+    CompressionCodec codec = null;
+    ZlibFactory.setCompressionLevel(conf, CompressionLevel.NO_COMPRESSION);
+    try {
+      codec = (CompressionCodec)
+        ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
+    } catch (ClassNotFoundException cnfe) {
+      throw new IOException("Illegal codec!");
+    }
+    Compressor c = codec.createCompressor();
+    // obtain a compressor for this codec (configured via conf above)
+    ByteArrayOutputStream bos = new ByteArrayOutputStream();
+    CompressionOutputStream cos = null;
+    // write trivially compressible data
+    byte[] b = new byte[1 << 15];
+    Arrays.fill(b, (byte) 43);
+    try {
+      cos = codec.createOutputStream(bos, c);
+      cos.write(b);
+    } finally {
+      if (cos != null) {
+        cos.close();
+      }
+    }
+    byte[] outbytes = bos.toByteArray();
+    // verify data were not compressed
+    assertTrue("Compressed bytes contrary to configuration (NO_COMPRESSION)",
+               outbytes.length >= b.length);
+  }
+
+  public void testCodecInitWithCompressionLevel() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setBoolean("io.native.lib.available", true);
+    if (ZlibFactory.isNativeZlibLoaded(conf)) {
+      LOG.info("testCodecInitWithCompressionLevel with native");
+      codecTestWithNOCompression(conf,
+                            "org.apache.hadoop.io.compress.GzipCodec");
+      codecTestWithNOCompression(conf,
+                         "org.apache.hadoop.io.compress.DefaultCodec");
+    } else {
+      LOG.warn("testCodecInitWithCompressionLevel for native skipped"
+               + ": native libs not loaded");
+    }
+    conf = new Configuration();
+    conf.setBoolean("io.native.lib.available", false);
+    codecTestWithNOCompression(conf,
+                         "org.apache.hadoop.io.compress.DefaultCodec");
+  }
+
   public void testCodecPoolCompressorReinit() throws Exception {
     Configuration conf = new Configuration();
     conf.setBoolean("hadoop.native.lib", true);