Posted to commits@hbase.apache.org by ra...@apache.org on 2010/10/23 02:39:14 UTC

svn commit: r1026538 - in /hbase/trunk: ./ src/main/java/org/apache/hadoop/hbase/io/hfile/ src/main/java/org/apache/hadoop/hbase/regionserver/ src/main/java/org/apache/hadoop/hbase/util/ src/test/java/org/apache/hadoop/hbase/util/

Author: rawson
Date: Sat Oct 23 00:39:14 2010
New Revision: 1026538

URL: http://svn.apache.org/viewvc?rev=1026538&view=rev
Log:
HBASE-2514  RegionServer should refuse to be assigned a region that uses LZO when LZO isn't available


Added:
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
Modified:
    hbase/trunk/CHANGES.txt
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java

Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=1026538&r1=1026537&r2=1026538&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Sat Oct 23 00:39:14 2010
@@ -1032,6 +1032,8 @@ Release 0.21.0 - Unreleased
    HBASE-3133  Only log compaction requests when a request is actually added
                to the queue
    HBASE-3132  Print TimestampRange and BloomFilters in HFile pretty print
+   HBASE-2514  RegionServer should refuse to be assigned a region that uses
+               LZO when LZO isn't available
 
   NEW FEATURES
    HBASE-1961  HBase EC2 scripts

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java?rev=1026538&r1=1026537&r2=1026538&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java Sat Oct 23 00:39:14 2010
@@ -71,7 +71,9 @@ public final class Compression {
   }
 
   /**
-   * Compression algorithms.
+   * Compression algorithms. The ordinals of these values cannot change,
+   * or you risk breaking all existing HFiles out there, even the ones
+   * that are not compressed. (They use the NONE algorithm.)
    */
   public static enum Algorithm {
     LZO("lzo") {
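
The comment added above is load-bearing: an HFile records its codec as this enum's ordinal, and the trailer-reading code in HFile.java below resolves it with Compression.Algorithm.values()[fft.compressionCodec]. That makes the integer-to-algorithm mapping part of the on-disk format. A minimal, self-contained sketch of the hazard, using a toy enum rather than the HBase class:

    import java.io.*;

    public class OrdinalDemo {
      // Toy stand-in for Compression.Algorithm: the ordinals 0, 1, 2 become
      // a file-format contract the moment one is written to disk.
      enum Codec { LZO, GZ, NONE }

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        new DataOutputStream(bos).writeInt(Codec.NONE.ordinal()); // writes 2
        // If a later release reorders the enum to { NONE, LZO, GZ },
        // values()[2] resolves to GZ and every old uncompressed file
        // suddenly claims to be gzip-compressed.
        DataInputStream in =
            new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
        System.out.println(Codec.values()[in.readInt()]); // NONE, for now
      }
    }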

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=1026538&r1=1026537&r2=1026538&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java Sat Oct 23 00:39:14 2010
@@ -58,6 +58,7 @@ import org.apache.hadoop.hbase.util.Bloo
 import org.apache.hadoop.hbase.util.ByteBloomFilter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.hadoop.hbase.util.CompressionTest;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.IOUtils;
@@ -827,20 +828,20 @@ public class HFile {
       String clazzName = Bytes.toString(fi.get(FileInfo.COMPARATOR));
       this.comparator = getComparator(clazzName);
 
-    int allIndexSize = (int)(this.fileSize - this.trailer.dataIndexOffset - FixedFileTrailer.trailerSize());
-    byte[] dataAndMetaIndex = readAllIndex(this.istream, this.trailer.dataIndexOffset, allIndexSize);
+      int allIndexSize = (int)(this.fileSize - this.trailer.dataIndexOffset - FixedFileTrailer.trailerSize());
+      byte[] dataAndMetaIndex = readAllIndex(this.istream, this.trailer.dataIndexOffset, allIndexSize);
 
-    ByteArrayInputStream bis = new ByteArrayInputStream(dataAndMetaIndex);
-    DataInputStream dis = new DataInputStream(bis);
+      ByteArrayInputStream bis = new ByteArrayInputStream(dataAndMetaIndex);
+      DataInputStream dis = new DataInputStream(bis);
 
       // Read in the data index.
-    this.blockIndex =
-      BlockIndex.readIndex(this.comparator, dis, this.trailer.dataIndexCount);
+      this.blockIndex =
+          BlockIndex.readIndex(this.comparator, dis, this.trailer.dataIndexCount);
 
       // Read in the metadata index.
       if (trailer.metaIndexCount > 0) {
-      this.metaIndex = BlockIndex.readIndex(Bytes.BYTES_RAWCOMPARATOR, dis,
-        this.trailer.metaIndexCount);
+        this.metaIndex = BlockIndex.readIndex(Bytes.BYTES_RAWCOMPARATOR, dis,
+            this.trailer.metaIndexCount);
       }
       this.fileInfoLoaded = true;
 
@@ -885,6 +886,9 @@ public class HFile {
       // Set up the codec.
       this.compressAlgo =
         Compression.Algorithm.values()[fft.compressionCodec];
+
+      CompressionTest.testCompression(this.compressAlgo);
+
       return fft;
     }
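
The functional change in this file is the single CompressionTest.testCompression(this.compressAlgo) call added above: loadFileInfo() now rejects an HFile whose declared codec cannot be instantiated on this node, so the failure surfaces at open time rather than midway through a scan. A sketch of that guard lifted out of context (the wrapper class and method here are illustrative, not HBase code):

    import java.io.IOException;

    import org.apache.hadoop.hbase.io.hfile.Compression;
    import org.apache.hadoop.hbase.util.CompressionTest;

    public class OpenTimeGuard {
      // What the trailer-reading path effectively does now: resolve the
      // persisted ordinal, then prove the codec works before reading blocks.
      static Compression.Algorithm validate(int codecId) throws IOException {
        Compression.Algorithm algo = Compression.Algorithm.values()[codecId];
        CompressionTest.testCompression(algo); // throws if, say, LZO is absent
        return algo;
      }
    }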
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=1026538&r1=1026537&r2=1026538&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java Sat Oct 23 00:39:14 2010
@@ -76,6 +76,7 @@ import org.apache.hadoop.hbase.regionser
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.hadoop.hbase.util.CompressionTest;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Pair;
@@ -432,7 +433,7 @@ public class HRegion implements HeapSize
   public boolean isClosing() {
     return this.closing.get();
   }
-  
+
   boolean areWritesEnabled() {
     synchronized(this.writestate) {
       return this.writestate.writesEnabled;
@@ -741,7 +742,7 @@ public class HRegion implements HeapSize
         } finally {
           long now = EnvironmentEdgeManager.currentTimeMillis();
           LOG.info(((completed) ? "completed" : "aborted")
-              + " compaction on region " + this 
+              + " compaction on region " + this
               + " after " + StringUtils.formatTimeDiff(now, startTime));
         }
       } finally {
@@ -1878,7 +1879,7 @@ public class HRegion implements HeapSize
         LOG.warn("File corruption encountered!  " +
             "Continuing, but renaming " + edits + " as " + p, ioe);
       } else {
-        // other IO errors may be transient (bad network connection, 
+        // other IO errors may be transient (bad network connection,
         // checksum exception on one datanode, etc).  throw & retry
         throw ioe;
       }
@@ -2463,6 +2464,8 @@ public class HRegion implements HeapSize
    */
   protected HRegion openHRegion(final Progressable reporter)
   throws IOException {
+    checkCompressionCodecs();
+
     long seqid = initialize(reporter);
     if (this.log != null) {
       this.log.setSequenceNumber(seqid);
@@ -2470,6 +2473,13 @@ public class HRegion implements HeapSize
     return this;
   }
 
+  private void checkCompressionCodecs() throws IOException {
+    for (HColumnDescriptor fam: regionInfo.getTableDesc().getColumnFamilies()) {
+      CompressionTest.testCompression(fam.getCompression());
+      CompressionTest.testCompression(fam.getCompactionCompression());
+    }
+  }
+
   /**
    * Inserts a new region's meta information into the passed
    * <code>meta</code> region. Used by the HMaster bootstrap code adding
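
Note that checkCompressionCodecs() tests two settings per family: a column family may use one algorithm for flushes and a different one for compaction output, and either being unavailable should block the region open. A hedged sketch of a descriptor that exercises both paths; the constructor and setter names are assumptions inferred from the getters the patch calls:

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.io.hfile.Compression;

    public class TwoCodecFamily {
      static HColumnDescriptor lzoFlushGzCompact() {
        HColumnDescriptor fam = new HColumnDescriptor("cf");
        fam.setCompressionType(Compression.Algorithm.LZO);          // flushes
        fam.setCompactionCompressionType(Compression.Algorithm.GZ); // compactions
        // openHRegion() now runs CompressionTest.testCompression() on both
        // values, so a region with this family is refused wherever LZO or
        // GZ cannot be instantiated.
        return fam;
      }
    }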

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=1026538&r1=1026537&r2=1026538&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Sat Oct 23 00:39:14 2010
@@ -108,6 +108,7 @@ import org.apache.hadoop.hbase.regionser
 import org.apache.hadoop.hbase.regionserver.wal.WALObserver;
 import org.apache.hadoop.hbase.replication.regionserver.Replication;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.CompressionTest;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.InfoServer;
 import org.apache.hadoop.hbase.util.Pair;
@@ -263,6 +264,17 @@ public class HRegionServer implements HR
     this.connection = HConnectionManager.getConnection(conf);
     this.isOnline = false;
 
+    // check to see if the codec list is available:
+    String [] codecs = conf.getStrings("hbase.regionserver.codecs", null);
+    if (codecs != null) {
+      for (String codec : codecs) {
+        if (!CompressionTest.testCompression(codec)) {
+          throw new IOException("Compression codec " + codec +
+              " not supported, aborting RS construction");
+        }
+      }
+    }
+
     // Config'ed params
     this.numRetries = conf.getInt("hbase.client.retries.number", 2);
     this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY,
@@ -2493,7 +2505,7 @@ public class HRegionServer implements HR
           .getConstructor(Configuration.class);
       return c.newInstance(conf2);
     } catch (Exception e) {
-      throw new RuntimeException("Failed construction of " + "Master: "
+      throw new RuntimeException("Failed construction of " + "Regionserver: "
           + regionServerClass.toString(), e);
     }
   }
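
This hunk adds the operator-facing knob: hbase.regionserver.codecs lists the codecs a regionserver must prove it can instantiate before construction completes. The default is null, so the check is opt-in; leave the property unset and nothing changes. It would normally be set in hbase-site.xml; the sketch below expresses the same thing through the Configuration API (HBaseConfiguration.create() assumed as the factory method of this era):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class StrictCodecConf {
      static Configuration create() {
        Configuration conf = HBaseConfiguration.create();
        // Comma-separated list, matching the conf.getStrings() call above.
        // A regionserver started with this conf throws IOException during
        // construction if either codec fails CompressionTest.
        conf.set("hbase.regionserver.codecs", "lzo,gz");
        return conf;
      }
    }

The point of failing construction, rather than logging and carrying on, is that a regionserver silently lacking LZO would otherwise accept LZO regions and then fail every read and flush against them.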

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java?rev=1026538&r1=1026537&r2=1026538&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java Sat Oct 23 00:39:14 2010
@@ -19,11 +19,16 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.io.compress.Compressor;
 
+import java.io.IOException;
 import java.net.URI;
 
 /**
@@ -31,6 +36,59 @@ import java.net.URI;
  * on every node in your cluster.
  */
 public class CompressionTest {
+  static final Log LOG = LogFactory.getLog(CompressionTest.class);
+
+  public static boolean testCompression(String codec) {
+    codec = codec.toLowerCase();
+
+    Compression.Algorithm a;
+
+    try {
+      a = Compression.getCompressionAlgorithmByName(codec);
+    } catch (IllegalArgumentException e) {
+      LOG.warn("Codec type: " + codec + " is not known");
+      return false;
+    }
+
+    try {
+      testCompression(a);
+      return true;
+    } catch (IOException ignored) {
+      LOG.warn("Can't instantiate codec: " + codec, ignored);
+      return false;
+    }
+  }
+
+  private final static Boolean[] compressionTestResults
+      = new Boolean[Compression.Algorithm.values().length];
+  static {
+    for (int i = 0 ; i < compressionTestResults.length ; ++i) {
+      compressionTestResults[i] = null;
+    }
+  }
+
+  public static void testCompression(Compression.Algorithm algo)
+      throws IOException {
+    if (compressionTestResults[algo.ordinal()] != null) {
+      if (compressionTestResults[algo.ordinal()]) {
+        return; // already passed the test, don't do it again.
+      } else {
+        // failed.
+        throw new IOException("Compression algorithm '" + algo.getName() +
+            "' previously failed the test.");
+      }
+    }
+
+    try {
+      Compressor c = algo.getCompressor();
+      algo.returnCompressor(c);
+      compressionTestResults[algo.ordinal()] = true; // passes
+    } catch (Throwable t) {
+      compressionTestResults[algo.ordinal()] = false; // failure
+      throw new IOException(t);
+    }
+  }
+
   protected static Path path = new Path(".hfile-comp-test");
 
   public static void usage() {
@@ -51,7 +109,6 @@ public class CompressionTest {
     if (dfs != null) {
       try {
         dfs.close();
-        dfs = null;
       } catch (Exception e) {
         e.printStackTrace();
       }
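
Two details of the rewritten CompressionTest are easy to miss. First, the cache is a Boolean[] rather than a boolean[], giving each algorithm three states: null (never tested), TRUE (passed), and FALSE (failed); the static initializer's null-fill is redundant in Java, since object arrays default to null, but harmless and explicit. Failures are memoized too, which is why a repeated call throws an IOException with no cause; the new unit test below asserts exactly that. Second, the String overload never throws, it logs a warning and returns false, so it doubles as a preflight probe (main() remains the command-line validation entry point). A small usage sketch:

    import org.apache.hadoop.hbase.util.CompressionTest;

    public class Preflight {
      public static void main(String[] args) {
        // Returns false (and logs a warning) instead of throwing, so a
        // caller can probe for a codec and fall back gracefully.
        if (!CompressionTest.testCompression("lzo")) {
          System.err.println("LZO unusable on this node; use GZ or NONE");
        }
      }
    }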

Added: hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java?rev=1026538&view=auto
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java (added)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java Sat Oct 23 00:39:14 2010
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import org.apache.hadoop.hbase.io.hfile.Compression;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.*;
+
+public class TestCompressionTest {
+
+  @Test
+  public void testTestCompression() {
+
+    // This test will fail if you run the tests with LZO compression available.
+    try {
+      CompressionTest.testCompression(Compression.Algorithm.LZO);
+      fail(); // not reached: testCompression above always throws without LZO
+    } catch (IOException e) {
+      // there should be a 'cause'.
+      assertNotNull(e.getCause());
+    }
+
+    // this is testing the caching of the test results.
+    try {
+      CompressionTest.testCompression(Compression.Algorithm.LZO);
+      fail(); // not reached: the cached failure always throws
+    } catch (IOException e) {
+      // there should be NO cause: the cached failure is thrown directly, not wrapped
+      assertNull(e.getCause());
+    }
+
+
+    assertFalse(CompressionTest.testCompression("LZO"));
+    assertTrue(CompressionTest.testCompression("NONE"));
+    assertTrue(CompressionTest.testCompression("GZ"));
+  }
+}