Posted to commits@hbase.apache.org by jg...@apache.org on 2010/07/27 03:31:04 UTC

svn commit: r979491 - in /hbase/trunk: CHANGES.txt src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java

Author: jgray
Date: Tue Jul 27 01:31:04 2010
New Revision: 979491

URL: http://svn.apache.org/viewvc?rev=979491&view=rev
Log:
HBASE-2852  Bloom filter NPE (pranav via jgray)

Modified:
    hbase/trunk/CHANGES.txt
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java

Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=979491&r1=979490&r2=979491&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Tue Jul 27 01:31:04 2010
@@ -456,6 +456,7 @@ Release 0.21.0 - Unreleased
                that are in offline state in meta after a split
    HBASE-2815  not able to run the test suite in background because TestShell
                gets suspended on tty output (Alexey Kovyrin via Stack)
+   HBASE-2852  Bloom filter NPE (pranav via jgray)
 
   IMPROVEMENTS
    HBASE-1760  Cleanup TODOs in HTable

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java?rev=979491&r1=979490&r2=979491&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java Tue Jul 27 01:31:04 2010
@@ -930,7 +930,7 @@ public class StoreFile {
           key = row;
           break;
         case ROWCOL:
-          if (columns.size() == 1) {
+          if (columns != null && columns.size() == 1) {
             byte[] col = columns.first();
             key = Bytes.add(row, col);
             break;

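The hunk above is truncated after the break, but the shape of the fix is clear: when a read covers a whole column family (the new TestHRegion case below issues a Get with no qualifiers), the column set handed to the ROWCOL bloom check can be null, and the old columns.size() call threw a NullPointerException. With the guard, that case simply drops out of the single-column branch and the store file is read without the bloom optimization. A minimal standalone sketch of that logic follows; the names BloomType and bloomKeyFor are illustrative only, not the actual StoreFile API, and the skip-the-bloom fallback is an assumption since the hunk does not show the code after the if block.

    import java.util.SortedSet;

    // Illustrative sketch of the guarded ROWCOL bloom key selection.
    // Not the real StoreFile code; names and structure are assumptions.
    public class RowColBloomKeySketch {

      enum BloomType { NONE, ROW, ROWCOL }

      /**
       * Returns the key to probe the bloom filter with, or null when the
       * bloom filter cannot be used and the file must be read anyway.
       */
      static byte[] bloomKeyFor(BloomType type, byte[] row, SortedSet<byte[]> columns) {
        switch (type) {
          case ROW:
            return row;
          case ROWCOL:
            // The null check is the fix: a read over a whole column family
            // supplies no qualifier set, and columns.size() would NPE.
            if (columns != null && columns.size() == 1) {
              byte[] col = columns.first();
              byte[] key = new byte[row.length + col.length];
              System.arraycopy(row, 0, key, 0, row.length);
              System.arraycopy(col, 0, key, row.length, col.length);
              return key;
            }
            return null; // multi-column or whole-family read: skip the bloom check
          default:
            return null;
        }
      }

      public static void main(String[] args) {
        byte[] row = "row:0".getBytes();
        // Whole-family read: no qualifier set. Before the guard this was an
        // NPE; now the bloom probe is simply skipped.
        System.out.println(bloomKeyFor(BloomType.ROWCOL, row, null)); // prints: null
      }
    }

Skipping the probe in that case costs only the optimization, never correctness, since a bloom filter can only rule a read out, not in.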
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=979491&r1=979490&r2=979491&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java Tue Jul 27 01:31:04 2010
@@ -2658,7 +2658,52 @@ public class TestHRegion extends HBaseTe
 
   }
 
+  //////////////////////////////////////////////////////////////////////////////
+  // Bloom filter test
+  //////////////////////////////////////////////////////////////////////////////
+
+  public void testAllColumnsWithBloomFilter() throws IOException {
+    byte [] TABLE = Bytes.toBytes("testAllColumnsWithBloomFilter");
+    byte [] FAMILY = Bytes.toBytes("family");
+
+    //Create table
+    HColumnDescriptor hcd = new HColumnDescriptor(FAMILY, Integer.MAX_VALUE,
+        HColumnDescriptor.DEFAULT_COMPRESSION,
+        HColumnDescriptor.DEFAULT_IN_MEMORY,
+        HColumnDescriptor.DEFAULT_BLOCKCACHE,
+        Integer.MAX_VALUE, HColumnDescriptor.DEFAULT_TTL,
+        "rowcol",
+        HColumnDescriptor.DEFAULT_REPLICATION_SCOPE);
+    HTableDescriptor htd = new HTableDescriptor(TABLE);
+    htd.addFamily(hcd);
+    HRegionInfo info = new HRegionInfo(htd, null, null, false);
+    Path path = new Path(DIR + "testAllColumnsWithBloomFilter");
+    region = HRegion.createHRegion(info, path, conf);
+
+    // For row:0, col:0: insert versions 1 through 4.
+    byte row[] = Bytes.toBytes("row:" + 0);
+    byte column[] = Bytes.toBytes("column:" + 0);
+    Put put = new Put(row);
+    for (long idx = 1; idx <= 4; idx++) {
+      put.add(FAMILY, column, idx, Bytes.toBytes("value-version-" + idx));
+    }
+    region.put(put);
+
+    //Flush
+    region.flushcache();
 
+    //Get rows
+    Get get = new Get(row);
+    get.setMaxVersions();
+    KeyValue[] kvs = region.get(get, null).raw();
+
+    //Check if rows are correct
+    assertEquals(4, kvs.length);
+    checkOneCell(kvs[0], FAMILY, 0, 0, 4);
+    checkOneCell(kvs[1], FAMILY, 0, 0, 3);
+    checkOneCell(kvs[2], FAMILY, 0, 0, 2);
+    checkOneCell(kvs[3], FAMILY, 0, 0, 1);
+  }
 
   private void putData(int startRow, int numRows, byte [] qf,
       byte [] ...families)
@@ -2784,4 +2829,24 @@ public class TestHRegion extends HBaseTe
     Path path = new Path(DIR + callingMethod);
     region = HRegion.createHRegion(info, path, conf);
   }
+
+  /**
+   * Assert that the passed in KeyValue has expected contents for the
+   * specified row, column & timestamp.
+   */
+  private void checkOneCell(KeyValue kv, byte[] cf,
+                             int rowIdx, int colIdx, long ts) {
+    String ctx = "rowIdx=" + rowIdx + "; colIdx=" + colIdx + "; ts=" + ts;
+    assertEquals("Row mismatch which checking: " + ctx,
+                 "row:"+ rowIdx, Bytes.toString(kv.getRow()));
+    assertEquals("ColumnFamily mismatch while checking: " + ctx,
+                 Bytes.toString(cf), Bytes.toString(kv.getFamily()));
+    assertEquals("Column qualifier mismatch while checking: " + ctx,
+                 "column:" + colIdx, Bytes.toString(kv.getQualifier()));
+    assertEquals("Timestamp mismatch while checking: " + ctx,
+                 ts, kv.getTimestamp());
+    assertEquals("Value mismatch while checking: " + ctx,
+                 "value-version-" + ts, Bytes.toString(kv.getValue()));
+  }
+
 }