You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by ap...@apache.org on 2015/07/30 03:47:50 UTC

[5/5] hbase git commit: HBASE-14155 StackOverflowError in reverse scan (Ramkrishna S. Vasudevan and Ted Yu)

HBASE-14155 StackOverflowError in reverse scan (Ramkrishna S. Vasudevan and Ted Yu)

Conflicts:
	hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d3d15ca5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d3d15ca5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d3d15ca5

Branch: refs/heads/branch-1.0
Commit: d3d15ca595df602536692b74600830560e800a65
Parents: 819b5aa
Author: Andrew Purtell <ap...@apache.org>
Authored: Wed Jul 29 18:22:55 2015 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Wed Jul 29 18:35:30 2015 -0700

----------------------------------------------------------------------
 .../io/encoding/BufferedDataBlockEncoder.java   |   1 +
 .../hadoop/hbase/HBaseTestingUtility.java       |  49 +++++++
 .../encoding/TestSeekBeforeWithReverseScan.java | 147 +++++++++++++++++++
 3 files changed, 197 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/d3d15ca5/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
index e872856..dacabe6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
@@ -829,6 +829,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
       current.tagsBuffer = previous.tagsBuffer;
       current.tagsCompressedLength = previous.tagsCompressedLength;
       current.uncompressTags = false;
+      current.setKey(current.keyBuffer, current.memstoreTS);
       previous.invalidate();
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/d3d15ca5/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 8fa47e0..6521739 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -44,6 +44,7 @@ import java.util.TreeSet;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Jdk14Logger;
@@ -89,7 +90,10 @@ import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
 import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
+import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL;
+import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALFactory;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.visibility.VisibilityLabelsCache;
 import org.apache.hadoop.hbase.tool.Canary;
@@ -1631,6 +1635,51 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
   }
 
   /**
+   * Create an unmanaged WAL. Be sure to close it when you're through.
+   */
+  public static WAL createWal(final Configuration conf, final Path rootDir, final HRegionInfo hri)
+      throws IOException {
+    // The WAL subsystem will use the default rootDir rather than the passed in rootDir
+    // unless I pass along via the conf.
+    Configuration confForWAL = new Configuration(conf);
+    confForWAL.set(HConstants.HBASE_DIR, rootDir.toString());
+    return (new WALFactory(confForWAL,
+        Collections.<WALActionsListener>singletonList(new MetricsWAL()),
+        "hregion-" + RandomStringUtils.randomNumeric(8))).
+        getWAL(hri.getEncodedNameAsBytes());
+  }
+
+  /**
+   * Create a region with its own WAL. Be sure to call
+   * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to clean up all resources.
+   */
+  public static HRegion createRegionAndWAL(final HRegionInfo info, final Path rootDir,
+      final Configuration conf, final HTableDescriptor htd) throws IOException {
+    return createRegionAndWAL(info, rootDir, conf, htd, true);
+  }
+
+  /**
+   * Create a region with its own WAL. Be sure to call
+   * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to clean up all resources.
+   */
+  public static HRegion createRegionAndWAL(final HRegionInfo info, final Path rootDir,
+      final Configuration conf, final HTableDescriptor htd, boolean initialize)
+      throws IOException {
+    WAL wal = createWal(conf, rootDir, info);
+    return HRegion.createHRegion(info, rootDir, conf, htd, wal, initialize);
+  }
+
+  /**
+   * Close both the HRegion {@code r} and its underlying WAL. For use in tests.
+   */
+  public static void closeRegionAndWAL(final HRegion r) throws IOException {
+    if (r == null) return;
+    r.close();
+    if (r.getWAL() == null) return;
+    r.getWAL().close();
+  }
+
+  /**
    * Modify a table, synchronous. Waiting logic similar to that of {@code admin.rb#alter_status}.
    */
   @SuppressWarnings("serial")

http://git-wip-us.apache.org/repos/asf/hbase/blob/d3d15ca5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
new file mode 100644
index 0000000..a60c006
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.encoding;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ SmallTests.class })
+public class TestSeekBeforeWithReverseScan {
+  private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
+
+  private HRegion region;
+
+  private byte[] cfName = Bytes.toBytes("a");
+  private byte[] cqName = Bytes.toBytes("b");
+
+  @Before
+  public void setUp() throws Exception {
+    TableName tableName = TableName.valueOf(getClass().getSimpleName());
+    HTableDescriptor htd = new HTableDescriptor(tableName);
+    htd.addFamily(new HColumnDescriptor(cfName).setDataBlockEncoding(DataBlockEncoding.FAST_DIFF));
+    HRegionInfo info = new HRegionInfo(tableName, null, null, false);
+    Path path = testUtil.getDataTestDir(getClass().getSimpleName());
+    region = HBaseTestingUtility.createRegionAndWAL(info, path, testUtil.getConfiguration(), htd);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    HBaseTestingUtility.closeRegionAndWAL(region);
+    testUtil.cleanupTestDir();
+  }
+
+  @Test
+  public void testReverseScanWithoutPadding() throws Exception {
+    byte[] row1 = Bytes.toBytes("a");
+    byte[] row2 = Bytes.toBytes("ab");
+    byte[] row3 = Bytes.toBytes("b");
+
+    Put put1 = new Put(row1);
+    put1.addColumn(cfName, cqName, HConstants.EMPTY_BYTE_ARRAY);
+    Put put2 = new Put(row2);
+    put2.addColumn(cfName, cqName, HConstants.EMPTY_BYTE_ARRAY);
+    Put put3 = new Put(row3);
+    put3.addColumn(cfName, cqName, HConstants.EMPTY_BYTE_ARRAY);
+
+    region.put(put1);
+    region.put(put2);
+    region.put(put3);
+    region.flushcache();
+    Scan scan = new Scan();
+    scan.setCacheBlocks(false);
+    scan.setReversed(true);
+    scan.setFilter(new FirstKeyOnlyFilter());
+    scan.addFamily(cfName);
+    RegionScanner scanner = region.getScanner(scan);
+    List<Cell> res = new ArrayList<Cell>();
+    int count = 1;
+    while (scanner.next(res)) {
+      count++;
+    }
+    assertEquals(Bytes.toString(res.get(0).getRowArray(), res.get(0).getRowOffset(), res.get(0)
+        .getRowLength()), "b");
+    assertEquals(Bytes.toString(res.get(1).getRowArray(), res.get(1).getRowOffset(), res.get(1)
+        .getRowLength()), "ab");
+    assertEquals(Bytes.toString(res.get(2).getRowArray(), res.get(2).getRowOffset(), res.get(2)
+        .getRowLength()), "a");
+    assertEquals(3, count);
+  }
+
+  @Test
+  public void testReverseScanWithPadding() throws Exception {
+    byte[] terminator = new byte[] { -1 };
+    byte[] row1 = Bytes.add(invert(Bytes.toBytes("a")), terminator);
+    byte[] row2 = Bytes.add(invert(Bytes.toBytes("ab")), terminator);
+    byte[] row3 = Bytes.add(invert(Bytes.toBytes("b")), terminator);
+
+    Put put1 = new Put(row1);
+    put1.addColumn(cfName, cqName, HConstants.EMPTY_BYTE_ARRAY);
+    Put put2 = new Put(row2);
+    put2.addColumn(cfName, cqName, HConstants.EMPTY_BYTE_ARRAY);
+    Put put3 = new Put(row3);
+    put3.addColumn(cfName, cqName, HConstants.EMPTY_BYTE_ARRAY);
+
+    region.put(put1);
+    region.put(put2);
+    region.put(put3);
+    region.flushcache();
+    Scan scan = new Scan();
+    scan.setCacheBlocks(false);
+    scan.setReversed(true);
+    scan.setFilter(new FirstKeyOnlyFilter());
+    scan.addFamily(cfName);
+    RegionScanner scanner = region.getScanner(scan);
+    List<Cell> res = new ArrayList<Cell>();
+    int count = 1;
+    while (scanner.next(res)) {
+      count++;
+    }
+    assertEquals(3, count);
+  }
+
+  private byte[] invert(byte[] bytes) {
+    byte[] newBytes = Arrays.copyOf(bytes, bytes.length);
+    for (int i = 0; i < newBytes.length; i++) {
+      newBytes[i] = (byte) (newBytes[i] ^ 0xFF);
+    }
+    return newBytes;
+  }
+}