Posted to commits@hbase.apache.org by ra...@apache.org on 2014/04/26 10:24:57 UTC

svn commit: r1590220 - in /hbase/trunk/hbase-server/src: main/java/org/apache/hadoop/hbase/coprocessor/ main/java/org/apache/hadoop/hbase/regionserver/ test/java/org/apache/hadoop/hbase/regionserver/

Author: ramkrishna
Date: Sat Apr 26 08:24:56 2014
New Revision: 1590220

URL: http://svn.apache.org/r1590220
Log:
HBASE-11054 Create new hook in StoreScanner to help users create their own delete tracker (Ram)
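
A minimal sketch of how a coprocessor could use the new hook (this example is
not part of the commit; CustomDeleteTracker is a hypothetical class that would
implement org.apache.hadoop.hbase.regionserver.DeleteTracker):

    import java.io.IOException;

    import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
    import org.apache.hadoop.hbase.regionserver.DeleteTracker;

    public class CustomDeleteTrackerObserver extends BaseRegionObserver {
      @Override
      public DeleteTracker postInstantiateDeleteTracker(
          final ObserverContext<RegionCoprocessorEnvironment> ctx, DeleteTracker delTracker)
          throws IOException {
        // Replace (or wrap) the default ScanDeleteTracker with a custom tracker.
        return new CustomDeleteTracker(delTracker); // hypothetical implementation
      }
    }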

Modified:
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java?rev=1590220&r1=1590219&r2=1590220&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java Sat Apr 26 08:24:56 2014
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.filter.Co
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegion.Operation;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
@@ -501,4 +502,11 @@ public abstract class BaseRegionObserver
   public void postCloseRegionOperation(final ObserverContext<RegionCoprocessorEnvironment> ctx,
       Operation op) throws IOException {
   }
+
+  @Override
+  public DeleteTracker postInstantiateDeleteTracker(
+      final ObserverContext<RegionCoprocessorEnvironment> ctx, DeleteTracker delTracker)
+      throws IOException {
+    return delTracker;
+  }
 }

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java?rev=1590220&r1=1590219&r2=1590220&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java Sat Apr 26 08:24:56 2014
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.filter.Co
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegion.Operation;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
@@ -1109,4 +1110,18 @@ public interface RegionObserver extends 
    */
   Cell postMutationBeforeWAL(ObserverContext<RegionCoprocessorEnvironment> ctx,
       MutationType opType, Mutation mutation, Cell oldCell, Cell newCell) throws IOException;
+
+  /**
+   * Called after the ScanQueryMatcher creates ScanDeleteTracker. Implementing
+   * this hook lets a coprocessor create a customized DeleteTracker and return
+   * it to be used in place of the default one.
+   *
+   * @param ctx the environment provided by the region server
+   * @param delTracker the DeleteTracker created by the ScanQueryMatcher
+   * @return the DeleteTracker to use
+   * @throws IOException
+   */
+  DeleteTracker postInstantiateDeleteTracker(
+      final ObserverContext<RegionCoprocessorEnvironment> ctx, DeleteTracker delTracker)
+      throws IOException;
 }

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java?rev=1590220&r1=1590219&r2=1590220&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java Sat Apr 26 08:24:56 2014
@@ -2152,4 +2152,27 @@ public class RegionCoprocessorHost
     }
   }
 
+  public DeleteTracker postInstantiateDeleteTracker(DeleteTracker tracker) throws IOException {
+    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
+    for (RegionEnvironment env : coprocessors) {
+      if (env.getInstance() instanceof RegionObserver) {
+        ctx = ObserverContext.createAndPrepare(env, ctx);
+        Thread currentThread = Thread.currentThread();
+        ClassLoader cl = currentThread.getContextClassLoader();
+        try {
+          currentThread.setContextClassLoader(env.getClassLoader());
+          tracker = ((RegionObserver) env.getInstance()).postInstantiateDeleteTracker(ctx, 
+              tracker);
+        } catch (Throwable e) {
+          handleCoprocessorThrowable(env, e);
+        } finally {
+          currentThread.setContextClassLoader(cl);
+        }
+        if (ctx.shouldComplete()) {
+          break;
+        }
+      }
+    }
+    return tracker;
+  }
 }
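
The host runs the hook on every loaded RegionObserver, handing each one the
tracker returned by the previous observer, and stops early once an observer
marks its ObserverContext complete (the ctx.shouldComplete() check above). A
minimal sketch, not part of the commit, of an override that ends the chain,
reusing the hypothetical CustomDeleteTracker from the earlier example:

      @Override
      public DeleteTracker postInstantiateDeleteTracker(
          final ObserverContext<RegionCoprocessorEnvironment> ctx, DeleteTracker delTracker)
          throws IOException {
        ctx.complete(); // no later coprocessor will be asked to replace the tracker
        return new CustomDeleteTracker(delTracker); // hypothetical implementation
      }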

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java?rev=1590220&r1=1590219&r2=1590220&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java Sat Apr 26 08:24:56 2014
@@ -112,6 +112,8 @@ public class ScanQueryMatcher {
    * first column.
    * */
   private boolean hasNullColumn = true;
+  
+  private RegionCoprocessorHost regionCoprocessorHost = null;
 
   // By default, when hbase.hstore.time.to.purge.deletes is 0ms, a delete
   // marker is always removed during a major compaction. If set to non-zero
@@ -145,13 +147,16 @@ public class ScanQueryMatcher {
    * @param earliestPutTs Earliest put seen in any of the store files.
    * @param oldestUnexpiredTS the oldest timestamp we are interested in,
    *  based on TTL
+   * @param regionCoprocessorHost 
+   * @throws IOException 
    */
-  public ScanQueryMatcher(Scan scan, ScanInfo scanInfo,
-      NavigableSet<byte[]> columns, ScanType scanType,
-      long readPointToUse, long earliestPutTs, long oldestUnexpiredTS) {
+  public ScanQueryMatcher(Scan scan, ScanInfo scanInfo, NavigableSet<byte[]> columns,
+      ScanType scanType, long readPointToUse, long earliestPutTs, long oldestUnexpiredTS,
+      RegionCoprocessorHost regionCoprocessorHost) throws IOException {
     this.tr = scan.getTimeRange();
     this.rowComparator = scanInfo.getComparator();
-    this.deletes =  new ScanDeleteTracker();
+    this.regionCoprocessorHost = regionCoprocessorHost;
+    this.deletes =  instantiateDeleteTracker();
     this.stopRow = scan.getStopRow();
     this.startKey = KeyValueUtil.createFirstDeleteFamilyOnRow(scan.getStartRow(),
         scanInfo.getFamily());
@@ -194,6 +199,14 @@ public class ScanQueryMatcher {
     this.isReversed = scan.isReversed();
   }
 
+  private DeleteTracker instantiateDeleteTracker() throws IOException {
+    DeleteTracker tracker = new ScanDeleteTracker();
+    if (regionCoprocessorHost != null) {
+      tracker = regionCoprocessorHost.postInstantiateDeleteTracker(tracker);
+    }
+    return tracker;
+  }
+
   /**
    * Construct a QueryMatcher for a scan that drop deletes from a limited range of rows.
    * @param scan
@@ -204,12 +217,14 @@ public class ScanQueryMatcher {
    *  based on TTL
    * @param dropDeletesFromRow The inclusive left bound of the range; can be EMPTY_START_ROW.
    * @param dropDeletesToRow The exclusive right bound of the range; can be EMPTY_END_ROW.
+   * @param regionCoprocessorHost 
+   * @throws IOException 
    */
   public ScanQueryMatcher(Scan scan, ScanInfo scanInfo, NavigableSet<byte[]> columns,
-      long readPointToUse, long earliestPutTs, long oldestUnexpiredTS,
-      byte[] dropDeletesFromRow, byte[] dropDeletesToRow) {
+      long readPointToUse, long earliestPutTs, long oldestUnexpiredTS, byte[] dropDeletesFromRow,
+      byte[] dropDeletesToRow, RegionCoprocessorHost regionCoprocessorHost) throws IOException {
     this(scan, scanInfo, columns, ScanType.COMPACT_RETAIN_DELETES, readPointToUse, earliestPutTs,
-        oldestUnexpiredTS);
+        oldestUnexpiredTS, regionCoprocessorHost);
     Preconditions.checkArgument((dropDeletesFromRow != null) && (dropDeletesToRow != null));
     this.dropDeletesFromRow = dropDeletesFromRow;
     this.dropDeletesToRow = dropDeletesToRow;
@@ -219,10 +234,10 @@ public class ScanQueryMatcher {
    * Constructor for tests
    */
   ScanQueryMatcher(Scan scan, ScanInfo scanInfo,
-      NavigableSet<byte[]> columns, long oldestUnexpiredTS) {
+      NavigableSet<byte[]> columns, long oldestUnexpiredTS) throws IOException {
     this(scan, scanInfo, columns, ScanType.USER_SCAN,
           Long.MAX_VALUE, /* max Readpoint to track versions */
-        HConstants.LATEST_TIMESTAMP, oldestUnexpiredTS);
+        HConstants.LATEST_TIMESTAMP, oldestUnexpiredTS, null);
   }
 
   /**

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java?rev=1590220&r1=1590219&r2=1590220&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java Sat Apr 26 08:24:56 2014
@@ -162,7 +162,7 @@ public class StoreScanner extends NonRev
     }
     matcher = new ScanQueryMatcher(scan, scanInfo, columns,
         ScanType.USER_SCAN, Long.MAX_VALUE, HConstants.LATEST_TIMESTAMP,
-        oldestUnexpiredTS);
+        oldestUnexpiredTS, store.getCoprocessorHost());
 
     this.store.addChangedReaderObserver(this);
 
@@ -226,11 +226,11 @@ public class StoreScanner extends NonRev
     this(store, false, scan, null, scanInfo.getTtl(), scanInfo.getMinVersions(),
         ((HStore)store).getHRegion().getReadpoint(IsolationLevel.READ_COMMITTED));
     if (dropDeletesFromRow == null) {
-      matcher = new ScanQueryMatcher(scan, scanInfo, null, scanType,
-          smallestReadPoint, earliestPutTs, oldestUnexpiredTS);
+      matcher = new ScanQueryMatcher(scan, scanInfo, null, scanType, smallestReadPoint,
+          earliestPutTs, oldestUnexpiredTS, store.getCoprocessorHost());
     } else {
-      matcher = new ScanQueryMatcher(scan, scanInfo, null, smallestReadPoint,
-          earliestPutTs, oldestUnexpiredTS, dropDeletesFromRow, dropDeletesToRow);
+      matcher = new ScanQueryMatcher(scan, scanInfo, null, smallestReadPoint, earliestPutTs,
+          oldestUnexpiredTS, dropDeletesFromRow, dropDeletesToRow, store.getCoprocessorHost());
     }
 
     // Filter the list of scanners using Bloom filters, time range, TTL, etc.
@@ -270,7 +270,7 @@ public class StoreScanner extends NonRev
     this(null, scan.getCacheBlocks(), scan, columns, scanInfo.getTtl(),
         scanInfo.getMinVersions(), readPt);
     this.matcher = new ScanQueryMatcher(scan, scanInfo, columns, scanType,
-        Long.MAX_VALUE, earliestPutTs, oldestUnexpiredTS);
+        Long.MAX_VALUE, earliestPutTs, oldestUnexpiredTS, null);
 
     // In unit tests, the store could be null
     if (this.store != null) {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java?rev=1590220&r1=1590219&r2=1590220&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java Sat Apr 26 08:24:56 2014
@@ -19,13 +19,14 @@
 
 package org.apache.hadoop.hbase.regionserver;
 
+import static org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode.INCLUDE;
+import static org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode.SKIP;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.NavigableSet;
 
-import static org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode.*;
-
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
@@ -341,7 +342,7 @@ public class TestQueryMatcher extends HB
     NavigableSet<byte[]> cols = get.getFamilyMap().get(fam2);
 
     ScanQueryMatcher qm = new ScanQueryMatcher(scan, scanInfo, cols, Long.MAX_VALUE,
-        HConstants.OLDEST_TIMESTAMP, HConstants.OLDEST_TIMESTAMP, from, to);
+        HConstants.OLDEST_TIMESTAMP, HConstants.OLDEST_TIMESTAMP, from, to, null);
     List<ScanQueryMatcher.MatchCode> actual =
         new ArrayList<ScanQueryMatcher.MatchCode>(rows.length);
     byte[] prevRow = null;