Posted to commits@hbase.apache.org by ra...@apache.org on 2010/07/17 01:29:19 UTC

svn commit: r964981 - in /hbase/trunk: ./ src/main/java/org/apache/hadoop/hbase/regionserver/ src/test/java/org/apache/hadoop/hbase/ src/test/java/org/apache/hadoop/hbase/regionserver/

Author: rawson
Date: Fri Jul 16 23:29:19 2010
New Revision: 964981

URL: http://svn.apache.org/viewvc?rev=964981&view=rev
Log:
HBASE-2840  Remove the final remnants of the old Get code - the query matchers and other helper classes

Removed:
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteCompare.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/GetDeleteTracker.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/QueryMatcher.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileGetScan.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/WildcardColumnTracker.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestDeleteCompare.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetDeleteTracker.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestWildcardColumnTracker.java
Modified:
    hbase/trunk/CHANGES.txt
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ColumnTracker.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ExplicitColumnTracker.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ScanWildcardColumnTracker.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWildcardColumnTracker.java

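With QueryMatcher, GetDeleteTracker and StoreFileGetScan gone, a Get is served through the scan machinery rather than a dedicated read path. Below is a minimal sketch of that conversion, using the same Scan(Get) constructor the updated TestQueryMatcher relies on; the GetViaScanSketch class, the readRow() helper, and the assumption that HRegion#getScanner(Scan) returns an InternalScanner at this revision are illustrative, not part of this commit.

package org.apache.hadoop.hbase.regionserver;   // placed alongside HRegion, like the tests

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;

public class GetViaScanSketch {
  // Illustrative only: answer a Get by converting it to a one-row Scan and
  // draining a scanner, instead of the removed QueryMatcher/StoreFileGetScan path.
  static List<KeyValue> readRow(HRegion region, Get get) throws IOException {
    Scan scan = new Scan(get);                          // scan bounded to exactly the Get's row and columns
    InternalScanner scanner = region.getScanner(scan);
    List<KeyValue> results = new ArrayList<KeyValue>();
    try {
      scanner.next(results);                            // single row, so one next() call is enough
    } finally {
      scanner.close();
    }
    return results;
  }
}
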
Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Fri Jul 16 23:29:19 2010
@@ -774,6 +774,8 @@ Release 0.21.0 - Unreleased
                next column (Pranav via jgray)
    HBASE-2835  Update hadoop jar to head of branch-0.20-append to catch three
                added patches
+   HBASE-2840  Remove the final remnants of the old Get code - the query matchers 
+   	       and other helper classes
 
   NEW FEATURES
    HBASE-1961  HBase EC2 scripts

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ColumnTracker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ColumnTracker.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ColumnTracker.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ColumnTracker.java Fri Jul 16 23:29:19 2010
@@ -19,8 +19,6 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import org.apache.hadoop.hbase.regionserver.QueryMatcher.MatchCode;
-
 /**
  * Implementing classes of this interface will be used for the tracking
  * and enforcement of columns and numbers of versions during the course of a
@@ -29,12 +27,10 @@ import org.apache.hadoop.hbase.regionser
  * Currently there are two different types of Store/Family-level queries.
  * <ul><li>{@link ExplicitColumnTracker} is used when the query specifies
  * one or more column qualifiers to return in the family.
- * <li>{@link WildcardColumnTracker} is used when the query asks for all
- * qualifiers within the family.
  * <p>
- * This class is utilized by {@link QueryMatcher} through two methods:
+ * This class is utilized by {@link ScanQueryMatcher} through two methods:
  * <ul><li>{@link #checkColumn} is called when a Put satisfies all other
- * conditions of the query.  This method returns a {@link MatchCode} to define
+ * conditions of the query.  This method returns a {@link org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode} to define
  * what action should be taken.
  * <li>{@link #update} is called at the end of every StoreFile or memstore.
  * <p>
@@ -48,7 +44,7 @@ public interface ColumnTracker {
    * @param length
    * @return The match code instance.
    */
-  public MatchCode checkColumn(byte [] bytes, int offset, int length);
+  public ScanQueryMatcher.MatchCode checkColumn(byte [] bytes, int offset, int length);
 
   /**
    * Updates internal variables in between files

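The javadoc above describes the two calls a ColumnTracker answers: checkColumn() once per qualifier, now returning a ScanQueryMatcher.MatchCode, and update() in between StoreFiles and the memstore. Here is a small caller sketch that mirrors the SKIP/INCLUDE/DONE sequence TestExplicitColumnTracker expects; the ExplicitColumnTracker constructor shape (requested columns plus a max-versions count) is assumed from this revision, and the class and variable names are illustrative.

package org.apache.hadoop.hbase.regionserver;   // the trackers live in this package

import java.util.NavigableSet;
import java.util.TreeSet;

import org.apache.hadoop.hbase.util.Bytes;

public class ColumnTrackerSketch {
  public static void main(String[] args) {
    // Columns the query asked for, kept in byte-comparator order.
    NavigableSet<byte[]> wanted = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
    wanted.add(Bytes.toBytes("col2"));
    wanted.add(Bytes.toBytes("col4"));

    // One version per column; constructor arguments assumed from this revision.
    ColumnTracker tracker = new ExplicitColumnTracker(wanted, 1);

    // checkColumn() is called once per qualifier the scan encounters and its
    // MatchCode tells the matcher what to do with the KeyValue.
    for (String col : new String[] {"col1", "col2", "col3", "col4", "col5"}) {
      byte[] q = Bytes.toBytes(col);
      ScanQueryMatcher.MatchCode code = tracker.checkColumn(q, 0, q.length);
      System.out.println(col + " -> " + code);          // SKIP, INCLUDE, SKIP, INCLUDE, DONE
    }

    // update() resets per-file state in between StoreFiles / the memstore.
    tracker.update();
  }
}
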
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ExplicitColumnTracker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ExplicitColumnTracker.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ExplicitColumnTracker.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ExplicitColumnTracker.java Fri Jul 16 23:29:19 2010
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.regionse
 import java.util.ArrayList;
 import java.util.List;
 import java.util.NavigableSet;
-import org.apache.hadoop.hbase.regionserver.QueryMatcher.MatchCode;
+
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.util.Byte
  * between rows.
  *
  * <p>
- * This class is utilized by {@link QueryMatcher} through two methods:
+ * This class is utilized by {@link ScanQueryMatcher} through two methods:
  * <ul><li>{@link #checkColumn} is called when a Put satisfies all other
- * conditions of the query.  This method returns a {@link MatchCode} to define
+ * conditions of the query.  This method returns a {@link org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode} to define
  * what action should be taken.
  * <li>{@link #update} is called at the end of every StoreFile or memstore.
  * <p>
@@ -84,18 +84,18 @@ public class ExplicitColumnTracker imple
    * @param bytes KeyValue buffer
    * @param offset offset to the start of the qualifier
    * @param length length of the qualifier
-   * @return MatchCode telling QueryMatcher what action to take
+   * @return MatchCode telling ScanQueryMatcher what action to take
    */
-  public MatchCode checkColumn(byte [] bytes, int offset, int length) {
+  public ScanQueryMatcher.MatchCode checkColumn(byte [] bytes, int offset, int length) {
     do {
       // No more columns left, we are done with this query
       if(this.columns.size() == 0) {
-        return MatchCode.DONE; // done_row
+        return ScanQueryMatcher.MatchCode.DONE; // done_row
       }
 
       // No more columns to match against, done with storefile
       if(this.column == null) {
-        return MatchCode.NEXT; // done_row
+        return ScanQueryMatcher.MatchCode.NEXT; // done_row
       }
 
       // Compare specific column to current column
@@ -114,13 +114,13 @@ public class ExplicitColumnTracker imple
             this.column = this.columns.get(this.index);
           }
         }
-        return MatchCode.INCLUDE;
+        return ScanQueryMatcher.MatchCode.INCLUDE;
       }
 
 
       if (ret > 0) {
          // Specified column is smaller than the current, skip to next column.
-        return MatchCode.SKIP;
+        return ScanQueryMatcher.MatchCode.SKIP;
       }
 
       // Specified column is bigger than current column
@@ -128,7 +128,7 @@ public class ExplicitColumnTracker imple
       if(ret <= -1) {
         if(++this.index == this.columns.size()) {
           // No more to match, do not include, done with storefile
-          return MatchCode.NEXT; // done_row
+          return ScanQueryMatcher.MatchCode.NEXT; // done_row
         }
         // This is the recursive case.
         this.column = this.columns.get(this.index);

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java Fri Jul 16 23:29:19 2010
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.util.Byte
 
 /**
  * State and utility processing {@link HRegion#getClosestRowBefore(byte[], byte[])}.
- * Like {@link GetDeleteTracker} and {@link ScanDeleteTracker} but does not
+ * Like {@link ScanDeleteTracker} and {@link ScanDeleteTracker} but does not
  * implement the {@link DeleteTracker} interface since state spans rows (There
  * is no update nor reset method).
  */

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java Fri Jul 16 23:29:19 2010
@@ -24,14 +24,11 @@ import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.lang.management.RuntimeMXBean;
 import java.rmi.UnexpectedException;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.NavigableSet;
-import java.util.Set;
 import java.util.SortedSet;
-import java.util.concurrent.CopyOnWriteArraySet;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
@@ -41,7 +38,6 @@ import org.apache.hadoop.hbase.HConstant
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.HeapSize;
-import org.apache.hadoop.hbase.regionserver.DeleteCompare.DeleteCode;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java Fri Jul 16 23:29:19 2010
@@ -23,7 +23,9 @@ package org.apache.hadoop.hbase.regionse
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
+import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 
 import java.util.NavigableSet;
@@ -31,14 +33,36 @@ import java.util.NavigableSet;
 /**
  * A query matcher that is specifically designed for the scan case.
  */
-public class ScanQueryMatcher extends QueryMatcher {
+public class ScanQueryMatcher {
   // Optimization so we can skip lots of compares when we decide to skip
   // to the next row.
   private boolean stickyNextRow;
   private byte[] stopRow;
 
+  protected TimeRange tr;
+
+  protected Filter filter;
+
+  /** Keeps track of deletes */
+  protected DeleteTracker deletes;
+
+  /** Keeps track of columns and versions */
+  protected ColumnTracker columns;
+
+  /** Key to seek to in memstore and StoreFiles */
+  protected KeyValue startKey;
+
+  /** Oldest allowed version stamp for TTL enforcement */
+  protected long oldestStamp;
+
+  /** Row comparator for the region this query is for */
+  KeyValue.KeyComparator rowComparator;
+
+  /** Row the query is on */
+  protected byte [] row;
+
   /**
-   * Constructs a QueryMatcher for a Scan.
+   * Constructs a ScanQueryMatcher for a Scan.
    * @param scan
    * @param family
    * @param columns
@@ -219,15 +243,79 @@ public class ScanQueryMatcher extends Qu
    * Set current row
    * @param row
    */
-  @Override
   public void setRow(byte [] row) {
     this.row = row;
     reset();
   }
 
-  @Override
   public void reset() {
-    super.reset();
+    this.deletes.reset();
+    this.columns.reset();
+
     stickyNextRow = false;
   }
+
+  // should be in KeyValue.
+  protected boolean isDelete(byte type) {
+    return (type != KeyValue.Type.Put.getCode());
+  }
+
+  protected boolean isExpired(long timestamp) {
+    return (timestamp < oldestStamp);
+  }
+
+  /**
+   *
+   * @return the start key
+   */
+  public KeyValue getStartKey() {
+    return this.startKey;
+  }
+
+  /**
+   * {@link #match} return codes.  These instruct the scanner moving through
+   * memstores and StoreFiles what to do with the current KeyValue.
+   * <p>
+   * Additionally, this contains "early-out" language to tell the scanner to
+   * move on to the next File (memstore or Storefile), or to return immediately.
+   */
+  public static enum MatchCode {
+    /**
+     * Include KeyValue in the returned result
+     */
+    INCLUDE,
+
+    /**
+     * Do not include KeyValue in the returned result
+     */
+    SKIP,
+
+    /**
+     * Do not include, jump to next StoreFile or memstore (in time order)
+     */
+    NEXT,
+
+    /**
+     * Do not include, return current result
+     */
+    DONE,
+
+    /**
+     * These codes are used by the ScanQueryMatcher
+     */
+
+    /**
+     * Done with the row, seek there.
+     */
+    SEEK_NEXT_ROW,
+    /**
+     * Done with column, seek to next.
+     */
+    SEEK_NEXT_COL,
+
+    /**
+     * Done with scan, thanks to the row filter.
+     */
+    DONE_SCAN,
+  }
 }
\ No newline at end of file

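MatchCode now lives on ScanQueryMatcher, and its values drive the scanner loop (the StoreScanner hunk below switches on them). The following is a simplified consumer sketch modeled on that loop; the drain() helper and the choice to step instead of reseek on SEEK_NEXT_ROW/SEEK_NEXT_COL are illustrative shortcuts, not what StoreScanner itself does.

package org.apache.hadoop.hbase.regionserver;   // KeyValueHeap and the matcher live here

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.KeyValue;

public class MatchCodeLoopSketch {
  // 'heap' merges memstore and StoreFile scanners; 'matcher' is a ScanQueryMatcher
  // already positioned on the current row.
  static List<KeyValue> drain(KeyValueHeap heap, ScanQueryMatcher matcher) throws IOException {
    List<KeyValue> results = new ArrayList<KeyValue>();
    KeyValue kv;
    LOOP: while ((kv = heap.peek()) != null) {
      ScanQueryMatcher.MatchCode qcode = matcher.match(kv);
      switch (qcode) {
        case INCLUDE:
          results.add(heap.next());      // keep the KeyValue and advance
          break;
        case SKIP:
          heap.next();                   // drop it and advance
          break;
        case SEEK_NEXT_ROW:
        case SEEK_NEXT_COL:
          heap.next();                   // a real scanner would reseek here
          break;
        case NEXT:
        case DONE:
        case DONE_SCAN:
        default:
          break LOOP;                    // the current result set is complete
      }
    }
    return results;
  }
}
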
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ScanWildcardColumnTracker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ScanWildcardColumnTracker.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ScanWildcardColumnTracker.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/ScanWildcardColumnTracker.java Fri Jul 16 23:29:19 2010
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.regionse
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.regionserver.QueryMatcher.MatchCode;
+import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -65,15 +65,15 @@ public class ScanWildcardColumnTracker i
       currentCount = 0;
 
       if (++currentCount > maxVersions)
-        return MatchCode.SKIP;
-      return MatchCode.INCLUDE;
+        return ScanQueryMatcher.MatchCode.SKIP;
+      return ScanQueryMatcher.MatchCode.INCLUDE;
     }
     int cmp = Bytes.compareTo(bytes, offset, length,
         columnBuffer, columnOffset, columnLength);
     if (cmp == 0) {
       if (++currentCount > maxVersions)
-        return MatchCode.SKIP; // skip to next col
-      return MatchCode.INCLUDE;
+        return ScanQueryMatcher.MatchCode.SKIP; // skip to next col
+      return ScanQueryMatcher.MatchCode.INCLUDE;
     }
 
     // new col > old col
@@ -84,8 +84,8 @@ public class ScanWildcardColumnTracker i
       columnLength = length;
       currentCount = 0;
       if (++currentCount > maxVersions)
-        return MatchCode.SKIP;
-      return MatchCode.INCLUDE;
+        return ScanQueryMatcher.MatchCode.SKIP;
+      return ScanQueryMatcher.MatchCode.INCLUDE;
     }
 
     // new col < oldcol
@@ -102,8 +102,8 @@ public class ScanWildcardColumnTracker i
     columnLength = length;
     currentCount = 0;
     if (++currentCount > maxVersions)
-      return MatchCode.SKIP;
-    return MatchCode.INCLUDE;
+      return ScanQueryMatcher.MatchCode.SKIP;
+    return ScanQueryMatcher.MatchCode.INCLUDE;
   }
 
   @Override

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java Fri Jul 16 23:29:19 2010
@@ -231,7 +231,7 @@ class StoreScanner implements KeyValueSc
     KeyValue kv;
     List<KeyValue> results = new ArrayList<KeyValue>();
     LOOP: while((kv = this.heap.peek()) != null) {
-      QueryMatcher.MatchCode qcode = matcher.match(kv);
+      ScanQueryMatcher.MatchCode qcode = matcher.match(kv);
       //DebugPrint.println("SS peek kv = " + kv + " with qcode = " + qcode);
       switch(qcode) {
         case INCLUDE:

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java Fri Jul 16 23:29:19 2010
@@ -528,7 +528,7 @@ public class HBaseTestingUtility {
     results.close();
     return digest.toString();
   }
-  
+
   /**
    * Creates many regions names "aaa" to "zzz".
    *
@@ -566,7 +566,7 @@ public class HBaseTestingUtility {
     };
     return createMultiRegions(c, table, columnFamily, KEYS);
   }
-  
+
   public int createMultiRegions(final Configuration c, final HTable table,
       final byte[] columnFamily, byte [][] startKeys)
   throws IOException {
@@ -624,7 +624,7 @@ public class HBaseTestingUtility {
     s.close();
     return rows;
   }
-  
+
   /**
    * Returns all rows from the .META. table for a given user table
    *
@@ -828,7 +828,7 @@ public class HBaseTestingUtility {
   }
 
   public void cleanupTestDir() throws IOException {
-    getTestDir().getFileSystem(conf).delete(getTestDir(), true);    
+    getTestDir().getFileSystem(conf).delete(getTestDir(), true);
   }
 
   public void waitTableAvailable(byte[] table, long timeoutMillis)
@@ -888,16 +888,16 @@ public class HBaseTestingUtility {
    * You'll get a NPE if you call before you've started a minidfscluster.
    * @param soft Soft limit
    * @param hard Hard limit
-   * @throws NoSuchFieldException 
-   * @throws SecurityException 
-   * @throws IllegalAccessException 
-   * @throws IllegalArgumentException 
+   * @throws NoSuchFieldException
+   * @throws SecurityException
+   * @throws IllegalAccessException
+   * @throws IllegalArgumentException
    */
   public void setNameNodeNameSystemLeasePeriod(final int soft, final int hard)
   throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
     // TODO: If 0.20 hadoop do one thing, if 0.21 hadoop do another.
     // Not available in 0.20 hdfs.  Use reflection to make it happen.
-    
+
     // private NameNode nameNode;
     Field field = this.dfsCluster.getClass().getDeclaredField("nameNode");
     field.setAccessible(true);
@@ -913,10 +913,10 @@ public class HBaseTestingUtility {
    * </pre>
    * @param stream A DFSClient.DFSOutputStream.
    * @param max
-   * @throws NoSuchFieldException 
-   * @throws SecurityException 
-   * @throws IllegalAccessException 
-   * @throws IllegalArgumentException 
+   * @throws NoSuchFieldException
+   * @throws SecurityException
+   * @throws IllegalAccessException
+   * @throws IllegalArgumentException
    */
   public static void setMaxRecoveryErrorCount(final OutputStream stream,
       final int max) {
@@ -966,7 +966,7 @@ public class HBaseTestingUtility {
       // If I get to here and all rows have a Server, then all have been assigned.
       if (rows == countOfRegions) break;
       LOG.info("Found=" + rows);
-      Threads.sleep(1000); 
+      Threads.sleep(1000);
     }
   }
 

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java Fri Jul 16 23:29:19 2010
@@ -26,8 +26,7 @@ import java.util.TreeSet;
 import java.util.Arrays;
 
 import org.apache.hadoop.hbase.HBaseTestCase;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.regionserver.QueryMatcher.MatchCode;
+import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
 import org.apache.hadoop.hbase.util.Bytes;
 
 
@@ -49,7 +48,7 @@ public class TestExplicitColumnTracker e
 
 
     //Initialize result
-    List<MatchCode> result = new ArrayList<MatchCode>();
+    List<ScanQueryMatcher.MatchCode> result = new ArrayList<ScanQueryMatcher.MatchCode>();
 
     //"Match"
     for(byte [] col : scannerColumns){
@@ -76,12 +75,12 @@ public class TestExplicitColumnTracker e
     //Looking for every other
     columns.add(col2);
     columns.add(col4);
-    List<MatchCode> expected = new ArrayList<MatchCode>();
-    expected.add(MatchCode.SKIP);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.SKIP);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.DONE);
+    List<MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.DONE);
     int maxVersions = 1;
 
     //Create "Scanner"
@@ -106,26 +105,26 @@ public class TestExplicitColumnTracker e
     columns.add(col2);
     columns.add(col4);
 
-    List<MatchCode> expected = new ArrayList<MatchCode>();
-    expected.add(MatchCode.SKIP);
-    expected.add(MatchCode.SKIP);
-    expected.add(MatchCode.SKIP);
-
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.SKIP);
-
-    expected.add(MatchCode.SKIP);
-    expected.add(MatchCode.SKIP);
-    expected.add(MatchCode.SKIP);
-
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.DONE);
-
-    expected.add(MatchCode.DONE);
-    expected.add(MatchCode.DONE);
-    expected.add(MatchCode.DONE);
+    List<ScanQueryMatcher.MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.DONE);
+
+    expected.add(ScanQueryMatcher.MatchCode.DONE);
+    expected.add(ScanQueryMatcher.MatchCode.DONE);
+    expected.add(ScanQueryMatcher.MatchCode.DONE);
     int maxVersions = 2;
 
     //Create "Scanner"
@@ -183,10 +182,10 @@ public class TestExplicitColumnTracker e
       col2, col3, col5 }));
     List<byte[]> scanner = Arrays.<byte[]>asList(
       new byte[][] { col1, col4 });
-    List<MatchCode> expected = Arrays.<MatchCode>asList(
-      new MatchCode[] {
-        MatchCode.SKIP,
-        MatchCode.SKIP });
+    List<ScanQueryMatcher.MatchCode> expected = Arrays.<ScanQueryMatcher.MatchCode>asList(
+      new ScanQueryMatcher.MatchCode[] {
+        ScanQueryMatcher.MatchCode.SKIP,
+        ScanQueryMatcher.MatchCode.SKIP });
     runTest(1, columns, scanner, expected);
   }
 }

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java Fri Jul 16 23:29:19 2010
@@ -361,7 +361,7 @@ public class TestHRegion extends HBaseTe
       assertEquals(OperationStatusCode.SUCCESS, codes[i]);
     }
     assertEquals(1, HLog.getSyncOps());
-    
+
     LOG.info("Next a batch put with one invalid family");
     puts[5].add(Bytes.toBytes("BAD_CF"), qual, val);
     codes = this.region.put(puts);
@@ -371,7 +371,7 @@ public class TestHRegion extends HBaseTe
         OperationStatusCode.SUCCESS, codes[i]);
     }
     assertEquals(1, HLog.getSyncOps());
-    
+
     LOG.info("Next a batch put that has to break into two batches to avoid a lock");
     Integer lockedRow = region.obtainRowLock(Bytes.toBytes("row_2"));
 
@@ -396,7 +396,7 @@ public class TestHRegion extends HBaseTe
       if (System.currentTimeMillis() - startWait > 10000) {
         fail("Timed out waiting for thread to sync first minibatch");
       }
-    }    
+    }
     LOG.info("...releasing row lock, which should let put thread continue");
     region.releaseRowLock(lockedRow);
     LOG.info("...joining on thread");
@@ -408,7 +408,7 @@ public class TestHRegion extends HBaseTe
       assertEquals((i == 5) ? OperationStatusCode.BAD_FAMILY :
         OperationStatusCode.SUCCESS, codes[i]);
     }
-    
+
     LOG.info("Nexta, a batch put which uses an already-held lock");
     lockedRow = region.obtainRowLock(Bytes.toBytes("row_2"));
     LOG.info("...obtained row lock");
@@ -427,13 +427,13 @@ public class TestHRegion extends HBaseTe
     }
     // Make sure we didn't do an extra batch
     assertEquals(1, HLog.getSyncOps());
-    
+
     // Make sure we still hold lock
     assertTrue(region.isRowLocked(lockedRow));
     LOG.info("...releasing lock");
     region.releaseRowLock(lockedRow);
   }
-  
+
   //////////////////////////////////////////////////////////////////////////////
   // checkAndMutate tests
   //////////////////////////////////////////////////////////////////////////////
@@ -834,7 +834,7 @@ public class TestHRegion extends HBaseTe
     result = region.get(get, null);
     assertEquals(0, result.size());
   }
-  
+
   /**
    * Tests that the special LATEST_TIMESTAMP option for puts gets
    * replaced by the actual timestamp
@@ -863,7 +863,7 @@ public class TestHRegion extends HBaseTe
     LOG.info("Got: " + kv);
     assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp",
         kv.getTimestamp() != HConstants.LATEST_TIMESTAMP);
-    
+
     // Check same with WAL enabled (historically these took different
     // code paths, so check both)
     row = Bytes.toBytes("row2");

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java Fri Jul 16 23:29:19 2010
@@ -25,12 +25,11 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hbase.HBaseTestCase;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueTestUtil;
 import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.regionserver.QueryMatcher.MatchCode;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
 import org.apache.hadoop.hbase.util.Bytes;
 
 
@@ -53,6 +52,7 @@ public class TestQueryMatcher extends HB
 
   long ttl = Long.MAX_VALUE;
   KeyComparator rowComparator;
+  private Scan scan;
 
   public void setUp() throws Exception {
     super.setUp();
@@ -74,6 +74,7 @@ public class TestQueryMatcher extends HB
     get.addColumn(fam2, col2);
     get.addColumn(fam2, col4);
     get.addColumn(fam2, col5);
+    this.scan = new Scan(get);
 
     rowComparator = KeyValue.KEY_COMPARATOR;
 
@@ -85,15 +86,15 @@ public class TestQueryMatcher extends HB
     //of just byte []
 
     //Expected result
-    List<MatchCode> expected = new ArrayList<MatchCode>();
-    expected.add(MatchCode.SKIP);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.SKIP);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.DONE);
+    List<MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.DONE);
 
-    QueryMatcher qm = new QueryMatcher(get, fam2,
+    ScanQueryMatcher qm = new ScanQueryMatcher(scan, fam2,
         get.getFamilyMap().get(fam2), ttl, rowComparator, 1);
 
     List<KeyValue> memstore = new ArrayList<KeyValue>();
@@ -105,9 +106,10 @@ public class TestQueryMatcher extends HB
 
     memstore.add(new KeyValue(row2, fam1, col1, data));
 
-    List<MatchCode> actual = new ArrayList<MatchCode>();
+    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>();
 
     for(KeyValue kv : memstore){
+      qm.setRow(kv.getRow());
       actual.add(qm.match(kv));
     }
 
@@ -128,15 +130,15 @@ public class TestQueryMatcher extends HB
     //of just byte []
 
     //Expected result
-    List<MatchCode> expected = new ArrayList<MatchCode>();
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.NEXT);
+    List<MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.NEXT);
 
-    QueryMatcher qm = new QueryMatcher(get, fam2, null, ttl, rowComparator, 1);
+    ScanQueryMatcher qm = new ScanQueryMatcher(scan, fam2, null, ttl, rowComparator, 1);
 
     List<KeyValue> memstore = new ArrayList<KeyValue>();
     memstore.add(new KeyValue(row1, fam2, col1, data));
@@ -146,9 +148,10 @@ public class TestQueryMatcher extends HB
     memstore.add(new KeyValue(row1, fam2, col5, data));
     memstore.add(new KeyValue(row2, fam1, col1, data));
 
-    List<MatchCode> actual = new ArrayList<MatchCode>();
+    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>();
 
-    for(KeyValue kv : memstore){
+    for(KeyValue kv : memstore) {
+      qm.setRow(kv.getRow());
       actual.add(qm.match(kv));
     }
 
@@ -164,7 +167,7 @@ public class TestQueryMatcher extends HB
 
 
   /**
-   * Verify that {@link QueryMatcher} only skips expired KeyValue
+   * Verify that {@link ScanQueryMatcher} only skips expired KeyValue
    * instances and does not exit early from the row (skipping
    * later non-expired KeyValues).  This version mimics a Get with
    * explicitly specified column qualifiers.
@@ -176,15 +179,15 @@ public class TestQueryMatcher extends HB
 
     long testTTL = 1000;
     MatchCode [] expected = new MatchCode[] {
-        MatchCode.SKIP,
-        MatchCode.INCLUDE,
-        MatchCode.SKIP,
-        MatchCode.INCLUDE,
-        MatchCode.SKIP,
-        MatchCode.NEXT
+        ScanQueryMatcher.MatchCode.SKIP,
+        ScanQueryMatcher.MatchCode.INCLUDE,
+        ScanQueryMatcher.MatchCode.SKIP,
+        ScanQueryMatcher.MatchCode.INCLUDE,
+        ScanQueryMatcher.MatchCode.SKIP,
+        ScanQueryMatcher.MatchCode.NEXT
     };
 
-    QueryMatcher qm = new QueryMatcher(get, fam2,
+    ScanQueryMatcher qm = new ScanQueryMatcher(scan, fam2,
         get.getFamilyMap().get(fam2), testTTL, rowComparator, 1);
 
     long now = System.currentTimeMillis();
@@ -199,6 +202,7 @@ public class TestQueryMatcher extends HB
 
     List<MatchCode> actual = new ArrayList<MatchCode>(kvs.length);
     for (KeyValue kv : kvs) {
+      qm.setRow(kv.getRow());
       actual.add( qm.match(kv) );
     }
 
@@ -214,7 +218,7 @@ public class TestQueryMatcher extends HB
 
 
   /**
-   * Verify that {@link QueryMatcher} only skips expired KeyValue
+   * Verify that {@link ScanQueryMatcher} only skips expired KeyValue
    * instances and does not exit early from the row (skipping
    * later non-expired KeyValues).  This version mimics a Get with
    * wildcard-inferred column qualifiers.
@@ -226,15 +230,15 @@ public class TestQueryMatcher extends HB
 
     long testTTL = 1000;
     MatchCode [] expected = new MatchCode[] {
-        MatchCode.INCLUDE,
-        MatchCode.INCLUDE,
-        MatchCode.SKIP,
-        MatchCode.INCLUDE,
-        MatchCode.SKIP,
-        MatchCode.NEXT
+        ScanQueryMatcher.MatchCode.INCLUDE,
+        ScanQueryMatcher.MatchCode.INCLUDE,
+        ScanQueryMatcher.MatchCode.SKIP,
+        ScanQueryMatcher.MatchCode.INCLUDE,
+        ScanQueryMatcher.MatchCode.SKIP,
+        ScanQueryMatcher.MatchCode.NEXT
     };
 
-    QueryMatcher qm = new QueryMatcher(get, fam2,
+    ScanQueryMatcher qm = new ScanQueryMatcher(scan, fam2,
         null, testTTL, rowComparator, 1);
 
     long now = System.currentTimeMillis();
@@ -247,8 +251,9 @@ public class TestQueryMatcher extends HB
         new KeyValue(row2, fam1, col1, now-10, data)
     };
 
-    List<MatchCode> actual = new ArrayList<MatchCode>(kvs.length);
+    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>(kvs.length);
     for (KeyValue kv : kvs) {
+      qm.setRow(kv.getRow());
       actual.add( qm.match(kv) );
     }
 

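The updated tests also show the per-row protocol: a ScanQueryMatcher is built from a Scan (here converted from the original Get), and setRow() must be called before match() so the delete and column trackers are reset for each row. A condensed sketch of that pattern, lifted from the test changes above; the class name and literal values are illustrative.

package org.apache.hadoop.hbase.regionserver;   // same package as the test

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class SetRowMatchSketch {
  public static void main(String[] args) throws Exception {
    byte[] row = Bytes.toBytes("row1");
    byte[] family = Bytes.toBytes("fam");
    byte[] qual = Bytes.toBytes("col1");
    byte[] data = Bytes.toBytes("data");

    Get get = new Get(row);
    get.addColumn(family, qual);
    Scan scan = new Scan(get);                       // the conversion the test now performs

    // Arguments follow the test's call: scan, family, requested columns,
    // ttl, row comparator, max versions.
    ScanQueryMatcher qm = new ScanQueryMatcher(scan, family,
        get.getFamilyMap().get(family), Long.MAX_VALUE, KeyValue.KEY_COMPARATOR, 1);

    List<KeyValue> memstore = new ArrayList<KeyValue>();
    memstore.add(new KeyValue(row, family, qual, data));

    List<ScanQueryMatcher.MatchCode> codes = new ArrayList<ScanQueryMatcher.MatchCode>();
    for (KeyValue kv : memstore) {
      qm.setRow(kv.getRow());                        // resets delete and column trackers for this row
      codes.add(qm.match(kv));
    }
    System.out.println(codes);                       // the single requested column should be INCLUDE
  }
}
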
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWildcardColumnTracker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWildcardColumnTracker.java?rev=964981&r1=964980&r2=964981&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWildcardColumnTracker.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWildcardColumnTracker.java Fri Jul 16 23:29:19 2010
@@ -24,7 +24,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hbase.HBaseTestCase;
-import org.apache.hadoop.hbase.regionserver.QueryMatcher.MatchCode;
+import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
 import org.apache.hadoop.hbase.util.Bytes;
 
 public class TestScanWildcardColumnTracker extends HBaseTestCase {
@@ -32,7 +32,6 @@ public class TestScanWildcardColumnTrack
   final static int VERSIONS = 2;
 
   public void testCheckColumn_Ok() {
-    //Create a WildcardColumnTracker
     ScanWildcardColumnTracker tracker =
       new ScanWildcardColumnTracker(VERSIONS);
 
@@ -45,15 +44,15 @@ public class TestScanWildcardColumnTrack
 
     //Setting up expected result
     List<MatchCode> expected = new ArrayList<MatchCode>();
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
 
-    List<MatchCode> actual = new ArrayList<MatchCode>();
+    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<MatchCode>();
 
     for(byte [] qualifier : qualifiers) {
-      MatchCode mc = tracker.checkColumn(qualifier, 0, qualifier.length);
+      ScanQueryMatcher.MatchCode mc = tracker.checkColumn(qualifier, 0, qualifier.length);
       actual.add(mc);
     }
 
@@ -64,7 +63,6 @@ public class TestScanWildcardColumnTrack
   }
 
   public void testCheckColumn_EnforceVersions() {
-    //Create a WildcardColumnTracker
     ScanWildcardColumnTracker tracker =
       new ScanWildcardColumnTracker(VERSIONS);
 
@@ -76,13 +74,13 @@ public class TestScanWildcardColumnTrack
     qualifiers.add(Bytes.toBytes("qualifer2"));
 
     //Setting up expected result
-    List<MatchCode> expected = new ArrayList<MatchCode>();
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.INCLUDE);
-    expected.add(MatchCode.SKIP);
-    expected.add(MatchCode.INCLUDE);
+    List<ScanQueryMatcher.MatchCode> expected = new ArrayList<MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.SKIP);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
 
-    List<MatchCode> actual = new ArrayList<MatchCode>();
+    List<MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>();
 
     for(byte [] qualifier : qualifiers) {
       MatchCode mc = tracker.checkColumn(qualifier, 0, qualifier.length);
@@ -96,7 +94,6 @@ public class TestScanWildcardColumnTrack
   }
 
   public void DisabledTestCheckColumn_WrongOrder() {
-    //Create a WildcardColumnTracker
     ScanWildcardColumnTracker tracker =
       new ScanWildcardColumnTracker(VERSIONS);