Posted to commits@hbase.apache.org by st...@apache.org on 2010/05/07 21:17:55 UTC

svn commit: r942184 [4/15] - in /hadoop/hbase/branches/0.20: ./ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/filter/ src/java/org/apache/hadoop/hbase/io/ src/java/org/apache/hadoop/hbase/io...

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/ServerCallable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/ServerCallable.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/ServerCallable.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/ServerCallable.java Fri May  7 19:17:48 2010
@@ -47,9 +47,9 @@ public abstract class ServerCallable<T> 
     this.tableName = tableName;
     this.row = row;
   }
-  
+
   /**
-   * 
+   *
    * @param reload set this to true if connection should re-find the region
    * @throws IOException
    */
@@ -65,7 +65,7 @@ public abstract class ServerCallable<T> 
     }
     return location.getServerAddress().toString();
   }
-  
+
   /** @return the region name */
   public byte[] getRegionName() {
     if (location == null) {
@@ -73,7 +73,7 @@ public abstract class ServerCallable<T> 
     }
     return location.getRegionInfo().getRegionName();
   }
-  
+
   /** @return the row */
   public byte [] getRow() {
     return row;

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/ServerConnection.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/ServerConnection.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/ServerConnection.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/ServerConnection.java Fri May  7 19:17:48 2010
@@ -32,9 +32,9 @@ public interface ServerConnection extend
    * @param rootRegion
    */
   public void setRootRegionLocation(HRegionLocation rootRegion);
-  
+
   /**
-   * Unset the root region location in the connection. Called by 
+   * Unset the root region location in the connection. Called by
    * ServerManager.processRegionClose.
    */
   public void unsetRootRegionLocation();

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java Fri May  7 19:17:48 2010
@@ -25,14 +25,14 @@ import org.apache.hadoop.hbase.HRegionIn
 class UnmodifyableHRegionInfo extends HRegionInfo {
   /*
    * Creates an unmodifyable copy of an HRegionInfo
-   * 
+   *
    * @param info
    */
   UnmodifyableHRegionInfo(HRegionInfo info) {
     super(info);
     this.tableDesc = new UnmodifyableHTableDescriptor(info.getTableDesc());
   }
-  
+
   /**
    * @param split set split status
    */

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java Fri May  7 19:17:48 2010
@@ -40,8 +40,8 @@ public class UnmodifyableHTableDescripto
   UnmodifyableHTableDescriptor(final HTableDescriptor desc) {
     super(desc.getName(), getUnmodifyableFamilies(desc), desc.getValues());
   }
-  
-  
+
+
   /*
    * @param desc
    * @return Families as unmodifiable array.
@@ -74,7 +74,7 @@ public class UnmodifyableHTableDescripto
   public HColumnDescriptor removeFamily(final byte [] column) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
-  
+
   /**
    * @see org.apache.hadoop.hbase.HTableDescriptor#setReadOnly(boolean)
    */
@@ -120,6 +120,6 @@ public class UnmodifyableHTableDescripto
 //   */
 //  @Override
 //  public void addIndex(IndexSpecification index) {
-//    throw new UnsupportedOperationException("HTableDescriptor is read-only"); 
+//    throw new UnsupportedOperationException("HTableDescriptor is read-only");
 //  }
 }

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/BinaryComparator.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/BinaryComparator.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/BinaryComparator.java Fri May  7 19:17:48 2010
@@ -21,7 +21,7 @@
 package org.apache.hadoop.hbase.filter;
 
 /**
- * A binary comparator which lexicographically compares against the specified 
+ * A binary comparator which lexicographically compares against the specified
  * byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}.
  */
 public class BinaryComparator extends WritableByteArrayComparable {

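For orientation, a minimal sketch of pairing this comparator with one of the
CompareFilter subclasses in the 0.20 client API (the value "active" and the
choice of ValueFilter are illustrative; assumes the usual
org.apache.hadoop.hbase.client, org.apache.hadoop.hbase.filter, and
org.apache.hadoop.hbase.util.Bytes imports):

  // Keep only cells whose value is exactly the bytes of "active".
  Scan scan = new Scan();
  scan.setFilter(new ValueFilter(CompareFilter.CompareOp.EQUAL,
      new BinaryComparator(Bytes.toBytes("active"))));
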
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java Fri May  7 19:17:48 2010
@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.util.Byte
  * A comparator which compares against a specified byte array, but only compares
  * up to the length of this byte array. For the rest it is similar to
  * {@link BinaryComparator}.
- */ 
+ */
 public class BinaryPrefixComparator extends WritableByteArrayComparable {
 
   /** Nullary constructor for Writable, do not use */
@@ -42,7 +42,7 @@ public class BinaryPrefixComparator exte
 
   @Override
   public int compareTo(byte [] value) {
-    return Bytes.compareTo(this.value, 0, this.value.length, value, 0, 
+    return Bytes.compareTo(this.value, 0, this.value.length, value, 0,
       this.value.length);
   }
 

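Because only the first prefix-length bytes are compared, EQUAL behaves as a
starts-with test. A minimal sketch, same imports as above, with a hypothetical
"user_" prefix:

  // Keep only cells whose value starts with "user_".
  Scan scan = new Scan();
  scan.setFilter(new ValueFilter(CompareFilter.CompareOp.EQUAL,
      new BinaryPrefixComparator(Bytes.toBytes("user_"))));
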
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java Fri May  7 19:17:48 2010
@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.KeyValue;
 
 /**
  * A filter, based on the ColumnCountGetFilter, takes two arguments: limit and offset.
- * This filter can be used for row-based indexing, where references to other tables are stored across many columns, 
+ * This filter can be used for row-based indexing, where references to other tables are stored across many columns,
  * in order to efficient lookups and paginated results for end users.
  */
 public class ColumnPaginationFilter implements Filter
@@ -57,11 +57,11 @@ public class ColumnPaginationFilter impl
 
   public ReturnCode filterKeyValue(KeyValue v)
   {
-    if(count >= offset + limit) 
+    if(count >= offset + limit)
     {
       return ReturnCode.NEXT_ROW;
     }
-      
+
     ReturnCode code = count < offset ? ReturnCode.SKIP : ReturnCode.INCLUDE;
     count++;
     return code;

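A minimal paging sketch (the row key and page geometry are made up): with
limit 25 and offset 50, the filter skips a wide row's first 50 columns and
then includes at most 25:

  // Fetch the third page of 25 columns from one wide index row.
  Get get = new Get(Bytes.toBytes("index-row"));
  get.setFilter(new ColumnPaginationFilter(25, 50));
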
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java Fri May  7 19:17:48 2010
@@ -34,7 +34,7 @@ import org.apache.hadoop.io.ObjectWritab
 
 /**
  * This filter is a no-op in HBase 0.20.  Don't use it.
- * 
+ *
  * This filter is used to filter based on the value of a given column. It takes
  * an operator (equal, greater, not equal, etc) and either a byte [] value or a
  * byte [] comparator. If we have a byte [] value then we just do a
@@ -72,7 +72,7 @@ public class ColumnValueFilter implement
 
   /**
    * Constructor.
-   * 
+   *
    * @param columnName name of column
    * @param compareOp operator
    * @param value value to compare column values against
@@ -81,14 +81,14 @@ public class ColumnValueFilter implement
       final byte[] value) {
     this(columnName, compareOp, value, true);
   }
-  
+
   /**
    * Constructor.
-   * 
+   *
    * @param columnName name of column
    * @param compareOp operator
    * @param value value to compare column values against
-   * @param filterIfColumnMissing if true then we will filter rows that don't have the column. 
+   * @param filterIfColumnMissing if true then we will filter rows that don't have the column.
    */
   public ColumnValueFilter(final byte[] columnName, final CompareOp compareOp,
       final byte[] value, boolean filterIfColumnMissing) {
@@ -100,7 +100,7 @@ public class ColumnValueFilter implement
 
   /**
    * Constructor.
-   * 
+   *
    * @param columnName name of column
    * @param compareOp operator
    * @param comparator Comparator to use.
@@ -109,14 +109,14 @@ public class ColumnValueFilter implement
       final WritableByteArrayComparable comparator) {
     this(columnName, compareOp, comparator, true);
   }
-  
+
   /**
   * Constructor.
-  * 
+  *
   * @param columnName name of column
   * @param compareOp operator
   * @param comparator Comparator to use.
-  * @param filterIfColumnMissing if true then we will filter rows that don't have the column. 
+  * @param filterIfColumnMissing if true then we will filter rows that don't have the column.
   */
  public ColumnValueFilter(final byte[] columnName, final CompareOp compareOp,
      final WritableByteArrayComparable comparator, boolean filterIfColumnMissing) {
@@ -134,7 +134,7 @@ public class ColumnValueFilter implement
     return false;
   }
 
-  
+
   public boolean filterColumn(final byte[] rowKey,
       final byte[] colKey, final byte[] data) {
     if (!filterIfColumnMissing) {
@@ -143,7 +143,7 @@ public class ColumnValueFilter implement
     if (!Arrays.equals(colKey, columnName)) {
       return false;
     }
-    return filterColumnValue(data, 0, data.length); 
+    return filterColumnValue(data, 0, data.length);
   }
 
 
@@ -186,7 +186,7 @@ public class ColumnValueFilter implement
       throw new RuntimeException("Unknown Compare op " + compareOp.name());
     }
   }
-  
+
   public boolean filterAllRemaining() {
     return false;
   }
@@ -196,7 +196,7 @@ public class ColumnValueFilter implement
       return false;
     if (filterIfColumnMissing) {
       return !columns.containsKey(columnName);
-    } 
+    }
     // Otherwise we must do the filter here
     Cell colCell = columns.get(columnName);
       if (colCell == null) {

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java Fri May  7 19:17:48 2010
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 
 /**
- * This is a generic filter to be used to filter by comparison.  It takes an 
+ * This is a generic filter to be used to filter by comparison.  It takes an
  * operator (equal, greater, not equal, etc) and a byte [] comparator.
  * <p>
  * To filter by row key, use {@link RowFilter}.
@@ -60,7 +60,7 @@ public abstract class CompareFilter impl
     /** greater than */
     GREATER;
   }
-  
+
   protected CompareOp compareOp;
   protected WritableByteArrayComparable comparator;
 
@@ -75,7 +75,7 @@ public abstract class CompareFilter impl
    * @param compareOp the compare op for row matching
    * @param comparator the comparator for row matching
    */
-  public CompareFilter(final CompareOp compareOp, 
+  public CompareFilter(final CompareOp compareOp,
       final WritableByteArrayComparable comparator) {
     this.compareOp = compareOp;
     this.comparator = comparator;
@@ -101,7 +101,7 @@ public abstract class CompareFilter impl
   public ReturnCode filterKeyValue(KeyValue v) {
     return ReturnCode.INCLUDE;
   }
-  
+
   public boolean filterRowKey(byte[] data, int offset, int length) {
     return false;
   }
@@ -109,7 +109,7 @@ public abstract class CompareFilter impl
   public boolean filterRow() {
     return false;
   }
-  
+
   public boolean filterAllRemaining() {
     return false;
   }
@@ -117,8 +117,8 @@ public abstract class CompareFilter impl
   protected boolean doCompare(final CompareOp compareOp,
       final WritableByteArrayComparable comparator, final byte [] data,
       final int offset, final int length) {
-    int compareResult = 
-      comparator.compareTo(Arrays.copyOfRange(data, offset, 
+    int compareResult =
+      comparator.compareTo(Arrays.copyOfRange(data, offset,
         offset + length));
     switch (compareOp) {
       case LESS:

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/Filter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/Filter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/Filter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/Filter.java Fri May  7 19:17:48 2010
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.KeyValue;
  *<ul>
  * <li>{@link #reset()}</li>
  * <li>{@link #filterAllRemaining()} -> true indicates scan is over, false, keep going on.</li>
- * <li>{@link #filterRowKey(byte[],int,int)} -> true to drop this row, 
+ * <li>{@link #filterRowKey(byte[],int,int)} -> true to drop this row,
  * if false, we will also call</li>
  * <li>{@link #filterKeyValue(KeyValue)} -> true to drop this key/value</li>
  * <li>{@link #filterRow()} -> last chance to drop entire row based on the sequence of

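A minimal custom filter following the call sequence above (the class name and
its key-length rule are hypothetical; it implements exactly the methods listed
plus the Writable contract the interface extends):

  import java.io.DataInput;
  import java.io.DataOutput;
  import java.io.IOException;
  import org.apache.hadoop.hbase.KeyValue;
  import org.apache.hadoop.hbase.filter.Filter;

  public class MinKeyLengthFilter implements Filter {
    private int minLength;

    public MinKeyLengthFilter() {}  // nullary constructor for Writable
    public MinKeyLengthFilter(int minLength) { this.minLength = minLength; }

    public void reset() {}                                 // no per-row state
    public boolean filterAllRemaining() { return false; }  // never ends the scan
    public boolean filterRowKey(byte[] buffer, int offset, int length) {
      return length < minLength;  // true drops the row before any KeyValue checks
    }
    public ReturnCode filterKeyValue(KeyValue v) { return ReturnCode.INCLUDE; }
    public boolean filterRow() { return false; }           // keep rows reaching here
    public void readFields(DataInput in) throws IOException { minLength = in.readInt(); }
    public void write(DataOutput out) throws IOException { out.writeInt(minLength); }
  }
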
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/FilterList.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/FilterList.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/FilterList.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/FilterList.java Fri May  7 19:17:48 2010
@@ -34,12 +34,12 @@ import org.apache.hadoop.io.Writable;
 
 /**
  * Implementation of {@link Filter} that represents an ordered List of Filters
- * which will be evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL} 
+ * which will be evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL}
  * (<code>!AND</code>) or {@link Operator#MUST_PASS_ONE} (<code>!OR</code>).
  * Since you can use Filter Lists as children of Filter Lists, you can create a
  * hierarchy of filters to be evaluated.
  * Defaults to {@link Operator#MUST_PASS_ALL}.
- * <p>TODO: Fix creation of Configuration on serialization and deserialization. 
+ * <p>TODO: Fix creation of Configuration on serialization and deserialization.
  */
 public class FilterList implements Filter {
   /** set operator */
@@ -65,7 +65,7 @@ public class FilterList implements Filte
   /**
    * Constructor that takes a set of {@link Filter}s. The default operator
    * MUST_PASS_ALL is assumed.
-   * 
+   *
    * @param rowFilters
    */
   public FilterList(final List<Filter> rowFilters) {
@@ -74,7 +74,7 @@ public class FilterList implements Filte
 
   /**
    * Constructor that takes an operator.
-   * 
+   *
    * @param operator Operator to process filter set with.
    */
   public FilterList(final Operator operator) {
@@ -83,7 +83,7 @@ public class FilterList implements Filte
 
   /**
    * Constructor that takes a set of {@link Filter}s and an operator.
-   * 
+   *
    * @param operator Operator to process filter set with.
    * @param rowFilters Set of row filters.
    */
@@ -94,7 +94,7 @@ public class FilterList implements Filte
 
   /**
    * Get the operator.
-   * 
+   *
    * @return operator
    */
   public Operator getOperator() {
@@ -103,7 +103,7 @@ public class FilterList implements Filte
 
   /**
    * Get the filters.
-   * 
+   *
    * @return filters
    */
   public List<Filter> getFilters() {
@@ -112,7 +112,7 @@ public class FilterList implements Filte
 
   /**
    * Add a filter.
-   * 
+   *
    * @param filter
    */
   public void addFilter(Filter filter) {

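A minimal composition sketch (the page size and the "deleted" sentinel value
are hypothetical; also assumes java.util.List/ArrayList imports):

  // Pass a KeyValue only if BOTH child filters pass it.
  List<Filter> filters = new ArrayList<Filter>();
  filters.add(new PageFilter(100));
  filters.add(new ValueFilter(CompareFilter.CompareOp.NOT_EQUAL,
      new BinaryComparator(Bytes.toBytes("deleted"))));
  Scan scan = new Scan();
  scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, filters));
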
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java Fri May  7 19:17:48 2010
@@ -63,7 +63,7 @@ public class InclusiveStopFilter impleme
     // if stopRowKey is <= buffer, then true, filter row.
     int cmp = Bytes.compareTo(stopRowKey, 0, stopRowKey.length,
       buffer, offset, length);
-    
+
     if(cmp < 0) {
       done = true;
     }

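A minimal sketch with hypothetical row keys; unlike Scan's exclusive stopRow,
the stop key given here is itself returned:

  // Scan [row-000 .. row-500], including row-500 itself.
  Scan scan = new Scan(Bytes.toBytes("row-000"));
  scan.setFilter(new InclusiveStopFilter(Bytes.toBytes("row-500")));
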
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/InclusiveStopRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/InclusiveStopRowFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/InclusiveStopRowFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/InclusiveStopRowFilter.java Fri May  7 19:17:48 2010
@@ -36,7 +36,7 @@ public class InclusiveStopRowFilter exte
 
   /**
    * Constructor that takes a stopRowKey on which to filter
-   * 
+   *
    * @param stopRowKey rowKey to filter on.
    */
   public InclusiveStopRowFilter(final byte [] stopRowKey) {

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PageFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PageFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PageFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PageFilter.java Fri May  7 19:17:48 2010
@@ -50,7 +50,7 @@ public class PageFilter implements Filte
 
   /**
    * Constructor that takes a maximum page size.
-   * 
+   *
    * @param pageSize Maximum result size.
    */
   public PageFilter(final long pageSize) {

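A minimal sketch (the table name and page size are hypothetical). As with
PageRowFilter below, the limit is applied per region, so the client should
still cut off once it has a full page:

  HTable table = new HTable(new HBaseConfiguration(), "mytable");
  Scan scan = new Scan();
  scan.setFilter(new PageFilter(10));  // at most ~10 rows pass per region
  ResultScanner scanner = table.getScanner(scan);
  int rows = 0;
  for (Result r : scanner) {
    if (++rows > 10) break;  // client-side stop; regions each apply the limit
    // process r
  }
  scanner.close();
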
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java Fri May  7 19:17:48 2010
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.io.Cell;
  * Implementation of RowFilterInterface that limits results to a specific page
  * size. It terminates scanning once the number of filter-passed results is >=
  * the given page size.
- * 
+ *
  * <p>
  * Note that this filter cannot guarantee that the number of results returned
  * to a client are <= page size. This is because the filter is applied
@@ -58,7 +58,7 @@ public class PageRowFilter implements Ro
 
   /**
    * Constructor that takes a maximum page size.
-   * 
+   *
    * @param pageSize Maximum result size.
    */
   public PageRowFilter(final long pageSize) {

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java Fri May  7 19:17:48 2010
@@ -36,21 +36,21 @@ import org.apache.hadoop.hbase.util.Byte
  */
 public class PrefixRowFilter implements RowFilterInterface {
   protected byte[] prefix;
-  
+
   /**
    * Constructor that takes a row prefix to filter on
-   * @param prefix 
+   * @param prefix
    */
   public PrefixRowFilter(byte[] prefix) {
     this.prefix = prefix;
   }
-  
+
   /**
    * Default Constructor, filters nothing. Required for RPC
    * deserialization
    */
   public PrefixRowFilter() { }
-  
+
   public void reset() {
     // Nothing to reset
   }
@@ -62,15 +62,15 @@ public class PrefixRowFilter implements 
   public void rowProcessed(boolean filtered, byte[] key, int offset, int length) {
     // does not care
   }
-  
+
   public boolean processAlways() {
     return false;
   }
-  
+
   public boolean filterAllRemaining() {
     return false;
   }
-  
+
   public boolean filterRowKey(final byte [] rowKey) {
     return filterRowKey(rowKey, 0, rowKey.length);
   }
@@ -109,11 +109,11 @@ public class PrefixRowFilter implements 
   public void validate(final byte [][] columns) {
     // does not do this
   }
-  
+
   public void readFields(final DataInput in) throws IOException {
     prefix = Bytes.readByteArray(in);
   }
-  
+
   public void write(final DataOutput out) throws IOException {
     Bytes.writeByteArray(out, prefix);
   }

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/QualifierFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/QualifierFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/QualifierFilter.java Fri May  7 19:17:48 2010
@@ -24,8 +24,8 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Get;
 
 /**
- * This filter is used to filter based on the column qualifier. It takes an 
- * operator (equal, greater, not equal, etc) and a byte [] comparator for the 
+ * This filter is used to filter based on the column qualifier. It takes an
+ * operator (equal, greater, not equal, etc) and a byte [] comparator for the
  * column qualifier portion of a key.
  * <p>
  * This filter can be wrapped with {@link WhileMatchFilter} and {@link SkipFilter}
@@ -58,7 +58,7 @@ public class QualifierFilter extends Com
   public ReturnCode filterKeyValue(KeyValue v) {
     int qualifierLength = v.getQualifierLength();
     if (qualifierLength > 0) {
-      if (doCompare(this.compareOp, this.comparator, v.getBuffer(), 
+      if (doCompare(this.compareOp, this.comparator, v.getBuffer(),
           v.getQualifierOffset(), qualifierLength)) {
         return ReturnCode.SKIP;
       }

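A minimal sketch with a hypothetical qualifier bound:

  // Keep only cells whose column qualifier sorts at or before "d".
  Scan scan = new Scan();
  scan.setFilter(new QualifierFilter(CompareFilter.CompareOp.LESS_OR_EQUAL,
      new BinaryComparator(Bytes.toBytes("d"))));
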
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java Fri May  7 19:17:48 2010
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.Byte
  * Implementation of RowFilterInterface that can filter by rowkey regular
  * expression and/or individual column values (equals comparison only). Multiple
  * column filters imply an implicit conjunction of filter criteria.
- * 
+ *
  * Note that column value filtering in this interface has been replaced by
  * {@link ColumnValueFilter}.
  * @deprecated This interface doesn't work well in new KeyValue world.
@@ -65,7 +65,7 @@ public class RegExpRowFilter implements 
 
   /**
    * Constructor that takes a row key regular expression to filter on.
-   * 
+   *
    * @param rowKeyRegExp
    */
   public RegExpRowFilter(final String rowKeyRegExp) {
@@ -75,7 +75,7 @@ public class RegExpRowFilter implements 
   /**
    * @deprecated Column filtering has been replaced by {@link ColumnValueFilter}
    * Constructor that takes a row key regular expression to filter on.
-   * 
+   *
    * @param rowKeyRegExp
    * @param columnFilter
    */
@@ -85,7 +85,7 @@ public class RegExpRowFilter implements 
     this.rowKeyRegExp = rowKeyRegExp;
     this.setColumnFilters(columnFilter);
   }
-  
+
   public void rowProcessed(boolean filtered, byte [] rowKey) {
     rowProcessed(filtered, rowKey, 0, rowKey.length);
   }
@@ -98,11 +98,11 @@ public class RegExpRowFilter implements 
   public boolean processAlways() {
     return false;
   }
-  
+
   /**
    * @deprecated Column filtering has been replaced by {@link ColumnValueFilter}
    * Specify a value that must be matched for the given column.
-   * 
+   *
    * @param colKey
    *          the column to match on
    * @param value
@@ -120,7 +120,7 @@ public class RegExpRowFilter implements 
   /**
    * @deprecated Column filtering has been replaced by {@link ColumnValueFilter}
    * Set column filters for a number of columns.
-   * 
+   *
    * @param columnFilter
    *          Map of columns with value criteria.
    */

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java Fri May  7 19:17:48 2010
@@ -38,7 +38,7 @@ import java.nio.charset.IllegalCharsetNa
  * regular expression matches a cell value in the column.
  * <p>
  * Only EQUAL or NOT_EQUAL {@link org.apache.hadoop.hbase.filter.CompareFilter.CompareOp}
- * comparisons are valid with this comparator. 
+ * comparisons are valid with this comparator.
  * <p>
  * For example:
  * <p>

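A sketch of the kind of example the javadoc introduces (the pattern is
hypothetical; remember only EQUAL or NOT_EQUAL apply):

  // Keep cells whose value looks like an IPv4 address on the 10.* network.
  Scan scan = new Scan();
  scan.setFilter(new ValueFilter(CompareFilter.CompareOp.EQUAL,
      new RegexStringComparator("10\\.\\d+\\.\\d+\\.\\d+")));
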
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilter.java Fri May  7 19:17:48 2010
@@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.client.Sc
 
 /**
  * This filter is used to filter based on the key. It takes an operator
- * (equal, greater, not equal, etc) and a byte [] comparator for the row, 
+ * (equal, greater, not equal, etc) and a byte [] comparator for the row,
  * and column qualifier portions of a key.
  * <p>
  * This filter can be wrapped with {@link WhileMatchFilter} to add more control.
@@ -51,7 +51,7 @@ public class RowFilter extends CompareFi
    * @param rowCompareOp the compare op for row matching
    * @param rowComparator the comparator for row matching
    */
-  public RowFilter(final CompareOp rowCompareOp, 
+  public RowFilter(final CompareOp rowCompareOp,
       final WritableByteArrayComparable rowComparator) {
     super(rowCompareOp, rowComparator);
   }
@@ -68,7 +68,7 @@ public class RowFilter extends CompareFi
     }
     return ReturnCode.INCLUDE;
   }
-  
+
   @Override
   public boolean filterRowKey(byte[] data, int offset, int length) {
     if(doCompare(this.compareOp, this.comparator, data, offset, length)) {

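A minimal sketch using a hypothetical key pattern:

  // Keep only rows whose key ends in "-meta".
  Scan scan = new Scan();
  scan.setFilter(new RowFilter(CompareFilter.CompareOp.EQUAL,
      new RegexStringComparator(".*-meta$")));
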
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilterInterface.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilterInterface.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilterInterface.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilterInterface.java Fri May  7 19:17:48 2010
@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.io.Writable;
 
 /**
- * 
+ *
  * Interface used for row-level filters applied to HRegion.HScanner scan
  * results during calls to next().
  *
@@ -46,17 +46,17 @@ import org.apache.hadoop.io.Writable;
 public interface RowFilterInterface extends Writable {
   /**
    * Resets the state of the filter. Used prior to the start of a Region scan.
-   * 
+   *
    */
   void reset();
 
   /**
-   * Called to let filter know the final decision (to pass or filter) on a 
-   * given row.  With out HScanner calling this, the filter does not know if a 
-   * row passed filtering even if it passed the row itself because other 
-   * filters may have failed the row. E.g. when this filter is a member of a 
+   * Called to let filter know the final decision (to pass or filter) on a
+   * given row.  With out HScanner calling this, the filter does not know if a
+   * row passed filtering even if it passed the row itself because other
+   * filters may have failed the row. E.g. when this filter is a member of a
    * RowFilterSet with an OR operator.
-   * 
+   *
    * @see RowFilterSet
    * @param filtered
    * @param key
@@ -65,12 +65,12 @@ public interface RowFilterInterface exte
   void rowProcessed(boolean filtered, byte [] key);
 
   /**
-   * Called to let filter know the final decision (to pass or filter) on a 
-   * given row.  With out HScanner calling this, the filter does not know if a 
-   * row passed filtering even if it passed the row itself because other 
-   * filters may have failed the row. E.g. when this filter is a member of a 
+   * Called to let filter know the final decision (to pass or filter) on a
+   * given row.  With out HScanner calling this, the filter does not know if a
+   * row passed filtering even if it passed the row itself because other
+   * filters may have failed the row. E.g. when this filter is a member of a
    * RowFilterSet with an OR operator.
-   * 
+   *
    * @see RowFilterSet
    * @param filtered
    * @param key
@@ -80,31 +80,31 @@ public interface RowFilterInterface exte
   void rowProcessed(boolean filtered, byte [] key, int offset, int length);
 
   /**
-   * Returns whether or not the filter should always be processed in any 
-   * filtering call.  This precaution is necessary for filters that maintain 
-   * state and need to be updated according to their response to filtering 
-   * calls (see WhileMatchRowFilter for an example).  At times, filters nested 
-   * in RowFilterSets may or may not be called because the RowFilterSet 
-   * determines a result as fast as possible.  Returning true for 
+   * Returns whether or not the filter should always be processed in any
+   * filtering call.  This precaution is necessary for filters that maintain
+   * state and need to be updated according to their response to filtering
+   * calls (see WhileMatchRowFilter for an example).  At times, filters nested
+   * in RowFilterSets may or may not be called because the RowFilterSet
+   * determines a result as fast as possible.  Returning true for
    * processAlways() ensures that the filter will always be called.
-   * 
+   *
    * @return whether or not to always process the filter
    */
   boolean processAlways();
-  
+
   /**
    * Determines if the filter has decided that all remaining results should be
    * filtered (skipped). This is used to prevent the scanner from scanning a
    * the rest of the HRegion when for sure the filter will exclude all
    * remaining rows.
-   * 
+   *
    * @return true if the filter intends to filter all remaining rows.
    */
   boolean filterAllRemaining();
 
   /**
    * Filters on just a row key. This is the first chance to stop a row.
-   * 
+   *
    * @param rowKey
    * @return true if given row key is filtered and row should not be processed.
    * @deprecated Use {@link #filterRowKey(byte[], int, int)} instead.
@@ -113,7 +113,7 @@ public interface RowFilterInterface exte
 
   /**
    * Filters on just a row key. This is the first chance to stop a row.
-   * 
+   *
    * @param rowKey
    * @param offset
    * @param length
@@ -122,9 +122,9 @@ public interface RowFilterInterface exte
   boolean filterRowKey(final byte [] rowKey, final int offset, final int length);
 
   /**
-   * Filters on row key, column name, and column value. This will take individual columns out of a row, 
+   * Filters on row key, column name, and column value. This will take individual columns out of a row,
    * but the rest of the row will still get through.
-   * 
+   *
    * @param rowKey row key to filter on.
    * @param columnName column name to filter on
    * @param columnValue column value to filter on
@@ -137,9 +137,9 @@ public interface RowFilterInterface exte
       final byte [] columnValue);
 
   /**
-   * Filters on row key, column name, and column value. This will take individual columns out of a row, 
+   * Filters on row key, column name, and column value. This will take individual columns out of a row,
    * but the rest of the row will still get through.
-   * 
+   *
    * @param rowKey row key to filter on.
    * @param colunmName column name to filter on
    * @param columnValue column value to filter on
@@ -151,16 +151,16 @@ public interface RowFilterInterface exte
       final int vlength);
 
   /**
-   * Filter on the fully assembled row. This is the last chance to stop a row. 
-   * 
+   * Filter on the fully assembled row. This is the last chance to stop a row.
+   *
    * @param columns
    * @return true if row filtered and should not be processed.
    */
   boolean filterRow(final SortedMap<byte [], Cell> columns);
 
   /**
-   * Filter on the fully assembled row. This is the last chance to stop a row. 
-   * 
+   * Filter on the fully assembled row. This is the last chance to stop a row.
+   *
    * @param results
    * @return true if row filtered and should not be processed.
    */
@@ -174,7 +174,7 @@ public interface RowFilterInterface exte
    * list will be ignored. In the case of null value filters, all rows will pass
    * the filter. This behavior should be 'undefined' for the user and therefore
    * not permitted.
-   * 
+   *
    * @param columns
    */
   void validate(final byte [][] columns);

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilterSet.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilterSet.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilterSet.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/RowFilterSet.java Fri May  7 19:17:48 2010
@@ -35,8 +35,8 @@ import org.apache.hadoop.io.ObjectWritab
 
 /**
  * Implementation of RowFilterInterface that represents a set of RowFilters
- * which will be evaluated with a specified boolean operator MUST_PASS_ALL 
- * (!AND) or MUST_PASS_ONE (!OR).  Since you can use RowFilterSets as children 
+ * which will be evaluated with a specified boolean operator MUST_PASS_ALL
+ * (!AND) or MUST_PASS_ONE (!OR).  Since you can use RowFilterSets as children
  * of RowFilterSet, you can create a hierarchy of filters to be evaluated.
  *
  * It is highly likely this construct will no longer work!
@@ -65,9 +65,9 @@ public class RowFilterSet implements Row
   }
 
   /**
-   * Constructor that takes a set of RowFilters. The default operator 
+   * Constructor that takes a set of RowFilters. The default operator
    * MUST_PASS_ALL is assumed.
-   * 
+   *
    * @param rowFilters
    */
   public RowFilterSet(final Set<RowFilterInterface> rowFilters) {
@@ -76,7 +76,7 @@ public class RowFilterSet implements Row
 
   /**
    * Constructor that takes a set of RowFilters and an operator.
-   * 
+   *
    * @param operator Operator to process filter set with.
    * @param rowFilters Set of row filters.
    */
@@ -87,29 +87,29 @@ public class RowFilterSet implements Row
   }
 
   /** Get the operator.
-   * 
+   *
    * @return operator
    */
   public Operator getOperator() {
     return operator;
   }
-  
+
   /** Get the filters.
-   * 
+   *
    * @return filters
    */
   public Set<RowFilterInterface> getFilters() {
     return filters;
   }
-  
+
   /** Add a filter.
-   * 
+   *
    * @param filter
    */
   public void addFilter(RowFilterInterface filter) {
     this.filters.add(filter);
   }
-  
+
   public void validate(final byte [][] columns) {
     for (RowFilterInterface filter : filters) {
       filter.validate(columns);
@@ -140,7 +140,7 @@ public class RowFilterSet implements Row
     }
     return false;
   }
-  
+
   public boolean filterAllRemaining() {
     boolean result = operator == Operator.MUST_PASS_ONE;
     for (RowFilterInterface filter : filters) {
@@ -187,7 +187,7 @@ public class RowFilterSet implements Row
     return result;
   }
 
-  public boolean filterColumn(final byte [] rowKey, final byte [] colKey, 
+  public boolean filterColumn(final byte [] rowKey, final byte [] colKey,
     final byte[] data) {
     return filterColumn(rowKey, 0, rowKey.length, colKey, 0, colKey.length,
         data, 0, data.length);
@@ -201,14 +201,14 @@ public class RowFilterSet implements Row
     for (RowFilterInterface filter : filters) {
       if (!resultFound) {
         if (operator == Operator.MUST_PASS_ALL) {
-          if (filter.filterAllRemaining() || 
+          if (filter.filterAllRemaining() ||
             filter.filterColumn(rowKey, roffset, rlength, columnName, coffset,
                 clength, columnValue, voffset, vlength)) {
             result = true;
             resultFound = true;
           }
         } else if (operator == Operator.MUST_PASS_ONE) {
-          if (!filter.filterAllRemaining() && 
+          if (!filter.filterAllRemaining() &&
             !filter.filterColumn(rowKey, roffset, rlength, columnName, coffset,
                 clength, columnValue, voffset, vlength)) {
             result = false;

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java Fri May  7 19:17:48 2010
@@ -35,19 +35,19 @@ import org.apache.hadoop.hbase.util.Byte
 
 /**
  * This filter is used to filter cells based on value. It takes a
- * {@link org.apache.hadoop.hbase.filter.CompareFilter.CompareOp} 
- * operator (equal, greater, not equal, etc), and either a byte [] value or 
+ * {@link org.apache.hadoop.hbase.filter.CompareFilter.CompareOp}
+ * operator (equal, greater, not equal, etc), and either a byte [] value or
  * a {@link org.apache.hadoop.hbase.filter.WritableByteArrayComparable}.
  * <p>
- * If we have a byte [] value then we just do a lexicographic compare. For 
- * example, if passed value is 'b' and cell has 'a' and the compare operator 
- * is LESS, then we will filter out this cell (return true).  If this is not 
- * sufficient (eg you want to deserialize a long and then compare it to a fixed 
+ * If we have a byte [] value then we just do a lexicographic compare. For
+ * example, if passed value is 'b' and cell has 'a' and the compare operator
+ * is LESS, then we will filter out this cell (return true).  If this is not
+ * sufficient (eg you want to deserialize a long and then compare it to a fixed
  * long value), then you can pass in your own comparator instead.
  * <p>
- * You must also specify a family and qualifier.  Only the value of this column 
- * will be tested. When using this filter on a {@link Scan} with specified 
- * inputs, the column to be tested should also be added as input (otherwise 
+ * You must also specify a family and qualifier.  Only the value of this column
+ * will be tested. When using this filter on a {@link Scan} with specified
+ * inputs, the column to be tested should also be added as input (otherwise
  * the filter will regard the column as missing).
  * <p>
  * To prevent the entire row from being emitted if the column is not found
@@ -65,7 +65,7 @@ public class SingleColumnValueFilter imp
   static final Log LOG = LogFactory.getLog(SingleColumnValueFilter.class);
 
   private byte [] columnFamily;
-  private byte [] columnQualifier; 
+  private byte [] columnQualifier;
   private CompareOp compareOp;
   private WritableByteArrayComparable comparator;
   private boolean foundColumn = false;
@@ -78,13 +78,13 @@ public class SingleColumnValueFilter imp
    */
   public SingleColumnValueFilter() {
   }
-  
+
   /**
    * Constructor for binary compare of the value of a single column.  If the
    * column is found and the condition passes, all columns of the row will be
    * emitted.  If the column is not found or the condition fails, the row will
    * not be emitted.
-   * 
+   *
    * @param family name of column family
    * @param qualifier name of column qualifier
    * @param compareOp operator
@@ -103,7 +103,7 @@ public class SingleColumnValueFilter imp
    * Use the filterIfColumnMissing flag to set whether the rest of the columns
    * in a row will be emitted if the specified column to check is not found in
    * the row.
-   * 
+   *
    * @param family name of column family
    * @param qualifier name of column qualifier
    * @param compareOp operator

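A minimal sketch (family "info", qualifier "status", and value "active" are
hypothetical), covering both caveats in the javadoc above: add the tested
column as a scan input, and set filterIfMissing so column-less rows are
dropped:

  // Emit a row only if info:status exists and equals "active".
  SingleColumnValueFilter f = new SingleColumnValueFilter(
      Bytes.toBytes("info"), Bytes.toBytes("status"),
      CompareFilter.CompareOp.EQUAL, Bytes.toBytes("active"));
  f.setFilterIfMissing(true);  // otherwise rows lacking the column pass through
  Scan scan = new Scan();
  scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("status"));
  scan.setFilter(f);
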
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SkipFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SkipFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SkipFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SkipFilter.java Fri May  7 19:17:48 2010
@@ -27,7 +27,7 @@ import java.io.IOException;
 import java.io.DataInput;
 
 /**
- * A wrapper filter that filters an entire row if any of the KeyValue checks do 
+ * A wrapper filter that filters an entire row if any of the KeyValue checks do
  * not pass.
  * <p>
  * For example, if all columns in a row represent weights of different things,
@@ -41,7 +41,7 @@ import java.io.DataInput;
  *     new BinaryComparator(Bytes.toBytes(0))));
  * </code>
  * Any row which contained a column whose value was 0 will be filtered out.
- * Without this filter, the other non-zero valued columns in the row would still 
+ * Without this filter, the other non-zero valued columns in the row would still
  * be emitted.
  */
 public class SkipFilter implements Filter {

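Wiring the javadoc's own weights example into a scan (usual imports assumed):

  // Emit a row only if none of its cells carries the value 0.
  Scan scan = new Scan();
  scan.setFilter(new SkipFilter(new ValueFilter(CompareFilter.CompareOp.NOT_EQUAL,
      new BinaryComparator(Bytes.toBytes(0)))));
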
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java Fri May  7 19:17:48 2010
@@ -30,14 +30,14 @@ import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
- * Implementation of RowFilterInterface that filters out rows greater than or 
+ * Implementation of RowFilterInterface that filters out rows greater than or
  * equal to a specified rowKey.
  *
  * @deprecated Use filters that are rooted on @{link Filter} instead
  */
 public class StopRowFilter implements RowFilterInterface {
   private byte [] stopRowKey;
-  
+
   /**
    * Default constructor, filters nothing. Required though for RPC
    * deserialization.
@@ -48,16 +48,16 @@ public class StopRowFilter implements Ro
 
   /**
    * Constructor that takes a stopRowKey on which to filter
-   * 
+   *
    * @param stopRowKey rowKey to filter on.
    */
   public StopRowFilter(final byte [] stopRowKey) {
     this.stopRowKey = stopRowKey;
   }
-  
+
   /**
    * An accessor for the stopRowKey
-   * 
+   *
    * @return the filter's stopRowKey
    */
   public byte [] getStopRowKey() {
@@ -83,7 +83,7 @@ public class StopRowFilter implements Ro
   public boolean processAlways() {
     return false;
   }
-  
+
   public boolean filterAllRemaining() {
     return false;
   }
@@ -104,11 +104,11 @@ public class StopRowFilter implements Ro
   }
 
   /**
-   * Because StopRowFilter does not examine column information, this method 
+   * Because StopRowFilter does not examine column information, this method
    * defaults to calling the rowKey-only version of filter.
-   * @param rowKey 
-   * @param colKey 
-   * @param data 
+   * @param rowKey
+   * @param colKey
+   * @param data
    * @return boolean
    */
   public boolean filterColumn(final byte [] rowKey, final byte [] colKey,
@@ -123,9 +123,9 @@ public class StopRowFilter implements Ro
   }
 
   /**
-   * Because StopRowFilter does not examine column information, this method 
+   * Because StopRowFilter does not examine column information, this method
    * defaults to calling filterAllRemaining().
-   * @param columns 
+   * @param columns
    * @return boolean
    */
   public boolean filterRow(final SortedMap<byte [], Cell> columns) {

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SubstringComparator.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SubstringComparator.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/SubstringComparator.java Fri May  7 19:17:48 2010
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.util.Byte
  * the value of a given column. Use it to test if a given substring appears
  * in a cell value in the column. The comparison is case insensitive.
  * <p>
- * Only EQUAL or NOT_EQUAL tests are valid with this comparator. 
+ * Only EQUAL or NOT_EQUAL tests are valid with this comparator.
  * <p>
  * For example:
  * <p>

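The example the javadoc leads into, sketched minimally (the substring "error"
is hypothetical):

  // Keep cells whose value contains "error" (comparison is case-insensitive).
  Scan scan = new Scan();
  scan.setFilter(new ValueFilter(CompareFilter.CompareOp.EQUAL,
      new SubstringComparator("error")));
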
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ValueFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ValueFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/ValueFilter.java Fri May  7 19:17:48 2010
@@ -24,8 +24,8 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Get;
 
 /**
- * This filter is used to filter based on column value. It takes an 
- * operator (equal, greater, not equal, etc) and a byte [] comparator for the 
+ * This filter is used to filter based on column value. It takes an
+ * operator (equal, greater, not equal, etc) and a byte [] comparator for the
  * cell value.
  * <p>
  * This filter can be wrapped with {@link WhileMatchFilter} and {@link SkipFilter}
@@ -56,7 +56,7 @@ public class ValueFilter extends Compare
 
   @Override
   public ReturnCode filterKeyValue(KeyValue v) {
-    if (doCompare(this.compareOp, this.comparator, v.getBuffer(), 
+    if (doCompare(this.compareOp, this.comparator, v.getBuffer(),
         v.getValueOffset(), v.getValueLength())) {
       return ReturnCode.SKIP;
     }

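A minimal sketch with a hypothetical cutoff value:

  // Keep only cells whose value sorts strictly after "m".
  Scan scan = new Scan();
  scan.setFilter(new ValueFilter(CompareFilter.CompareOp.GREATER,
      new BinaryComparator(Bytes.toBytes("m"))));
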
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java Fri May  7 19:17:48 2010
@@ -30,7 +30,7 @@ import java.io.DataInput;
  * A wrapper filter that returns true from {@link #filterAllRemaining()} as soon
  * as the wrapped filters {@link Filter#filterRowKey(byte[], int, int)},
  * {@link Filter#filterKeyValue(org.apache.hadoop.hbase.KeyValue)},
- * {@link org.apache.hadoop.hbase.filter.Filter#filterRow()} or 
+ * {@link org.apache.hadoop.hbase.filter.Filter#filterRow()} or
  * {@link org.apache.hadoop.hbase.filter.Filter#filterAllRemaining()} methods
  * returns true.
  */

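A minimal sketch (row keys hypothetical): the wrapper ends the whole scan at
the first row the inner RowFilter rejects:

  // Include rows while keys stay below "row-500"; the first miss ends the scan.
  Scan scan = new Scan();
  scan.setFilter(new WhileMatchFilter(new RowFilter(CompareFilter.CompareOp.LESS,
      new BinaryComparator(Bytes.toBytes("row-500")))));
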
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/WhileMatchRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/WhileMatchRowFilter.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/WhileMatchRowFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/WhileMatchRowFilter.java Fri May  7 19:17:48 2010
@@ -29,10 +29,10 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.Cell;
 
 /**
- * WhileMatchRowFilter is a wrapper filter that filters everything after the 
- * first filtered row.  Once the nested filter returns true for either of it's 
- * filter(..) methods or filterNotNull(SortedMap<Text, byte[]>), this wrapper's 
- * filterAllRemaining() will return true.  All filtering methods will 
+ * WhileMatchRowFilter is a wrapper filter that filters everything after the
+ * first filtered row.  Once the nested filter returns true for either of it's
+ * filter(..) methods or filterNotNull(SortedMap<Text, byte[]>), this wrapper's
+ * filterAllRemaining() will return true.  All filtering methods will
  * thereafter defer to the result of filterAllRemaining().
  *
  * @deprecated Use filters that are rooted on @{link Filter} instead
@@ -48,7 +48,7 @@ public class WhileMatchRowFilter impleme
   public WhileMatchRowFilter() {
     super();
   }
-  
+
   /**
    * Constructor
    * @param filter
@@ -56,16 +56,16 @@ public class WhileMatchRowFilter impleme
   public WhileMatchRowFilter(RowFilterInterface filter) {
     this.filter = filter;
   }
-  
+
   /**
    * Returns the internal filter being wrapped
-   * 
+   *
    * @return the internal filter
    */
   public RowFilterInterface getInternalFilter() {
     return this.filter;
   }
-  
+
   public void reset() {
     this.filterAllRemaining = false;
     this.filter.reset();
@@ -74,18 +74,18 @@ public class WhileMatchRowFilter impleme
   public boolean processAlways() {
     return true;
   }
-  
+
   /**
-   * Returns true once the nested filter has filtered out a row (returned true 
+   * Returns true once the nested filter has filtered out a row (returned true
    * on a call to one of it's filtering methods).  Until then it returns false.
-   * 
-   * @return true/false whether the nested filter has returned true on a filter 
+   *
+   * @return true/false whether the nested filter has returned true on a filter
    * call.
    */
   public boolean filterAllRemaining() {
     return this.filterAllRemaining || this.filter.filterAllRemaining();
   }
-  
+
   public boolean filterRowKey(final byte [] rowKey) {
     changeFAR(this.filter.filterRowKey(rowKey, 0, rowKey.length));
     return filterAllRemaining();
@@ -101,7 +101,7 @@ public class WhileMatchRowFilter impleme
     changeFAR(this.filter.filterColumn(rowKey, colKey, data));
     return filterAllRemaining();
   }
-  
+
   public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     changeFAR(this.filter.filterRow(columns));
     return filterAllRemaining();
@@ -113,9 +113,9 @@ public class WhileMatchRowFilter impleme
   }
 
   /**
-   * Change filterAllRemaining from false to true if value is true, otherwise 
+   * Change filterAllRemaining from false to true if value is true, otherwise
    * leave as is.
-   * 
+   *
    * @param value
    */
   private void changeFAR(boolean value) {
@@ -129,14 +129,14 @@ public class WhileMatchRowFilter impleme
   public void rowProcessed(boolean filtered, byte[] key, int offset, int length) {
     this.filter.rowProcessed(filtered, key, offset, length);
   }
-  
+
   public void validate(final byte [][] columns) {
     this.filter.validate(columns);
   }
-  
+
   public void readFields(DataInput in) throws IOException {
     String className = in.readUTF();
-    
+
     try {
       this.filter = (RowFilterInterface)(Class.forName(className).
         newInstance());
@@ -152,7 +152,7 @@ public class WhileMatchRowFilter impleme
           e);
     }
   }
-  
+
   public void write(DataOutput out) throws IOException {
     out.writeUTF(this.filter.getClass().getName());
     this.filter.write(out);

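For context on how the deprecated wrapper above is used: it is handed another
RowFilterInterface and turns that filter's first rejection into a permanent
stop.  A minimal sketch, assuming StopRowFilter as the wrapped filter and an
illustrative stop key:

  import org.apache.hadoop.hbase.filter.RowFilterInterface;
  import org.apache.hadoop.hbase.filter.StopRowFilter;
  import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
  import org.apache.hadoop.hbase.util.Bytes;

  public class WhileMatchExample {
    public static RowFilterInterface makeFilter() {
      // StopRowFilter rejects rows at and after the stop key; the wrapper
      // makes that first rejection permanent, so the scan can short-circuit
      // via filterAllRemaining().
      return new WhileMatchRowFilter(
          new StopRowFilter(Bytes.toBytes("row-0100")));
    }
  }
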
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/package-info.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/package-info.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/package-info.java Fri May  7 19:17:48 2010
@@ -18,7 +18,7 @@
  * limitations under the License.
  */
 /**Provides row-level filters applied to HRegion scan results during calls to
- * {@link org.apache.hadoop.hbase.client.ResultScanner#next()}. 
+ * {@link org.apache.hadoop.hbase.client.ResultScanner#next()}.
 
 <p>Since HBase 0.20.0, {@link org.apache.hadoop.hbase.filter.Filter} is the new
 interface used for filtering.  It replaces the deprecated

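For readers migrating off RowFilterInterface, a sketch of the replacement
Filter API the package doc points at.  PageFilter is just one example Filter
implementation and the page size is arbitrary:

  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.filter.PageFilter;

  public class NewFilterApiExample {
    public static Scan pagedScan() {
      Scan scan = new Scan();
      // The Filter is evaluated server side during the scan.
      scan.setFilter(new PageFilter(10));
      return scan;
    }
  }
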
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/BatchOperation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/BatchOperation.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/BatchOperation.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/BatchOperation.java Fri May  7 19:17:48 2010
@@ -28,7 +28,7 @@ import org.apache.hadoop.io.Writable;
 
 /**
  * Batch update operation.
- * 
+ *
  * If value is null, it's a DELETE operation.  If it's non-null, it's a PUT.
  * This object is purposely bare-bones because many instances are created
  * during bulk uploads.  We have one class for DELETEs and PUTs rather than
@@ -42,12 +42,12 @@ public class BatchOperation implements W
    */
   // JHat says this is 32 bytes.
   public final int ESTIMATED_HEAP_TAX = 36;
-  
+
   private byte [] column = null;
-  
+
   // A null value defines DELETE operations.
   private byte [] value = null;
-  
+
   /**
    * Default constructor
    */
@@ -118,7 +118,7 @@ public class BatchOperation implements W
   public String toString() {
     return "column => " + Bytes.toString(this.column) + ", value => '...'";
   }
-  
+
   // Writable methods
 
   // This is a hotspot when updating deserializing incoming client submissions.
@@ -142,7 +142,7 @@ public class BatchOperation implements W
       out.write(value);
     }
   }
-  
+
   public long heapSize() {
     return Bytes.ESTIMATED_HEAP_TAX * 2 + this.column.length +
       this.value.length + ESTIMATED_HEAP_TAX;

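The DELETE-if-null convention above can be observed by iterating a BatchUpdate
(BatchUpdate is Iterable<BatchOperation>, per the next file).  A sketch,
assuming BatchOperation exposes the usual getColumn()/getValue() accessors:

  import org.apache.hadoop.hbase.io.BatchOperation;
  import org.apache.hadoop.hbase.io.BatchUpdate;
  import org.apache.hadoop.hbase.util.Bytes;

  public class BatchOperationExample {
    public static void dump(BatchUpdate bu) {
      for (BatchOperation op : bu) {
        // A null value marks a DELETE; any other value is a PUT.
        String kind = (op.getValue() == null) ? "DELETE" : "PUT";
        System.out.println(kind + " " + Bytes.toString(op.getColumn()));
      }
    }
  }
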
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java Fri May  7 19:17:48 2010
@@ -38,7 +38,7 @@ import org.apache.hadoop.io.WritableComp
 
 /**
  * A Writable object that contains a series of BatchOperations
- * 
+ *
  * There is one BatchUpdate object per server, so a series of batch operations
  * can result in multiple BatchUpdate objects if the batch contains rows that
  * are served by multiple region servers.
@@ -47,25 +47,25 @@ import org.apache.hadoop.io.WritableComp
 public class BatchUpdate
 implements WritableComparable<BatchUpdate>, Iterable<BatchOperation>, HeapSize {
   private static final Log LOG = LogFactory.getLog(BatchUpdate.class);
-  
+
   /**
    * Estimated 'shallow size' of this object not counting payload.
    */
   // Shallow size is 56.  Add 32 for the arraylist below.
   public static final int ESTIMATED_HEAP_TAX = 56 + 32;
-  
+
   // the row being updated
   private byte [] row = null;
   private long size = 0;
-    
+
   // the batched operations
   private ArrayList<BatchOperation> operations =
     new ArrayList<BatchOperation>();
-  
+
   private long timestamp = HConstants.LATEST_TIMESTAMP;
-  
+
   private long rowLock = -1l;
-  
+
   /**
    * Default constructor used when serializing.  Do not use directly.
    */
@@ -76,7 +76,7 @@ implements WritableComparable<BatchUpdat
   /**
    * Initialize a BatchUpdate operation on a row. Timestamp is assumed to be
    * now.
-   * 
+   *
    * @param row
    */
   public BatchUpdate(final String row) {
@@ -86,7 +86,7 @@ implements WritableComparable<BatchUpdat
   /**
    * Initialize a BatchUpdate operation on a row. Timestamp is assumed to be
    * now.
-   * 
+   *
    * @param row
    */
   public BatchUpdate(final byte [] row) {
@@ -95,14 +95,14 @@ implements WritableComparable<BatchUpdat
 
   /**
    * Initialize a BatchUpdate operation on a row with a specific timestamp.
-   * 
+   *
    * @param row
    * @param timestamp
    */
   public BatchUpdate(final String row, long timestamp){
     this(Bytes.toBytes(row), timestamp);
   }
-  
+
   /**
    * Copy constructor
    * @param buToCopy BatchUpdate to copy
@@ -122,7 +122,7 @@ implements WritableComparable<BatchUpdat
 
   /**
    * Initialize a BatchUpdate operation on a row with a specific timestamp.
-   * 
+   *
    * @param row
    * @param timestamp
    */
@@ -132,7 +132,7 @@ implements WritableComparable<BatchUpdat
     this.operations = new ArrayList<BatchOperation>();
     this.size = (row == null)? 0: row.length;
   }
-  
+
   /**
    * Create a batch operation.
    * @param rr the RowResult
@@ -143,7 +143,7 @@ implements WritableComparable<BatchUpdat
       this.put(entry.getKey(), entry.getValue().getValue());
     }
   }
-  
+
   /**
    * Get the row lock associated with this update
    * @return the row lock
@@ -172,29 +172,29 @@ implements WritableComparable<BatchUpdat
   public long getTimestamp() {
     return timestamp;
   }
-  
+
   /**
    * Set this BatchUpdate's timestamp.
-   * 
+   *
    * @param timestamp
-   */  
+   */
   public void setTimestamp(long timestamp) {
     this.timestamp = timestamp;
   }
-  
+
   /**
    * Get the current value of the specified column
-   * 
+   *
    * @param column column name
    * @return byte[] the cell value, returns null if the column does not exist.
    */
   public synchronized byte[] get(final String column) {
     return get(Bytes.toBytes(column));
   }
-  
+
   /**
-   * Get the current value of the specified column 
-   * 
+   * Get the current value of the specified column
+   *
    * @param column column name
    * @return byte[] the cell value, returns null if the column does not exist.
    */
@@ -209,7 +209,7 @@ implements WritableComparable<BatchUpdat
 
   /**
    * Get the current columns
-   * 
+   *
    * @return byte[][] an array of byte[] columns
    */
   public synchronized byte[][] getColumns() {
@@ -222,17 +222,17 @@ implements WritableComparable<BatchUpdat
 
   /**
    * Check if the specified column is currently assigned a value
-   * 
+   *
    * @param column column to check for
    * @return boolean true if the given column exists
    */
   public synchronized boolean hasColumn(String column) {
     return hasColumn(Bytes.toBytes(column));
   }
-  
+
   /**
    * Check if the specified column is currently assigned a value
-   * 
+   *
    * @param column column to check for
    * @return boolean true if the given column exists
    */
@@ -243,8 +243,8 @@ implements WritableComparable<BatchUpdat
     }
     return true;
   }
-  
-  /** 
+
+  /**
    * Change a value for the specified column
    *
    * @param column column whose value is being set
@@ -254,7 +254,7 @@ implements WritableComparable<BatchUpdat
     put(Bytes.toBytes(column), val);
   }
 
-  /** 
+  /**
    * Change a value for the specified column
    *
    * @param column column whose value is being set
@@ -270,7 +270,7 @@ implements WritableComparable<BatchUpdat
     operations.add(bo);
   }
 
-  /** 
+  /**
    * Delete the value for a column.
    * Deletes the cell whose row/column/commit-timestamp match those of the
    * delete.
@@ -280,7 +280,7 @@ implements WritableComparable<BatchUpdat
     delete(Bytes.toBytes(column));
   }
 
-  /** 
+  /**
    * Delete the value for a column.
    * Deletes the cell whose row/column/commit-timestamp match those of the
    * delete.
@@ -293,7 +293,7 @@ implements WritableComparable<BatchUpdat
   //
   // Iterable
   //
-  
+
   /**
    * @return Iterator<BatchOperation>
    */
@@ -363,7 +363,7 @@ implements WritableComparable<BatchUpdat
     return this.row.length + Bytes.ESTIMATED_HEAP_TAX + this.size +
       ESTIMATED_HEAP_TAX;
   }
-  
+
   /**
    * Code to test sizes of BatchUpdate arrays.
    * @param args

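A minimal usage sketch for the class above.  The table handle, row key, and
column names are placeholders; commit() is the usual HTable call for sending
a BatchUpdate:

  import java.io.IOException;
  import org.apache.hadoop.hbase.client.HTable;
  import org.apache.hadoop.hbase.io.BatchUpdate;
  import org.apache.hadoop.hbase.util.Bytes;

  public class BatchUpdateExample {
    public static void update(HTable table) throws IOException {
      BatchUpdate bu = new BatchUpdate("row1");
      bu.put("info:name", Bytes.toBytes("value"));  // PUT
      bu.delete("info:stale");                      // DELETE
      table.commit(bu);  // sends all operations for this row in one call
    }
  }
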
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/Cell.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/Cell.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/Cell.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/Cell.java Fri May  7 19:17:48 2010
@@ -62,7 +62,7 @@ public class Cell implements Writable, I
 
   /**
    * Create a new Cell with a given value and timestamp. Used by HStore.
-   * 
+   *
    * @param value
    * @param timestamp
    */
@@ -72,7 +72,7 @@ public class Cell implements Writable, I
 
   /**
    * Create a new Cell with a given value and timestamp. Used by HStore.
-   * 
+   *
    * @param value
    * @param timestamp
    */
@@ -82,7 +82,7 @@ public class Cell implements Writable, I
 
   /**
    * Create a new Cell with a given value and timestamp. Used by HStore.
-   * 
+   *
    * @param bb
    * @param timestamp
    */
@@ -134,7 +134,7 @@ public class Cell implements Writable, I
   /**
    * Add a new timestamp and value to this cell provided timestamp does not
    * already exist
-   * 
+   *
    * @param val
    * @param ts
    */
@@ -264,7 +264,7 @@ public class Cell implements Writable, I
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see
    * org.apache.hadoop.hbase.rest.serializer.ISerializable#restSerialize(org
    * .apache.hadoop.hbase.rest.serializer.IRestSerializer)

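A small sketch of the versioned-cell behavior documented above.  It assumes
the adder described in the Javadoc is named add(value, timestamp); the values
and timestamps are made up:

  import org.apache.hadoop.hbase.io.Cell;
  import org.apache.hadoop.hbase.util.Bytes;

  public class CellExample {
    public static Cell twoVersions() {
      Cell c = new Cell(Bytes.toBytes("v1"), 1000L);
      // Assumed adder; it only inserts when the timestamp is not
      // already present in the cell.
      c.add(Bytes.toBytes("v2"), 2000L);
      return c;
    }
  }
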
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java Fri May  7 19:17:48 2010
@@ -41,12 +41,12 @@ public interface CodeToClassAndBack {
    */
   public static final Map<Class<?>, Byte> CLASS_TO_CODE =
     new HashMap<Class<?>, Byte>();
-  
+
   /**
    * Class list for supported classes
    */
   public Class<?>[] classList = {byte[].class, Cell.class};
-  
+
   /**
    * The static loader that is used instead of the static constructor in
    * HbaseMapWritable.
@@ -55,7 +55,7 @@ public interface CodeToClassAndBack {
     new InternalStaticLoader(classList, CODE_TO_CLASS, CLASS_TO_CODE);
 
   /**
-   * Class that loads the static maps with their values. 
+   * Class that loads the static maps with their values.
    */
   public class InternalStaticLoader{
     InternalStaticLoader(Class<?>[] classList,

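The InternalStaticLoader above exists because a Java interface cannot contain
a static initializer block, so a helper object's constructor populates the
shared maps instead.  A generic sketch of the same pattern with stand-in
classes:

  import java.util.HashMap;
  import java.util.Map;

  public interface CodeMaps {
    Map<Byte, Class<?>> CODE_TO_CLASS = new HashMap<Byte, Class<?>>();
    Map<Class<?>, Byte> CLASS_TO_CODE = new HashMap<Class<?>, Byte>();

    // Interfaces cannot declare static { } blocks, so a throwaway field
    // whose initializer fills the maps stands in for one.
    Loader LOADER = new Loader();

    class Loader {
      Loader() {
        byte code = 0;
        for (Class<?> c : new Class<?>[] {byte[].class, String.class}) {
          CODE_TO_CLASS.put(code, c);
          CLASS_TO_CODE.put(c, code++);
        }
      }
    }
  }
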
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HalfHFileReader.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HalfHFileReader.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HalfHFileReader.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HalfHFileReader.java Fri May  7 19:17:48 2010
@@ -40,10 +40,10 @@ import org.apache.hadoop.hbase.util.Byte
  * of the file with keys that sort greater than those of the bottom half.
  * The top includes the split file's midkey, or the key that follows if it does
  * not exist in the file.
- * 
+ *
  * <p>This type works in tandem with the {@link Reference} type.  This class
  * is used for reading while Reference is used for writing.
- * 
+ *
  * <p>This file is not splittable.  Calls to {@link #midkey()} return null.
  */
 public class HalfHFileReader extends HFile.Reader {
@@ -116,7 +116,7 @@ public class HalfHFileReader extends HFi
 
       public boolean next() throws IOException {
         if (atEnd) return false;
-        
+
         boolean b = delegate.next();
         if (!b) {
           return b;
@@ -215,7 +215,7 @@ public class HalfHFileReader extends HFi
   @Override
   public byte[] getLastKey() {
     if (top) {
-      return super.getLastKey(); 
+      return super.getLastKey();
     }
     // Get a scanner that caches the block and that uses pread.
     HFileScanner scanner = getScanner(true, true);

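The top/bottom decision described above reduces to a lexicographic comparison
against the split key.  A simplified, self-contained sketch; the real reader
compares raw KeyValue keys with the store's comparator rather than plain byte
arrays:

  import org.apache.hadoop.hbase.util.Bytes;

  public class HalfFileRouting {
    /** True when the key belongs to the top half, i.e. sorts at or after
     *  the split key; false means the bottom half. */
    public static boolean isTop(byte[] key, byte[] splitKey) {
      return Bytes.compareTo(key, splitKey) >= 0;
    }
  }
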
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java Fri May  7 19:17:48 2010
@@ -59,15 +59,15 @@ implements SortedMap<byte[],V>, Configur
 
   /**
    * Constructor where another SortedMap can be used
-   * 
-   * @param map the SortedMap to be used 
+   *
+   * @param map the SortedMap to be used
    */
   public HbaseMapWritable(SortedMap<byte[], V> map){
     conf = new AtomicReference<Configuration>();
     instance = map;
   }
-  
-  
+
+
   /** @return the conf */
   public Configuration getConf() {
     return conf.get();
@@ -97,7 +97,7 @@ implements SortedMap<byte[],V>, Configur
   public V get(Object key) {
     return instance.get(key);
   }
-  
+
   public boolean isEmpty() {
     return instance.isEmpty();
   }
@@ -149,7 +149,7 @@ implements SortedMap<byte[],V>, Configur
   public SortedMap<byte[], V> tailMap(byte[] fromKey) {
     return this.instance.tailMap(fromKey);
   }
-  
+
   // Writable
 
   /** @return the Class class for the specified id */
@@ -167,7 +167,7 @@ implements SortedMap<byte[],V>, Configur
     }
     return b;
   }
-  
+
   /**
    * @see java.lang.Object#toString()
    */

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java Fri May  7 19:17:48 2010
@@ -58,7 +58,7 @@ import org.apache.hadoop.io.WritableFact
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.util.Bytes;
 
-/** 
+/**
  * This is a customized version of the polymorphic hadoop
  * {@link ObjectWritable}.  It removes UTF8 (HADOOP-414).
  * Using {@link Text} instead of UTF-8 saves ~2% CPU between reading and writing
@@ -75,7 +75,7 @@ import org.apache.hadoop.hbase.util.Byte
  */
 public class HbaseObjectWritable implements Writable, Configurable {
   protected final static Log LOG = LogFactory.getLog(HbaseObjectWritable.class);
-  
+
   // Here we maintain two static maps of classes to code and vice versa.
   // Add new classes+codes as wanted or figure way to auto-generate these
   // maps from the HMasterInterface.
@@ -98,12 +98,12 @@ public class HbaseObjectWritable impleme
     addToMap(Float.TYPE, code++);
     addToMap(Double.TYPE, code++);
     addToMap(Void.TYPE, code++);
-    
+
     // Other java types
     addToMap(String.class, code++);
     addToMap(byte [].class, code++);
     addToMap(byte [][].class, code++);
-    
+
     // Hadoop types
     addToMap(Text.class, code++);
     addToMap(Writable.class, code++);
@@ -126,7 +126,7 @@ public class HbaseObjectWritable impleme
     addToMap(HServerInfo.class, code++);
     addToMap(HTableDescriptor.class, code++);
     addToMap(MapWritable.class, code++);
-    
+
     //
     // HBASE-880
     //
@@ -162,10 +162,10 @@ public class HbaseObjectWritable impleme
     addToMap(MultiPut.class, code++);
     addToMap(MultiPutResponse.class, code++);
 
-    // List 
+    // List
     addToMap(List.class, code++);
   }
-  
+
   private Class<?> declaredClass;
   private Object instance;
   private Configuration conf;
@@ -174,7 +174,7 @@ public class HbaseObjectWritable impleme
   public HbaseObjectWritable() {
     super();
   }
-  
+
   /**
    * @param instance
    */
@@ -193,10 +193,10 @@ public class HbaseObjectWritable impleme
 
   /** @return the instance, or null if none. */
   public Object get() { return instance; }
-  
+
   /** @return the class this is meant to be. */
   public Class<?> getDeclaredClass() { return declaredClass; }
-  
+
   /**
    * Reset the instance.
    * @param instance
@@ -214,11 +214,11 @@ public class HbaseObjectWritable impleme
     return "OW[class=" + declaredClass + ",value=" + instance + "]";
   }
 
-  
+
   public void readFields(DataInput in) throws IOException {
     readObject(in, this, this.conf);
   }
-  
+
   public void write(DataOutput out) throws IOException {
     writeObject(out, instance, declaredClass, conf);
   }
@@ -227,7 +227,7 @@ public class HbaseObjectWritable impleme
     Class<?> declaredClass;
     /** default constructor for writable */
     public NullInstance() { super(null); }
-    
+
     /**
      * @param declaredClass
      * @param conf
@@ -236,16 +236,16 @@ public class HbaseObjectWritable impleme
       super(conf);
       this.declaredClass = declaredClass;
     }
-    
+
     public void readFields(DataInput in) throws IOException {
       this.declaredClass = CODE_TO_CLASS.get(in.readByte());
     }
-    
+
     public void write(DataOutput out) throws IOException {
       writeClassCode(out, this.declaredClass);
     }
   }
-  
+
   /**
    * Write out the code byte for passed Class.
    * @param out
@@ -279,13 +279,13 @@ public class HbaseObjectWritable impleme
    */
   @SuppressWarnings("unchecked")
   public static void writeObject(DataOutput out, Object instance,
-                                 Class declaredClass, 
+                                 Class declaredClass,
                                  Configuration conf)
   throws IOException {
 
     Object instanceObj = instance;
     Class declClass = declaredClass;
-    
+
     if (instanceObj == null) {                       // null
       instanceObj = new NullInstance(declClass, conf);
       declClass = Writable.class;
@@ -345,8 +345,8 @@ public class HbaseObjectWritable impleme
       throw new IOException("Can't write: "+instanceObj+" as "+declClass);
     }
   }
-  
-  
+
+
   /**
    * Read a {@link Writable}, {@link String}, primitive type, or an array of
    * the preceding.
@@ -359,7 +359,7 @@ public class HbaseObjectWritable impleme
     throws IOException {
     return readObject(in, null, conf);
   }
-    
+
   /**
    * Read a {@link Writable}, {@link String}, primitive type, or an array of
    * the preceding.
@@ -443,7 +443,7 @@ public class HbaseObjectWritable impleme
   }
 
   @SuppressWarnings("unchecked")
-  private static Class getClassByName(Configuration conf, String className) 
+  private static Class getClassByName(Configuration conf, String className)
   throws ClassNotFoundException {
     if(conf != null) {
       return conf.getClassByName(className);
@@ -454,7 +454,7 @@ public class HbaseObjectWritable impleme
     }
     return Class.forName(className, true, cl);
   }
-  
+
   private static void addToMap(final Class<?> clazz, final byte code) {
     CLASS_TO_CODE.put(clazz, code);
     CODE_TO_CLASS.put(code, clazz);

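A round-trip sketch using the writeObject/readObject signatures visible in
this diff; the payload is an arbitrary byte array:

  import java.io.IOException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.io.HbaseObjectWritable;
  import org.apache.hadoop.hbase.util.Bytes;
  import org.apache.hadoop.io.DataInputBuffer;
  import org.apache.hadoop.io.DataOutputBuffer;

  public class ObjectWritableRoundTrip {
    public static Object roundTrip() throws IOException {
      Configuration conf = new Configuration();
      DataOutputBuffer out = new DataOutputBuffer();
      // The one-byte class code for byte[] is written first, then the payload.
      HbaseObjectWritable.writeObject(out, Bytes.toBytes("x"), byte[].class, conf);
      DataInputBuffer in = new DataInputBuffer();
      in.reset(out.getData(), out.getLength());
      return HbaseObjectWritable.readObject(in, conf);
    }
  }
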
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HeapSize.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HeapSize.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HeapSize.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/HeapSize.java Fri May  7 19:17:48 2010
@@ -31,7 +31,7 @@ package org.apache.hadoop.hbase.io;
  * For example:
  * <pre>
  * public class SampleObject implements HeapSize {
- *   
+ *
  *   int [] numbers;
  *   int x;
  * }
@@ -43,5 +43,5 @@ public interface HeapSize {
    * count of payload and hosting object sizings.
   */
   public long heapSize();
-  
+
 }

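The SampleObject in the Javadoc above omits its heapSize() body; one plausible
completion follows.  The per-object and per-array overheads are illustrative
guesses, not constants HBase defines:

  import org.apache.hadoop.hbase.io.HeapSize;

  public class SampleObject implements HeapSize {
    int[] numbers;
    int x;

    public long heapSize() {
      // Assumed overheads: 16-byte object header, 4-byte int field,
      // 8-byte array reference, 16-byte array header plus payload.
      long size = 16 + 4 + 8;
      if (numbers != null) {
        size += 16 + 4L * numbers.length;
      }
      return size;
    }
  }
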
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java Fri May  7 19:17:48 2010
@@ -29,7 +29,7 @@ import org.apache.hadoop.io.BytesWritabl
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
 
-/** 
+/**
  * A byte sequence that is usable as a key or value.  Based on
  * {@link org.apache.hadoop.io.BytesWritable} only this class is NOT resizable
  * and DOES NOT distinguish between the size of the seqeunce and the current
@@ -43,14 +43,14 @@ implements WritableComparable<ImmutableB
   private byte[] bytes;
   private int offset;
   private int length;
-  
+
   /**
    * Create a zero-size sequence.
    */
   public ImmutableBytesWritable() {
     super();
   }
-  
+
   /**
    * Create a ImmutableBytesWritable using the byte array as the initial value.
    * @param bytes This array becomes the backing storage for the object.
@@ -58,7 +58,7 @@ implements WritableComparable<ImmutableB
   public ImmutableBytesWritable(byte[] bytes) {
     this(bytes, 0, bytes.length);
   }
-  
+
   /**
    * Set the new ImmutableBytesWritable to the contents of the passed
    * <code>ibw</code>.
@@ -67,7 +67,7 @@ implements WritableComparable<ImmutableB
   public ImmutableBytesWritable(final ImmutableBytesWritable ibw) {
     this(ibw.get(), 0, ibw.getSize());
   }
-  
+
   /**
    * Set the value to a given byte range
    * @param bytes the new byte range to set to
@@ -80,7 +80,7 @@ implements WritableComparable<ImmutableB
     this.offset = offset;
     this.length = length;
   }
-  
+
   /**
    * Get the data from the BytesWritable.
    * @return The data is only valid between 0 and getSize() - 1.
@@ -92,7 +92,7 @@ implements WritableComparable<ImmutableB
     }
     return this.bytes;
   }
-  
+
   /**
    * @param b Use passed bytes as backing array for this instance.
    */
@@ -110,7 +110,7 @@ implements WritableComparable<ImmutableB
     this.offset = offset;
     this.length = length;
   }
-  
+
   /**
    * @return the current size of the buffer.
    */
@@ -121,7 +121,7 @@ implements WritableComparable<ImmutableB
     }
     return this.length;
   }
- 
+
   /**
    * @return the current length of the buffer.  Same as getSize()
    */
@@ -134,7 +134,7 @@ implements WritableComparable<ImmutableB
     }
     return this.length;
   }
-  
+
   /**
    * @return offset
    */
@@ -148,19 +148,19 @@ implements WritableComparable<ImmutableB
     in.readFully(this.bytes, 0, this.length);
     this.offset = 0;
   }
-  
+
   public void write(final DataOutput out) throws IOException {
     out.writeInt(this.length);
     out.write(this.bytes, this.offset, this.length);
   }
-  
+
   // Below methods copied from BytesWritable
 
   @Override
   public int hashCode() {
     return WritableComparator.hashBytes(bytes, this.length);
   }
-  
+
   /**
    * Define the sort order of the BytesWritable.
    * @param right_obj The other bytes writable
@@ -170,7 +170,7 @@ implements WritableComparable<ImmutableB
   public int compareTo(ImmutableBytesWritable right_obj) {
     return compareTo(right_obj.get());
   }
-  
+
   /**
    * Compares the bytes in this object to the specified byte array
    * @param that
@@ -200,7 +200,7 @@ implements WritableComparable<ImmutableB
    * @see java.lang.Object#toString()
    */
   @Override
-  public String toString() { 
+  public String toString() {
     StringBuffer sb = new StringBuffer(3*this.bytes.length);
     for (int idx = 0; idx < this.bytes.length; idx++) {
       // if not the first, put a blank separator in
@@ -218,7 +218,7 @@ implements WritableComparable<ImmutableB
   }
 
   /** A Comparator optimized for ImmutableBytesWritable.
-   */ 
+   */
   public static class Comparator extends WritableComparator {
     private BytesWritable.Comparator comparator =
       new BytesWritable.Comparator();
@@ -236,11 +236,11 @@ implements WritableComparable<ImmutableB
       return comparator.compare(b1, s1, l1, b2, s2, l2);
     }
   }
-  
+
   static { // register this comparator
     WritableComparator.define(ImmutableBytesWritable.class, new Comparator());
   }
-  
+
   /**
    * @param array List of byte [].
    * @return Array of byte [].

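A small usage sketch; the keys are placeholders.  Note, per the constructor
Javadoc above, that the passed array becomes the backing storage and is not
copied:

  import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
  import org.apache.hadoop.hbase.util.Bytes;

  public class ImmutableBytesWritableExample {
    public static int compareKeys() {
      ImmutableBytesWritable a =
        new ImmutableBytesWritable(Bytes.toBytes("row-a"));
      ImmutableBytesWritable b =
        new ImmutableBytesWritable(Bytes.toBytes("row-b"));
      return a.compareTo(b);  // negative: "row-a" sorts first
    }
  }
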
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/Reference.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/Reference.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/Reference.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/Reference.java Fri May  7 19:17:48 2010
@@ -36,7 +36,7 @@ import org.apache.hadoop.io.Writable;
 /**
  * A reference to the top or bottom half of a store file.  The file referenced
  * lives under a different region.  References are made at region split time.
- * 
+ *
  * <p>References work with a special half store file type.  References know how
  * to write out the reference format in the file system and are what's juggled
  * when references are mixed in with direct store files.  The half store file
@@ -53,7 +53,7 @@ public class Reference implements Writab
   private byte [] splitkey;
   private Range region;
 
-  /** 
+  /**
    * For split HStoreFiles, it specifies if the file covers the lower half or
    * the upper half of the key range
    */
@@ -83,7 +83,7 @@ public class Reference implements Writab
   }
 
   /**
-   * 
+   *
    * @return Range
    */
   public Range getFileRegion() {

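A sketch of creating the two references made at region split time.  It
assumes a two-argument constructor taking the split key and a Range value,
which is how the fields above are populated:

  import org.apache.hadoop.hbase.io.Reference;

  public class ReferenceExample {
    public static Reference[] forSplit(byte[] splitKey) {
      // One Reference per daughter region: top covers keys at and after
      // the split key, bottom covers keys before it (assumed enum names).
      return new Reference[] {
        new Reference(splitKey, Reference.Range.top),
        new Reference(splitKey, Reference.Range.bottom),
      };
    }
  }
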
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/RowResult.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/RowResult.java?rev=942184&r1=942183&r2=942184&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/RowResult.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/RowResult.java Fri May  7 19:17:48 2010
@@ -66,7 +66,7 @@ public class RowResult implements Writab
     this.row = row;
     this.cells = m;
   }
-  
+
   /**
    * Get the row for this RowResult
    * @return the row
@@ -75,9 +75,9 @@ public class RowResult implements Writab
     return row;
   }
 
-  // 
+  //
   // Map interface
-  // 
+  //
   public Cell put(byte [] key,
     Cell value) {
     throw new UnsupportedOperationException("RowResult is read-only!");
@@ -99,7 +99,7 @@ public class RowResult implements Writab
   public boolean containsKey(Object key) {
     return cells.containsKey(key);
   }
-  
+
   /**
    * Check if the key can be found in this RowResult
    * @param key
@@ -136,10 +136,10 @@ public class RowResult implements Writab
   public Set<Map.Entry<byte [], Cell>> entrySet() {
     return Collections.unmodifiableSet(this.cells.entrySet());
   }
-  
+
   /**
    * This method is used solely for REST serialization
-   * 
+   *
    * @return Cells
    */
   public RestCell[] getCells() {
@@ -159,7 +159,7 @@ public class RowResult implements Writab
     }
     return result;
   }
-  
+
   /**
    * Get the Cell that corresponds to column
    * @param column
@@ -168,7 +168,7 @@ public class RowResult implements Writab
   public Cell get(byte [] column) {
     return this.cells.get(column);
   }
-  
+
   /**
    * Get the Cell that corresponds to column, using a String key
    * @param key
@@ -187,7 +187,7 @@ public class RowResult implements Writab
   public Cell get(byte [] family, byte [] columnQualifier) {
     return get(Bytes.add(family, KeyValue.COLUMN_FAMILY_DELIM_ARRAY, columnQualifier));
   }
-  
+
 
   public Comparator<? super byte[]> comparator() {
     return this.cells.comparator();
@@ -219,25 +219,25 @@ public class RowResult implements Writab
   public class Entry implements Map.Entry<byte [], Cell> {
     private final byte [] column;
     private final Cell cell;
-    
+
     Entry(byte [] row, Cell cell) {
       this.column = row;
       this.cell = cell;
     }
-    
+
     public Cell setValue(Cell c) {
       throw new UnsupportedOperationException("RowResult is read-only!");
     }
-    
+
     public byte [] getKey() {
       return column;
     }
-    
+
     public Cell getValue() {
       return cell;
     }
   }
-  
+
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
@@ -264,20 +264,20 @@ public class RowResult implements Writab
           sb.append(ioe.toString());
         }
       } else {
-        sb.append(Bytes.toStringBinary(v)); 
+        sb.append(Bytes.toStringBinary(v));
       }
       sb.append(")");
     }
     sb.append("}");
     return sb.toString();
   }
-  
+
   /* (non-Javadoc)
    * @see org.apache.hadoop.hbase.rest.xml.IOutputXML#toXML()
    */
   public void restSerialize(IRestSerializer serializer) throws HBaseRestException {
     serializer.serializeRowResult(this);
-  }  
+  }
 
   /**
    * @param l
@@ -322,7 +322,7 @@ public class RowResult implements Writab
     Bytes.writeByteArray(out, this.row);
     this.cells.write(out);
   }
-  
+
   //
   // Comparable
   //