Posted to commits@hbase.apache.org by st...@apache.org on 2010/05/07 21:26:51 UTC

svn commit: r942186 [15/18] - in /hadoop/hbase/trunk: ./ contrib/stargate/core/src/test/java/org/apache/hadoop/hbase/stargate/ core/src/main/java/org/apache/hadoop/hbase/ core/src/main/java/org/apache/hadoop/hbase/client/ core/src/main/java/org/apache/...

Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestGetRowVersions.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestGetRowVersions.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestGetRowVersions.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestGetRowVersions.java Fri May  7 19:26:45 2010
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.util.Byte
  */
 public class TestGetRowVersions extends HBaseClusterTestCase {
   private static final Log LOG = LogFactory.getLog(TestGetRowVersions.class);
-  
+
   private static final String TABLE_NAME = "test";
   private static final byte [] CONTENTS = Bytes.toBytes("contents");
   private static final byte [] ROW = Bytes.toBytes("row");
@@ -92,7 +92,7 @@ public class TestGetRowVersions extends 
     assertTrue(Bytes.equals(value, VALUE2));
     NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map =
       r.getMap();
-    NavigableMap<byte[], NavigableMap<Long, byte[]>> familyMap = 
+    NavigableMap<byte[], NavigableMap<Long, byte[]>> familyMap =
       map.get(CONTENTS);
     NavigableMap<Long, byte[]> versionMap = familyMap.get(CONTENTS);
     assertTrue(versionMap.size() == 2);

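For context, the TestGetRowVersions change above is whitespace-only; the map it touches is the three-level family -> qualifier -> timestamp -> value structure returned by Result.getMap(). A minimal sketch of reading two versions of the contents:contents cell through that API (cluster setup omitted; Get.setMaxVersions is assumed from the 0.20 client, not shown in the diff):

    import java.util.NavigableMap;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;

    public class GetVersionsSketch {
      public static void main(String[] args) throws Exception {
        HTable table = new HTable(new HBaseConfiguration(), "test");
        Get get = new Get(Bytes.toBytes("row"));
        get.setMaxVersions(2);               // ask for up to two versions
        Result r = table.get(get);
        // family -> qualifier -> timestamp -> value, as in the test above
        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map =
            r.getMap();
        NavigableMap<Long, byte[]> versions =
            map.get(Bytes.toBytes("contents")).get(Bytes.toBytes("contents"));
        for (Long ts : versions.keySet()) {
          System.out.println(ts + " -> " + Bytes.toString(versions.get(ts)));
        }
      }
    }
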
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestHTablePool.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestHTablePool.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestHTablePool.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestHTablePool.java Fri May  7 19:26:45 2010
@@ -39,18 +39,18 @@ import org.junit.Test;
 public class TestHTablePool  {
 
   private static HBaseTestingUtility TEST_UTIL   =  new HBaseTestingUtility();
-  
+
   @BeforeClass
-  public static void beforeClass() throws Exception { 
+  public static void beforeClass() throws Exception {
     TEST_UTIL.startMiniCluster(1);
 
   }
-  
+
   @AfterClass
-  public static void afterClass() throws IOException { 
+  public static void afterClass() throws IOException {
     TEST_UTIL.shutdownMiniCluster();
   }
-  
+
   @Test
   public void testTableWithStringName() {
     HTablePool pool = new HTablePool((HBaseConfiguration)null, Integer.MAX_VALUE);
@@ -131,11 +131,11 @@ public class TestHTablePool  {
     Assert.assertSame(table1, sameTable1);
     Assert.assertSame(table2, sameTable2);
   }
-  
-  
+
+
   @Test
-  public void testCloseTablePool() throws IOException { 
-    
+  public void testCloseTablePool() throws IOException {
+
     HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 4);
     String tableName = "testTable";
     HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
@@ -149,24 +149,24 @@ public class TestHTablePool  {
     tableDescriptor.addFamily(new HColumnDescriptor("randomFamily"));
     admin.createTable(tableDescriptor);
 
-    
+
     // Request tables from an empty pool
     HTableInterface[] tables = new HTableInterface[4];
     for (int i = 0; i < 4; ++i ) {
       tables[i] = pool.getTable(tableName);
     }
-    
+
     pool.closeTablePool(tableName);
-    
+
     for (int i = 0; i < 4; ++i ) {
       pool.putTable(tables[i]);
     }
 
     Assert.assertEquals(4, pool.getCurrentPoolSize(tableName));
-    
+
     pool.closeTablePool(tableName);
 
     Assert.assertEquals(0, pool.getCurrentPoolSize(tableName));
-    
+
   }
 }

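The TestHTablePool hunks are likewise whitespace-only; the pool lifecycle the test drives is, roughly (a sketch built from the calls visible in the diff, error handling elided):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.HTablePool;

    public class PoolSketch {
      public static void main(String[] args) throws Exception {
        // Cache at most 4 HTable instances per table name.
        HTablePool pool = new HTablePool(new HBaseConfiguration(), 4);
        HTableInterface table = pool.getTable("testTable"); // check one out
        try {
          // ... use the table ...
        } finally {
          pool.putTable(table);           // return it to the pool
        }
        pool.closeTablePool("testTable"); // discard all cached instances
      }
    }

As testCloseTablePool shows, tables checked out before closeTablePool() can still be returned afterwards, and a second closeTablePool() drains the pool back to zero.
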
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestTimestamp.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestTimestamp.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestTimestamp.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/client/TestTimestamp.java Fri May  7 19:26:45 2010
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.Timestamp
  */
 public class TestTimestamp extends HBaseClusterTestCase {
   public static String COLUMN_NAME = "colfamily1";
-  
+
   /** constructor */
   public TestTimestamp() {
     super();
@@ -51,7 +51,7 @@ public class TestTimestamp extends HBase
         cluster.flushcache();
       }
      });
-    
+
     // Perhaps drop and re-add the table between tests so the former does
     // not pollute the latter?  Or put into separate tests.
     TimestampTestBase.doTestTimestampScanning(incommon, new FlushCache() {
@@ -60,8 +60,8 @@ public class TestTimestamp extends HBase
       }
     });
   }
-  
-  /* 
+
+  /*
    * Create a table named TABLE_NAME.
    * @return An instance of an HTable connected to the created table.
    * @throws IOException

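TimestampTestBase supplies the actual assertions here; the client-side timestamp API it exercises looks roughly like this (a hedged sketch -- the row, family, and values are illustrative, and Put.add/Get.setTimeStamp are assumed from the 0.20 client rather than taken from this diff):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TimestampSketch {
      public static void main(String[] args) throws Exception {
        byte[] row = Bytes.toBytes("row");
        byte[] fam = Bytes.toBytes("colfamily1");
        byte[] qual = Bytes.toBytes("contents");
        HTable table = new HTable(new HBaseConfiguration(), "test");
        // Write the same cell at two explicit timestamps.
        Put p = new Put(row);
        p.add(fam, qual, 100L, Bytes.toBytes("value@100"));
        p.add(fam, qual, 200L, Bytes.toBytes("value@200"));
        table.put(p);
        // Read back only the version written at ts=100.
        Get g = new Get(row);
        g.setTimeStamp(100L);
        Result r = table.get(g);
        System.out.println(Bytes.toString(r.getValue(fam, qual)));
      }
    }
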
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java Fri May  7 19:26:45 2010
@@ -39,19 +39,19 @@ public class TestColumnPaginationFilter 
     private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test");
     private static final byte[] VAL_1 = Bytes.toBytes("a");
     private static final byte [] COLUMN_QUALIFIER = Bytes.toBytes("foo");
-    
+
     private Filter columnPaginationFilter;
-    
+
     @Override
     protected void setUp() throws Exception {
         super.setUp();
         columnPaginationFilter = getColumnPaginationFilter();
-        
+
     }
     private Filter getColumnPaginationFilter() {
         return new ColumnPaginationFilter(1,0);
     }
-    
+
     private Filter serializationTest(Filter filter) throws Exception {
         ByteArrayOutputStream stream = new ByteArrayOutputStream();
         DataOutputStream out = new DataOutputStream(stream);
@@ -66,29 +66,29 @@ public class TestColumnPaginationFilter 
 
         return newFilter;
     }
-    
-    
+
+
     /**
      * The more specific functionality tests are contained within the TestFilters class.  This class is mainly for testing
     * serialization.
-     * 
+     *
      * @param filter
      * @throws Exception
      */
-    private void basicFilterTests(ColumnPaginationFilter filter) throws Exception 
+    private void basicFilterTests(ColumnPaginationFilter filter) throws Exception
     {
       KeyValue kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_1);
       assertTrue("basicFilter1", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
     }
-    
+
     /**
      * Tests serialization
      * @throws Exception
-     */                       
+     */
     public void testSerialization() throws Exception {
       Filter newFilter = serializationTest(columnPaginationFilter);
       basicFilterTests((ColumnPaginationFilter)newFilter);
-    }   
-    
+    }
+
 
 }

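serializationTest above is the standard Writable round-trip for filters; condensed to its essentials it is (same calls as the diff, standalone):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;
    import org.apache.hadoop.hbase.filter.Filter;

    public class FilterSerializationSketch {
      public static void main(String[] args) throws Exception {
        Filter filter = new ColumnPaginationFilter(1, 0); // limit 1, offset 0
        // Serialize through the Writable interface...
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(stream);
        filter.write(out);
        out.close();
        // ...then reconstitute from the raw bytes via readFields().
        DataInputStream in =
            new DataInputStream(new ByteArrayInputStream(stream.toByteArray()));
        Filter copy = new ColumnPaginationFilter(); // no-arg ctor for Writable
        copy.readFields(in);
      }
    }
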
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java Fri May  7 19:26:45 2010
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.Byte
 public class TestFilter extends HBaseTestCase {
   private final Log LOG = LogFactory.getLog(this.getClass());
   private HRegion region;
-  
+
   //
   // Rows, Qualifiers, and Values are in two groups, One and Two.
   //
@@ -64,7 +64,7 @@ public class TestFilter extends HBaseTes
       Bytes.toBytes("testRowTwo-0"), Bytes.toBytes("testRowTwo-1"),
       Bytes.toBytes("testRowTwo-2"), Bytes.toBytes("testRowTwo-3")
   };
-  
+
   private static final byte [][] FAMILIES = {
     Bytes.toBytes("testFamilyOne"), Bytes.toBytes("testFamilyTwo")
   };
@@ -73,20 +73,20 @@ public class TestFilter extends HBaseTes
     Bytes.toBytes("testQualifierOne-0"), Bytes.toBytes("testQualifierOne-1"),
     Bytes.toBytes("testQualifierOne-2"), Bytes.toBytes("testQualifierOne-3")
   };
-  
+
   private static final byte [][] QUALIFIERS_TWO = {
     Bytes.toBytes("testQualifierTwo-0"), Bytes.toBytes("testQualifierTwo-1"),
     Bytes.toBytes("testQualifierTwo-2"), Bytes.toBytes("testQualifierTwo-3")
   };
-  
+
   private static final byte [][] VALUES = {
     Bytes.toBytes("testValueOne"), Bytes.toBytes("testValueTwo")
   };
-  
+
   private long numRows = ROWS_ONE.length + ROWS_TWO.length;
   private long colsPerRow = FAMILIES.length * QUALIFIERS_ONE.length;
-    
-  
+
+
   protected void setUp() throws Exception {
     super.setUp();
     HTableDescriptor htd = new HTableDescriptor(getName());
@@ -94,7 +94,7 @@ public class TestFilter extends HBaseTes
     htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
     HRegionInfo info = new HRegionInfo(htd, null, null, false);
     this.region = HRegion.createHRegion(info, this.testDir, this.conf);
-    
+
     // Insert first half
     for(byte [] ROW : ROWS_ONE) {
       Put p = new Put(ROW);
@@ -110,10 +110,10 @@ public class TestFilter extends HBaseTes
       }
       this.region.put(p);
     }
-    
+
     // Flush
     this.region.flushcache();
-    
+
     // Insert second half (reverse families)
     for(byte [] ROW : ROWS_ONE) {
       Put p = new Put(ROW);
@@ -129,14 +129,14 @@ public class TestFilter extends HBaseTes
       }
       this.region.put(p);
     }
-    
+
     // Delete the second qualifier from all rows and families
     for(byte [] ROW : ROWS_ONE) {
       Delete d = new Delete(ROW);
       d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
       d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
       this.region.delete(d, null, false);
-    }    
+    }
     for(byte [] ROW : ROWS_TWO) {
       Delete d = new Delete(ROW);
       d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
@@ -144,7 +144,7 @@ public class TestFilter extends HBaseTes
       this.region.delete(d, null, false);
     }
     colsPerRow -= 2;
-    
+
     // Delete the second rows from both groups, one column at a time
     for(byte [] QUALIFIER : QUALIFIERS_ONE) {
       Delete d = new Delete(ROWS_ONE[1]);
@@ -170,7 +170,7 @@ public class TestFilter extends HBaseTes
     // No filter
     long expectedRows = this.numRows;
     long expectedKeys = this.colsPerRow;
-    
+
     // Both families
     Scan s = new Scan();
     verifyScan(s, expectedRows, expectedKeys);
@@ -180,7 +180,7 @@ public class TestFilter extends HBaseTes
     s.addFamily(FAMILIES[0]);
     verifyScan(s, expectedRows, expectedKeys/2);
   }
-  
+
   public void testPrefixFilter() throws Exception {
     // Grab rows from group one (half of total)
     long expectedRows = this.numRows / 2;
@@ -189,9 +189,9 @@ public class TestFilter extends HBaseTes
     s.setFilter(new PrefixFilter(Bytes.toBytes("testRowOne")));
     verifyScan(s, expectedRows, expectedKeys);
   }
-  
+
   public void testPageFilter() throws Exception {
-    
+
     // KVs in first 6 rows
     KeyValue [] expectedKVs = {
       // testRowOne-0
@@ -237,7 +237,7 @@ public class TestFilter extends HBaseTes
       new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
       new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1])
     };
-    
+
     // Grab all 6 rows
     long expectedRows = 6;
     long expectedKeys = this.colsPerRow;
@@ -246,7 +246,7 @@ public class TestFilter extends HBaseTes
     verifyScan(s, expectedRows, expectedKeys);
     s.setFilter(new PageFilter(expectedRows));
     verifyScanFull(s, expectedKVs);
-    
+
     // Grab first 4 rows (6 cols per row)
     expectedRows = 4;
     expectedKeys = this.colsPerRow;
@@ -255,7 +255,7 @@ public class TestFilter extends HBaseTes
     verifyScan(s, expectedRows, expectedKeys);
     s.setFilter(new PageFilter(expectedRows));
     verifyScanFull(s, Arrays.copyOf(expectedKVs, 24));
-    
+
     // Grab first 2 rows
     expectedRows = 2;
     expectedKeys = this.colsPerRow;
@@ -273,7 +273,7 @@ public class TestFilter extends HBaseTes
     verifyScan(s, expectedRows, expectedKeys);
     s.setFilter(new PageFilter(expectedRows));
     verifyScanFull(s, Arrays.copyOf(expectedKVs, 6));
-    
+
   }
 
   /**
@@ -362,18 +362,18 @@ public class TestFilter extends HBaseTes
       }
     }
   }
-  
+
   public void testInclusiveStopFilter() throws IOException {
 
     // Grab rows from group one
-    
+
     // If we just use start/stop row, we get total/2 - 1 rows
     long expectedRows = (this.numRows / 2) - 1;
     long expectedKeys = this.colsPerRow;
-    Scan s = new Scan(Bytes.toBytes("testRowOne-0"), 
+    Scan s = new Scan(Bytes.toBytes("testRowOne-0"),
         Bytes.toBytes("testRowOne-3"));
     verifyScan(s, expectedRows, expectedKeys);
-    
+
     // Now use start row with inclusive stop filter
     expectedRows = this.numRows / 2;
     s = new Scan(Bytes.toBytes("testRowOne-0"));
@@ -381,14 +381,14 @@ public class TestFilter extends HBaseTes
     verifyScan(s, expectedRows, expectedKeys);
 
     // Grab rows from group two
-    
+
     // If we just use start/stop row, we get total/2 - 1 rows
     expectedRows = (this.numRows / 2) - 1;
     expectedKeys = this.colsPerRow;
-    s = new Scan(Bytes.toBytes("testRowTwo-0"), 
+    s = new Scan(Bytes.toBytes("testRowTwo-0"),
         Bytes.toBytes("testRowTwo-3"));
     verifyScan(s, expectedRows, expectedKeys);
-    
+
     // Now use start row with inclusive stop filter
     expectedRows = this.numRows / 2;
     s = new Scan(Bytes.toBytes("testRowTwo-0"));
@@ -396,9 +396,9 @@ public class TestFilter extends HBaseTes
     verifyScan(s, expectedRows, expectedKeys);
 
   }
-  
+
   public void testQualifierFilter() throws IOException {
-    
+
     // Match two keys (one from each family) in half the rows
     long expectedRows = this.numRows / 2;
     long expectedKeys = 2;
@@ -407,7 +407,7 @@ public class TestFilter extends HBaseTes
     Scan s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match keys less than same qualifier
     // Expect only two keys (one from each family) in half the rows
     expectedRows = this.numRows / 2;
@@ -417,7 +417,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match keys less than or equal
     // Expect four keys (two from each family) in half the rows
     expectedRows = this.numRows / 2;
@@ -427,7 +427,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match keys not equal
     // Expect four keys (two from each family)
     // Only look in first group of rows
@@ -438,7 +438,7 @@ public class TestFilter extends HBaseTes
     s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match keys greater or equal
     // Expect four keys (two from each family)
     // Only look in first group of rows
@@ -449,7 +449,7 @@ public class TestFilter extends HBaseTes
     s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match keys greater
     // Expect two keys (one from each family)
     // Only look in first group of rows
@@ -460,7 +460,7 @@ public class TestFilter extends HBaseTes
     s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match keys not equal to
     // Look across rows and fully validate the keys and ordering
     // Expect varied numbers of keys, 4 per row in group one, 6 per row in group two
@@ -468,7 +468,7 @@ public class TestFilter extends HBaseTes
         new BinaryComparator(QUALIFIERS_ONE[2]));
     s = new Scan();
     s.setFilter(f);
-    
+
     KeyValue [] kvs = {
         // testRowOne-0
         new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@@ -508,8 +508,8 @@ public class TestFilter extends HBaseTes
         new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
     };
     verifyScanFull(s, kvs);
-     
-    
+
+
     // Test across rows and groups with a regex
     // Filter out "test*-2"
     // Expect 4 keys per row across both groups
@@ -517,7 +517,7 @@ public class TestFilter extends HBaseTes
         new RegexStringComparator("test.+-2"));
     s = new Scan();
     s.setFilter(f);
-    
+
     kvs = new KeyValue [] {
         // testRowOne-0
         new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@@ -551,9 +551,9 @@ public class TestFilter extends HBaseTes
         new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
     };
     verifyScanFull(s, kvs);
-     
+
   }
-  
+
   public void testRowFilter() throws IOException {
 
     // Match a single row, all keys
@@ -564,7 +564,7 @@ public class TestFilter extends HBaseTes
     Scan s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match two rows, one from each group, using regex
     expectedRows = 2;
     expectedKeys = this.colsPerRow;
@@ -573,7 +573,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match rows less than
     // Expect all keys in one row
     expectedRows = 1;
@@ -583,7 +583,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match rows less than or equal
     // Expect all keys in two rows
     expectedRows = 2;
@@ -593,7 +593,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match rows not equal
     // Expect all keys in all but one row
     expectedRows = this.numRows - 1;
@@ -603,7 +603,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match keys greater or equal
     // Expect all keys in all but one row
     expectedRows = this.numRows - 1;
@@ -613,7 +613,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match keys greater
     // Expect all keys in all but two rows
     expectedRows = this.numRows - 2;
@@ -623,7 +623,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match rows not equal to testRowOne-2
     // Look across rows and fully validate the keys and ordering
     // Should see all keys in all rows but testRowOne-2
@@ -631,7 +631,7 @@ public class TestFilter extends HBaseTes
         new BinaryComparator(Bytes.toBytes("testRowOne-2")));
     s = new Scan();
     s.setFilter(f);
-    
+
     KeyValue [] kvs = {
         // testRowOne-0
         new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@@ -670,8 +670,8 @@ public class TestFilter extends HBaseTes
         new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
     };
     verifyScanFull(s, kvs);
-     
-    
+
+
     // Test across rows and groups with a regex
     // Filter out everything that doesn't match "*-2"
     // Expect all keys in two rows
@@ -679,7 +679,7 @@ public class TestFilter extends HBaseTes
         new RegexStringComparator(".+-2"));
     s = new Scan();
     s.setFilter(f);
-    
+
     kvs = new KeyValue [] {
         // testRowOne-2
         new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@@ -697,11 +697,11 @@ public class TestFilter extends HBaseTes
         new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1])
     };
     verifyScanFull(s, kvs);
-     
+
   }
-  
+
   public void testValueFilter() throws IOException {
-    
+
     // Match group one rows
     long expectedRows = this.numRows / 2;
     long expectedKeys = this.colsPerRow;
@@ -719,7 +719,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match all values using regex
     expectedRows = this.numRows;
     expectedKeys = this.colsPerRow;
@@ -728,7 +728,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match values less than
     // Expect group one rows
     expectedRows = this.numRows / 2;
@@ -738,7 +738,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match values less than or equal
     // Expect all rows
     expectedRows = this.numRows;
@@ -758,7 +758,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match values not equal
     // Expect half the rows
     expectedRows = this.numRows / 2;
@@ -768,7 +768,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match values greater or equal
     // Expect all rows
     expectedRows = this.numRows;
@@ -778,7 +778,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match values greater
     // Expect half rows
     expectedRows = this.numRows / 2;
@@ -788,7 +788,7 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-    
+
     // Match values not equal to testValueOne
     // Look across rows and fully validate the keys and ordering
     // Should see all keys in all group two rows
@@ -796,7 +796,7 @@ public class TestFilter extends HBaseTes
         new BinaryComparator(Bytes.toBytes("testValueOne")));
     s = new Scan();
     s.setFilter(f);
-    
+
     KeyValue [] kvs = {
         // testRowTwo-0
         new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
@@ -822,16 +822,16 @@ public class TestFilter extends HBaseTes
     };
     verifyScanFull(s, kvs);
   }
-  
+
   public void testSkipFilter() throws IOException {
-    
+
     // Test for qualifier not equal to "testQualifierOne-2"
     // Should only get rows from second group, and all keys
     Filter f = new SkipFilter(new QualifierFilter(CompareOp.NOT_EQUAL,
         new BinaryComparator(Bytes.toBytes("testQualifierOne-2"))));
     Scan s = new Scan();
     s.setFilter(f);
-    
+
     KeyValue [] kvs = {
         // testRowTwo-0
         new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
@@ -857,12 +857,12 @@ public class TestFilter extends HBaseTes
     };
     verifyScanFull(s, kvs);
   }
-    
+
   // TODO: This is important... need many more tests for ordering, etc
   // There are limited tests elsewhere but we need HRegion level ones here
   public void testFilterList() throws IOException {
-    
-    // Test getting a single row, single key using Row, Qualifier, and Value 
+
+    // Test getting a single row, single key using Row, Qualifier, and Value
     // regular expression and substring filters
     // Use must pass all
     List<Filter> filters = new ArrayList<Filter>();
@@ -888,10 +888,10 @@ public class TestFilter extends HBaseTes
     s = new Scan();
     s.setFilter(f);
     verifyScanNoEarlyOut(s, this.numRows, this.colsPerRow);
-    
-    
+
+
   }
-  
+
   public void testFirstKeyOnlyFilter() throws IOException {
     Scan s = new Scan();
     s.setFilter(new FirstKeyOnlyFilter());
@@ -906,18 +906,18 @@ public class TestFilter extends HBaseTes
     };
     verifyScanFull(s, kvs);
   }
-  
+
   public void testSingleColumnValueFilter() throws IOException {
-    
+
     // From HBASE-1821
     // Desired action is to combine two SCVF in a FilterList
     // Want to return only rows that match both conditions
-    
+
     // Need to change one of the group one columns to use group two value
     Put p = new Put(ROWS_ONE[2]);
     p.add(FAMILIES[0], QUALIFIERS_ONE[2], VALUES[1]);
     this.region.put(p);
-    
+
     // Now let's grab rows that have Q_ONE[0](VALUES[0]) and Q_ONE[2](VALUES[1])
     // Since group two rows don't have these qualifiers, they will pass
     // so limiting scan to group one
@@ -938,7 +938,7 @@ public class TestFilter extends HBaseTes
     };
     verifyScanNoEarlyOut(s, 1, 3);
     verifyScanFull(s, kvs);
-    
+
     // In order to get expected behavior without limiting to group one
     // need to wrap SCVFs in SkipFilters
     filters = new ArrayList<Filter>();
@@ -955,7 +955,7 @@ public class TestFilter extends HBaseTes
     verifyScanFull(s, kvs);
 
     // More tests from HBASE-1821 for Clint and filterIfMissing flag
-    
+
     byte [][] ROWS_THREE = {
         Bytes.toBytes("rowThree-0"), Bytes.toBytes("rowThree-1"),
         Bytes.toBytes("rowThree-2"), Bytes.toBytes("rowThree-3")
@@ -963,28 +963,28 @@ public class TestFilter extends HBaseTes
 
     // Give row 0 and 2 QUALIFIERS_ONE[0] (VALUE[0] VALUE[1])
     // Give row 1 and 3 QUALIFIERS_ONE[1] (VALUE[0] VALUE[1])
-    
+
     KeyValue [] srcKVs = new KeyValue [] {
         new KeyValue(ROWS_THREE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
         new KeyValue(ROWS_THREE[1], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[1]),
         new KeyValue(ROWS_THREE[2], FAMILIES[0], QUALIFIERS_ONE[1], VALUES[0]),
         new KeyValue(ROWS_THREE[3], FAMILIES[0], QUALIFIERS_ONE[1], VALUES[1])
     };
-    
+
     for(KeyValue kv : srcKVs) {
       this.region.put(new Put(kv.getRow()).add(kv));
     }
-    
+
     // Match VALUES[0] against QUALIFIERS_ONE[0] with filterIfMissing = false
     // Expect 3 rows (0, 2, 3)
-    SingleColumnValueFilter scvf = new SingleColumnValueFilter(FAMILIES[0], 
+    SingleColumnValueFilter scvf = new SingleColumnValueFilter(FAMILIES[0],
         QUALIFIERS_ONE[0], CompareOp.EQUAL, VALUES[0]);
     s = new Scan(ROWS_THREE[0], Bytes.toBytes("rowThree-4"));
     s.addFamily(FAMILIES[0]);
     s.setFilter(scvf);
     kvs = new KeyValue [] { srcKVs[0], srcKVs[2], srcKVs[3] };
     verifyScanFull(s, kvs);
-    
+
     // Match VALUES[0] against QUALIFIERS_ONE[0] with filterIfMissing = true
     // Expect 1 row (0)
     scvf = new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS_ONE[0],
@@ -995,10 +995,10 @@ public class TestFilter extends HBaseTes
     s.setFilter(scvf);
     kvs = new KeyValue [] { srcKVs[0] };
     verifyScanFull(s, kvs);
-    
+
     // Match VALUES[1] against QUALIFIERS_ONE[1] with filterIfMissing = true
     // Expect 1 row (3)
-    scvf = new SingleColumnValueFilter(FAMILIES[0], 
+    scvf = new SingleColumnValueFilter(FAMILIES[0],
         QUALIFIERS_ONE[1], CompareOp.EQUAL, VALUES[1]);
     scvf.setFilterIfMissing(true);
     s = new Scan(ROWS_THREE[0], Bytes.toBytes("rowThree-4"));
@@ -1006,14 +1006,14 @@ public class TestFilter extends HBaseTes
     s.setFilter(scvf);
     kvs = new KeyValue [] { srcKVs[3] };
     verifyScanFull(s, kvs);
-    
+
     // Add QUALIFIERS_ONE[1] to ROWS_THREE[0] with VALUES[0]
     KeyValue kvA = new KeyValue(ROWS_THREE[0], FAMILIES[0], QUALIFIERS_ONE[1], VALUES[0]);
     this.region.put(new Put(kvA.getRow()).add(kvA));
-    
+
     // Match VALUES[1] against QUALIFIERS_ONE[1] with filterIfMissing = true
     // Expect 1 row (3)
-    scvf = new SingleColumnValueFilter(FAMILIES[0], 
+    scvf = new SingleColumnValueFilter(FAMILIES[0],
         QUALIFIERS_ONE[1], CompareOp.EQUAL, VALUES[1]);
     scvf.setFilterIfMissing(true);
     s = new Scan(ROWS_THREE[0], Bytes.toBytes("rowThree-4"));
@@ -1021,10 +1021,10 @@ public class TestFilter extends HBaseTes
     s.setFilter(scvf);
     kvs = new KeyValue [] { srcKVs[3] };
     verifyScanFull(s, kvs);
-    
+
   }
-  
-  private void verifyScan(Scan s, long expectedRows, long expectedKeys) 
+
+  private void verifyScan(Scan s, long expectedRows, long expectedKeys)
   throws IOException {
     InternalScanner scanner = this.region.getScanner(s);
     List<KeyValue> results = new ArrayList<KeyValue>();
@@ -1035,7 +1035,7 @@ public class TestFilter extends HBaseTes
           KeyValue.COMPARATOR);
       LOG.info("counter=" + i + ", " + results);
       if (results.isEmpty()) break;
-      assertTrue("Scanned too many rows! Only expected " + expectedRows + 
+      assertTrue("Scanned too many rows! Only expected " + expectedRows +
           " total but already scanned " + (i+1), expectedRows > i);
       assertEquals("Expected " + expectedKeys + " keys per row but " +
           "returned " + results.size(), expectedKeys, results.size());
@@ -1046,9 +1046,9 @@ public class TestFilter extends HBaseTes
   }
 
 
-  
-  private void verifyScanNoEarlyOut(Scan s, long expectedRows, 
-      long expectedKeys) 
+
+  private void verifyScanNoEarlyOut(Scan s, long expectedRows,
+      long expectedKeys)
   throws IOException {
     InternalScanner scanner = this.region.getScanner(s);
     List<KeyValue> results = new ArrayList<KeyValue>();
@@ -1059,7 +1059,7 @@ public class TestFilter extends HBaseTes
           KeyValue.COMPARATOR);
       LOG.info("counter=" + i + ", " + results);
       if(results.isEmpty()) break;
-      assertTrue("Scanned too many rows! Only expected " + expectedRows + 
+      assertTrue("Scanned too many rows! Only expected " + expectedRows +
           " total but already scanned " + (i+1), expectedRows > i);
       assertEquals("Expected " + expectedKeys + " keys per row but " +
           "returned " + results.size(), expectedKeys, results.size());
@@ -1080,20 +1080,20 @@ public class TestFilter extends HBaseTes
       Arrays.sort(results.toArray(new KeyValue[results.size()]),
           KeyValue.COMPARATOR);
       if(results.isEmpty()) break;
-      assertTrue("Scanned too many keys! Only expected " + kvs.length + 
-          " total but already scanned " + (results.size() + idx) + 
-          (results.isEmpty() ? "" : "(" + results.get(0).toString() + ")"), 
+      assertTrue("Scanned too many keys! Only expected " + kvs.length +
+          " total but already scanned " + (results.size() + idx) +
+          (results.isEmpty() ? "" : "(" + results.get(0).toString() + ")"),
           kvs.length >= idx + results.size());
       for(KeyValue kv : results) {
-        LOG.info("row=" + row + ", result=" + kv.toString() + 
+        LOG.info("row=" + row + ", result=" + kv.toString() +
             ", match=" + kvs[idx].toString());
-        assertTrue("Row mismatch", 
+        assertTrue("Row mismatch",
             Bytes.equals(kv.getRow(), kvs[idx].getRow()));
-        assertTrue("Family mismatch", 
+        assertTrue("Family mismatch",
             Bytes.equals(kv.getFamily(), kvs[idx].getFamily()));
-        assertTrue("Qualifier mismatch", 
+        assertTrue("Qualifier mismatch",
             Bytes.equals(kv.getQualifier(), kvs[idx].getQualifier()));
-        assertTrue("Value mismatch", 
+        assertTrue("Value mismatch",
             Bytes.equals(kv.getValue(), kvs[idx].getValue()));
         idx++;
       }
@@ -1103,10 +1103,10 @@ public class TestFilter extends HBaseTes
     assertEquals("Expected " + kvs.length + " total keys but scanned " + idx,
         kvs.length, idx);
   }
-  
-  
+
+
   public void testColumnPaginationFilter() throws Exception {
-      
+
      // Set of KVs (page: 1; pageSize: 1) - the first set of 1 column per row
       KeyValue [] expectedKVs = {
         // testRowOne-0
@@ -1122,7 +1122,7 @@ public class TestFilter extends HBaseTes
         // testRowTwo-3
         new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1])
       };
-      
+
 
       // Set of KVs (page: 3; pageSize: 1)  - the third set of 1 column per row
       KeyValue [] expectedKVs2 = {
@@ -1139,7 +1139,7 @@ public class TestFilter extends HBaseTes
         // testRowTwo-3
         new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
       };
-      
+
       // Set of KVs (page: 2; pageSize 2)  - the 2nd set of 2 columns per row
       KeyValue [] expectedKVs3 = {
         // testRowOne-0
@@ -1161,8 +1161,8 @@ public class TestFilter extends HBaseTes
         new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
         new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
       };
-      
-      
+
+
       // Set of KVs for a page beyond the data - expect no columns at all
       KeyValue [] expectedKVs4 = {
 
@@ -1171,8 +1171,8 @@ public class TestFilter extends HBaseTes
       long expectedRows = this.numRows;
       long expectedKeys = 1;
       Scan s = new Scan();
-      
-      
+
+
       // Page 1; 1 Column per page  (Limit 1, Offset 0)
       s.setFilter(new ColumnPaginationFilter(1,0));
       verifyScan(s, expectedRows, expectedKeys);
@@ -1182,7 +1182,7 @@ public class TestFilter extends HBaseTes
       s.setFilter(new ColumnPaginationFilter(1,2));
       verifyScan(s, expectedRows, expectedKeys);
       this.verifyScanFull(s, expectedKVs2);
-      
+
       // Page 2; 2 Results per page (Limit 2, Offset 2)
       s.setFilter(new ColumnPaginationFilter(2,2));
       expectedKeys = 2;
@@ -1194,6 +1194,6 @@ public class TestFilter extends HBaseTes
       expectedKeys = 0;
       expectedRows = 0;
       verifyScan(s, expectedRows, 0);
-      this.verifyScanFull(s, expectedKVs4);     
+      this.verifyScanFull(s, expectedKVs4);
     }
 }

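The HBASE-1821 portion of testSingleColumnValueFilter is the most subtle part of this file: two SingleColumnValueFilters are AND-ed in a FilterList, and filterIfMissing decides whether rows lacking the tested column pass (false, the default) or are dropped (true). A condensed sketch of that combination (family/qualifier/value names mirror the test's constants; this is an illustration, not the test itself):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ScvfSketch {
      public static Scan buildScan() {
        byte[] fam = Bytes.toBytes("testFamilyOne");
        // Row must have qualifier-0 == testValueOne AND qualifier-2 == testValueTwo.
        SingleColumnValueFilter a = new SingleColumnValueFilter(fam,
            Bytes.toBytes("testQualifierOne-0"), CompareOp.EQUAL,
            Bytes.toBytes("testValueOne"));
        a.setFilterIfMissing(true); // drop rows that lack the column entirely
        SingleColumnValueFilter b = new SingleColumnValueFilter(fam,
            Bytes.toBytes("testQualifierOne-2"), CompareOp.EQUAL,
            Bytes.toBytes("testValueTwo"));
        b.setFilterIfMissing(true);
        List<Filter> filters = new ArrayList<Filter>();
        filters.add(a);
        filters.add(b);
        Scan s = new Scan();
        s.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, filters));
        return s;
      }
    }

The test's alternative, wrapping each SCVF in a SkipFilter, achieves the same end without restricting the scan's row range.
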
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java Fri May  7 19:26:45 2010
@@ -67,7 +67,7 @@ public class TestFilterList extends Test
     */
     filterMPONE.reset();
     assertFalse(filterMPONE.filterAllRemaining());
-    
+
     /* Will pass both */
     byte [] rowkey = Bytes.toBytes("yyyyyyyyy");
     for (int i = 0; i < MAX_PAGES - 1; i++) {
@@ -85,7 +85,7 @@ public class TestFilterList extends Test
     KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0),
         Bytes.toBytes(0));
     assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
-    
+
     /* PageFilter will fail now, but should pass because we match yyy */
     rowkey = Bytes.toBytes("yyy");
     assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
@@ -93,7 +93,7 @@ public class TestFilterList extends Test
     kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0),
         Bytes.toBytes(0));
     assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
-    
+
     /* We should filter any row */
     rowkey = Bytes.toBytes("z");
     assertTrue(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
@@ -168,7 +168,7 @@ public class TestFilterList extends Test
     */
     filterMPONE.reset();
     assertFalse(filterMPONE.filterAllRemaining());
-    
+
     /* We should be able to fill MAX_PAGES without incrementing page counter */
     byte [] rowkey = Bytes.toBytes("yyyyyyyy");
     for (int i = 0; i < MAX_PAGES; i++) {
@@ -178,7 +178,7 @@ public class TestFilterList extends Test
         assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
       assertFalse(filterMPONE.filterRow());
     }
-    
+
     /* Now let's fill the page filter */
     rowkey = Bytes.toBytes("xxxxxxx");
     for (int i = 0; i < MAX_PAGES; i++) {
@@ -188,7 +188,7 @@ public class TestFilterList extends Test
         assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
       assertFalse(filterMPONE.filterRow());
     }
-    
+
     /* We should still be able to include even though page filter is at max */
     rowkey = Bytes.toBytes("yyy");
     for (int i = 0; i < MAX_PAGES; i++) {

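filterMPONE in these tests is a FilterList with Operator.MUST_PASS_ONE: a KeyValue is included if ANY child filter includes it, which is why rows matching the "yyy" prefix still pass after the page filter has hit its limit. A minimal sketch of that OR-composition (the child filters here are illustrative stand-ins):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.PageFilter;
    import org.apache.hadoop.hbase.filter.PrefixFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FilterListSketch {
      public static Filter buildMustPassOne() {
        List<Filter> filters = new ArrayList<Filter>();
        filters.add(new PageFilter(2));                      // first 2 rows...
        filters.add(new PrefixFilter(Bytes.toBytes("yyy"))); // ...OR prefix "yyy"
        return new FilterList(FilterList.Operator.MUST_PASS_ONE, filters);
      }
    }
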
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java Fri May  7 19:26:45 2010
@@ -40,7 +40,7 @@ public class TestPageFilter extends Test
     Filter f = new PageFilter(ROW_LIMIT);
     pageSizeTests(f);
   }
-  
+
   /**
    * Test filter serialization
    * @throws Exception
@@ -57,33 +57,33 @@ public class TestPageFilter extends Test
     DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer));
     Filter newFilter = new PageFilter();
     newFilter.readFields(in);
-    
+
     // Ensure the serialization preserved the filter by running a full test.
     pageSizeTests(newFilter);
   }
-  
+
   private void pageSizeTests(Filter f) throws Exception {
     testFiltersBeyondPageSize(f, ROW_LIMIT);
   }
-  
+
   private void testFiltersBeyondPageSize(final Filter f, final int pageSize) {
     int count = 0;
     for (int i = 0; i < (pageSize * 2); i++) {
       boolean filterOut = f.filterRow();
-      
+
       if(filterOut) {
         break;
       } else {
         count++;
       }
-      
+
       // If at last row, should tell us to skip all remaining
       if(count == pageSize) {
         assertTrue(f.filterAllRemaining());
       } else {
         assertFalse(f.filterAllRemaining());
       }
-      
+
     }
     assertEquals(pageSize, count);
   }

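testFiltersBeyondPageSize pins down PageFilter's contract: filterRow() returns false for the first pageSize rows and true afterwards, and filterAllRemaining() flips to true exactly when the page fills. A standalone version of that loop (assumes only the behavior the assertions above already establish):

    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.PageFilter;

    public class PageFilterSketch {
      public static void main(String[] args) {
        final int pageSize = 3;
        Filter f = new PageFilter(pageSize);
        int count = 0;
        // filterRow() starts rejecting once pageSize rows are accepted.
        while (!f.filterRow() && count < pageSize * 2) {
          count++;
        }
        System.out.println("rows accepted: " + count); // expect 3
      }
    }
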
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java Fri May  7 19:26:45 2010
@@ -26,10 +26,10 @@ import org.apache.hadoop.hbase.filter.Co
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
- * Tests for {@link SingleColumnValueExcludeFilter}. Because this filter 
+ * Tests for {@link SingleColumnValueExcludeFilter}. Because this filter
  * extends {@link SingleColumnValueFilter}, only the added functionality is
  * tested. That is, method filterKeyValue(KeyValue).
- * 
+ *
  * @author ferdy
  *
  */
@@ -48,7 +48,7 @@ public class TestSingleColumnValueExclud
   public void testFilterKeyValue() throws Exception {
     Filter filter = new SingleColumnValueExcludeFilter(COLUMN_FAMILY, COLUMN_QUALIFIER,
         CompareOp.EQUAL, VAL_1);
-    
+
     // A 'match' situation
     KeyValue kv;
     kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1);
@@ -61,7 +61,7 @@ public class TestSingleColumnValueExclud
     kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1);
     assertTrue("otherColumn", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
     assertFalse("allRemainingWhenMatch", filter.filterAllRemaining());
-    
+
     // A 'mismatch' situation
     filter.reset();
     // INCLUDE expected because test column has not yet passed
@@ -73,6 +73,6 @@ public class TestSingleColumnValueExclud
     // After a mismatch (at least with LatestVersionOnly), subsequent columns are EXCLUDE
     kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1);
     assertTrue("otherColumn", filter.filterKeyValue(kv) == Filter.ReturnCode.NEXT_ROW);
-  } 
-              
+  }
+
 }

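SingleColumnValueExcludeFilter behaves like its parent for row selection but additionally withholds the tested column from the returned Result; only filterKeyValue(KeyValue) differs, as the class comment says. A sketch of typical use (the family, qualifier, and value here are illustrative):

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ExcludeFilterSketch {
      public static Scan buildScan() {
        // Keep rows whose flag column equals "on", but strip the flag
        // column itself from the results.
        SingleColumnValueExcludeFilter f = new SingleColumnValueExcludeFilter(
            Bytes.toBytes("family"), Bytes.toBytes("flag"),
            CompareOp.EQUAL, Bytes.toBytes("on"));
        Scan s = new Scan();
        s.setFilter(f);
        return s;
      }
    }
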
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java Fri May  7 19:26:45 2010
@@ -41,9 +41,9 @@ public class TestSingleColumnValueFilter
   private static final byte[] VAL_2 = Bytes.toBytes("ab");
   private static final byte[] VAL_3 = Bytes.toBytes("abc");
   private static final byte[] VAL_4 = Bytes.toBytes("abcd");
-  private static final byte[] FULLSTRING_1 = 
+  private static final byte[] FULLSTRING_1 =
     Bytes.toBytes("The quick brown fox jumps over the lazy dog.");
-  private static final byte[] FULLSTRING_2 = 
+  private static final byte[] FULLSTRING_2 =
     Bytes.toBytes("The slow grey fox trips over the lazy dog.");
   private static final String QUICK_SUBSTR = "quick";
   private static final String QUICK_REGEX = ".+quick.+";
@@ -102,7 +102,7 @@ public class TestSingleColumnValueFilter
     assertFalse("basicFilterNotNull", filter.filterRow());
   }
 
-  private void substrFilterTests(Filter filter) 
+  private void substrFilterTests(Filter filter)
       throws Exception {
     KeyValue kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER,
       FULLSTRING_1);
@@ -115,7 +115,7 @@ public class TestSingleColumnValueFilter
     assertFalse("substrFilterNotNull", filter.filterRow());
   }
 
-  private void regexFilterTests(Filter filter) 
+  private void regexFilterTests(Filter filter)
       throws Exception {
     KeyValue kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER,
       FULLSTRING_1);
@@ -126,8 +126,8 @@ public class TestSingleColumnValueFilter
     assertTrue("regexFalse", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
     assertFalse("regexFilterAllRemaining", filter.filterAllRemaining());
     assertFalse("regexFilterNotNull", filter.filterRow());
-  }    
-                 
+  }
+
   private Filter serializationTest(Filter filter)
       throws Exception {
     // Decompose filter to bytes.
@@ -136,13 +136,13 @@ public class TestSingleColumnValueFilter
     filter.write(out);
     out.close();
     byte[] buffer = stream.toByteArray();
-  
+
     // Recompose filter.
     DataInputStream in =
       new DataInputStream(new ByteArrayInputStream(buffer));
     Filter newFilter = new SingleColumnValueFilter();
     newFilter.readFields(in);
-  
+
     return newFilter;
   }
 
@@ -154,12 +154,12 @@ public class TestSingleColumnValueFilter
     basicFilterTests((SingleColumnValueFilter)basicFilter);
     substrFilterTests(substrFilter);
     regexFilterTests(regexFilter);
-  }                               
+  }
 
   /**
    * Tests serialization
    * @throws Exception
-   */                       
+   */
   public void testSerialization() throws Exception {
     Filter newFilter = serializationTest(basicFilter);
     basicFilterTests((SingleColumnValueFilter)newFilter);
@@ -167,5 +167,5 @@ public class TestSingleColumnValueFilter
     substrFilterTests(newFilter);
     newFilter = serializationTest(regexFilter);
     regexFilterTests(newFilter);
-  }                   
+  }
 }

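Besides the raw-bytes comparison, the substring and regex variants of this test swap in a different comparator; the test's QUICK_SUBSTR and QUICK_REGEX constants would be used like this (a sketch; the family and qualifier names are placeholders):

    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.RegexStringComparator;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.filter.SubstringComparator;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ComparatorSketch {
      public static SingleColumnValueFilter substrFilter() {
        // Matches when the cell value contains "quick" anywhere.
        return new SingleColumnValueFilter(Bytes.toBytes("fam"),
            Bytes.toBytes("qual"), CompareOp.EQUAL,
            new SubstringComparator("quick"));
      }

      public static SingleColumnValueFilter regexFilter() {
        // Matches when the cell value matches ".+quick.+".
        return new SingleColumnValueFilter(Bytes.toBytes("fam"),
            Bytes.toBytes("qual"), CompareOp.EQUAL,
            new RegexStringComparator(".+quick.+"));
      }
    }
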
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java Fri May  7 19:26:45 2010
@@ -98,7 +98,7 @@ public class TestHbaseObjectWritable ext
       PrefixFilter.class);
     assertTrue(obj instanceof PrefixFilter);
   }
-  
+
   private Object doType(final HBaseConfiguration conf, final Object value,
       final Class<?> clazz)
   throws IOException {
@@ -113,5 +113,5 @@ public class TestHbaseObjectWritable ext
     dis.close();
     return product;
   }
- 
+
 }
\ No newline at end of file

Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java Fri May  7 19:26:45 2010
@@ -49,14 +49,14 @@ import org.apache.hadoop.hbase.util.Clas
 
 /**
  * Testing the sizing that HeapSize offers and comparing it to the size given by
- * ClassSize. 
+ * ClassSize.
  */
 public class TestHeapSize extends TestCase {
   static final Log LOG = LogFactory.getLog(TestHeapSize.class);
   // List of classes implementing HeapSize
   // BatchOperation, BatchUpdate, BlockIndex, Entry, Entry<K,V>, HStoreKey
   // KeyValue, LruBlockCache, LruHashMap<K,V>, Put, HLogKey
-  
+
   /**
    * Test our hard-coded sizing of native java objects
    */
@@ -65,7 +65,7 @@ public class TestHeapSize extends TestCa
     Class cl = null;
     long expected = 0L;
     long actual = 0L;
-    
+
     // ArrayList
     cl = ArrayList.class;
     expected = ClassSize.estimateBase(cl, false);
@@ -74,7 +74,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // ByteBuffer
     cl = ByteBuffer.class;
     expected = ClassSize.estimateBase(cl, false);
@@ -83,7 +83,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // Integer
     cl = Integer.class;
     expected = ClassSize.estimateBase(cl, false);
@@ -92,7 +92,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // Map.Entry
     // Interface is public, all others are not.  Hard to size via ClassSize
 //    cl = Map.Entry.class;
@@ -102,7 +102,7 @@ public class TestHeapSize extends TestCa
 //      ClassSize.estimateBase(cl, true);
 //      assertEquals(expected, actual);
 //    }
-    
+
     // Object
     cl = Object.class;
     expected = ClassSize.estimateBase(cl, false);
@@ -111,7 +111,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // TreeMap
     cl = TreeMap.class;
     expected = ClassSize.estimateBase(cl, false);
@@ -120,7 +120,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // String
     cl = String.class;
     expected = ClassSize.estimateBase(cl, false);
@@ -183,7 +183,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // CopyOnWriteArraySet
     cl = CopyOnWriteArraySet.class;
     expected = ClassSize.estimateBase(cl, false);
@@ -192,7 +192,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // CopyOnWriteArrayList
     cl = CopyOnWriteArrayList.class;
     expected = ClassSize.estimateBase(cl, false);
@@ -201,22 +201,22 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
-    
+
+
   }
-  
+
   /**
-   * Testing the classes that implements HeapSize and are a part of 0.20. 
-   * Some are not tested here for example BlockIndex which is tested in 
+   * Testing the classes that implement HeapSize and are part of 0.20.
+   * Some are not tested here, for example BlockIndex, which is tested in
    * TestHFile since it is a non-public class.
-   * @throws IOException 
+   * @throws IOException
    */
   @SuppressWarnings("unchecked")
   public void testSizes() throws IOException {
     Class cl = null;
     long expected = 0L;
     long actual = 0L;
-    
+
     //KeyValue
     cl = KeyValue.class;
     expected = ClassSize.estimateBase(cl, false);
@@ -226,7 +226,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     //Put
     cl = Put.class;
     expected = ClassSize.estimateBase(cl, false);
@@ -238,7 +238,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     //LruBlockCache Overhead
     cl = LruBlockCache.class;
     actual = LruBlockCache.CACHE_FIXED_OVERHEAD;
@@ -247,7 +247,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // CachedBlock Fixed Overhead
     // We really need "deep" sizing but ClassSize does not do this.
     // Perhaps we should do all these more in this style....
@@ -262,7 +262,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(ByteBuffer.class, true);
       assertEquals(expected, actual);
     }
-    
+
     // MemStore Overhead
     cl = MemStore.class;
     actual = MemStore.FIXED_OVERHEAD;
@@ -271,7 +271,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // MemStore Deep Overhead
     actual = MemStore.DEEP_OVERHEAD;
     expected = ClassSize.estimateBase(cl, false);
@@ -290,7 +290,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(CopyOnWriteArrayList.class, true);
       assertEquals(expected, actual);
     }
-    
+
     // Store Overhead
     cl = Store.class;
     actual = Store.FIXED_OVERHEAD;
@@ -299,7 +299,7 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // Region Overhead
     cl = HRegion.class;
     actual = HRegion.FIXED_OVERHEAD;
@@ -308,12 +308,12 @@ public class TestHeapSize extends TestCa
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
-    
+
     // Currently NOT testing Deep Overheads of many of these classes.
     // Deep overheads cover a vast majority of stuff, but will not be 100%
     // accurate because it's unclear when we're referencing stuff that's already
     // accounted for.  But we have satisfied our two core requirements.
-    // Sizing is quite accurate now, and our tests will throw errors if 
+    // Sizing is quite accurate now, and our tests will throw errors if
     // any of these classes are modified without updating overhead sizes.
 
   }

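The pattern repeated throughout TestHeapSize is: compare a class's hand-maintained FIXED_OVERHEAD constant against ClassSize.estimateBase(cl, false), and re-run with true (which appears to be the verbose/debug variant) only on mismatch. In isolation:

    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.util.ClassSize;

    public class HeapSizeSketch {
      public static void main(String[] args) {
        // Shallow size estimate computed from HRegion's declared fields.
        long expected = ClassSize.estimateBase(HRegion.class, false);
        long actual = HRegion.FIXED_OVERHEAD;            // hand-maintained constant
        if (expected != actual) {
          ClassSize.estimateBase(HRegion.class, true);   // debug output on mismatch
        }
        System.out.println("expected=" + expected + " actual=" + actual);
      }
    }
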
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java Fri May  7 19:26:45 2010
@@ -5,9 +5,9 @@
  * licenses this file to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -55,7 +55,7 @@ class KVGenerator {
     lastKey = new BytesWritable();
     fillKey(lastKey);
   }
-  
+
   private void fillKey(BytesWritable o) {
     int len = keyLenRNG.nextInt();
     if (len < MIN_KEY_LEN) len = MIN_KEY_LEN;
@@ -89,16 +89,16 @@ class KVGenerator {
       n += l;
     }
   }
-  
+
   private void incrementPrefix() {
     for (int i = MIN_KEY_LEN - 1; i >= 0; --i) {
       ++prefix[i];
       if (prefix[i] != 0) return;
     }
-    
+
     throw new RuntimeException("Prefix overflown");
   }
-  
+
   public void next(BytesWritable key, BytesWritable value, boolean dupKey) {
     if (dupKey) {
       key.set(lastKey);

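incrementPrefix above is a fixed-width, big-endian byte counter: bump the last byte and carry leftward on wrap-around. The same pattern, standalone:

    public class ByteCounterSketch {
      // Increment a big-endian byte counter in place, carrying on overflow,
      // as KVGenerator.incrementPrefix() does for its key prefix.
      static void increment(byte[] b) {
        for (int i = b.length - 1; i >= 0; --i) {
          if (++b[i] != 0) return; // no carry past this byte; done
        }
        throw new RuntimeException("counter overflow");
      }

      public static void main(String[] args) {
        byte[] c = {0, 0, (byte) 0xff};
        increment(c);                          // -> {0, 1, 0}
        System.out.println(c[1] + " " + c[2]); // prints "1 0"
      }
    }
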
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java Fri May  7 19:26:45 2010
@@ -5,9 +5,9 @@
  * licenses this file to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -48,7 +48,7 @@ class KeySampler {
     return (b[o] & 0xff) << 24 | (b[o + 1] & 0xff) << 16
         | (b[o + 2] & 0xff) << 8 | (b[o + 3] & 0xff);
   }
-  
+
   public void next(BytesWritable key) {
     key.setSize(Math.max(MIN_KEY_LEN, keyLenRNG.nextInt()));
     random.nextBytes(key.get());

Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java Fri May  7 19:26:45 2010
@@ -5,9 +5,9 @@
  * licenses this file to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -31,7 +31,7 @@ public class NanoTimer {
 
   /**
    * Constructor
-   * 
+   *
    * @param start
    *          Start the timer upon construction.
    */
@@ -41,7 +41,7 @@ public class NanoTimer {
 
   /**
    * Start the timer.
-   * 
+   *
    * Note: No effect if timer is already started.
    */
   public void start() {
@@ -53,7 +53,7 @@ public class NanoTimer {
 
   /**
    * Stop the timer.
-   * 
+   *
    * Note: No effect if timer is already stopped.
    */
   public void stop() {
@@ -65,7 +65,7 @@ public class NanoTimer {
 
   /**
    * Read the timer.
-   * 
+   *
   * @return the elapsed time in nanoseconds. Note: if the timer has never
   *         been started, -1 is returned.
    */
@@ -86,7 +86,7 @@ public class NanoTimer {
 
   /**
   * Check whether the timer is started.
-   * 
+   *
    * @return true if timer is started.
    */
   public boolean isStarted() {
@@ -95,7 +95,7 @@ public class NanoTimer {
 
   /**
   * Format the elapsed time to a human-readable string.
-   * 
+   *
    * Note: If timer is never started, "ERR" will be returned.
    */
   public String toString() {
@@ -109,7 +109,7 @@ public class NanoTimer {
   /**
   * A utility method to format a time duration in nanoseconds into a
   * human-readable string.
-   * 
+   *
    * @param t
    *          Time duration in nano seconds.
    * @return String representation.
@@ -161,19 +161,19 @@ public class NanoTimer {
 
     /**
      * StringBuilder sb = new StringBuilder(); String sep = "";
-     * 
+     *
      * if (dd > 0) { String unit = (dd > 1) ? "days" : "day";
      * sb.append(String.format("%s%d%s", sep, dd, unit)); sep = " "; }
-     * 
+     *
      * if (hh > 0) { String unit = (hh > 1) ? "hrs" : "hr";
      * sb.append(String.format("%s%d%s", sep, hh, unit)); sep = " "; }
-     * 
+     *
      * if (mm > 0) { String unit = (mm > 1) ? "mins" : "min";
      * sb.append(String.format("%s%d%s", sep, mm, unit)); sep = " "; }
-     * 
+     *
      * if (ss > 0) { String unit = (ss > 1) ? "secs" : "sec";
      * sb.append(String.format("%s%.3f%s", sep, ss, unit)); sep = " "; }
-     * 
+     *
      * return sb.toString();
      */
   }
@@ -184,7 +184,7 @@ public class NanoTimer {
 
   /**
    * Simple tester.
-   * 
+   *
    * @param args
    */
   public static void main(String[] args) {

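For context, typical use of the NanoTimer API touched above (start on
construction, stop, read, toString); the timed work is a placeholder, and
the accumulate-across-restarts behavior is an assumption from the javadoc,
not verified here:

    NanoTimer timer = new NanoTimer(true);  // start upon construction
    doSomeWork();                           // placeholder workload
    timer.stop();
    System.out.println("elapsed: " + timer.read() + " ns, i.e. " + timer);
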
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java Fri May  7 19:26:45 2010
@@ -5,9 +5,9 @@
  * licenses this file to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -36,7 +36,7 @@ public class RandomDistribution {
   public static interface DiscreteRNG {
     /**
      * Get the next random number
-     * 
+     *
      * @return the next random number.
      */
     public int nextInt();
@@ -53,14 +53,14 @@ public class RandomDistribution {
     /**
      * Generate random integers from min (inclusive) to max (exclusive)
      * following an even distribution.
-     * 
+     *
      * @param random
      *          The basic random number generator.
      * @param min
      *          Minimum integer
      * @param max
      *          maximum integer (exclusive).
-     * 
+     *
      */
     public Flat(Random random, int min, int max) {
       if (min >= max) {
@@ -70,7 +70,7 @@ public class RandomDistribution {
       this.min = min;
       this.max = max;
     }
-    
+
     /**
      * @see DiscreteRNG#nextInt()
      */
@@ -83,7 +83,7 @@ public class RandomDistribution {
   /**
   * Zipf distribution. The ratio of the probabilities of integers i and j is
    * defined as follows:
-   * 
+   *
    * P(i)/P(j)=((j-min+1)/(i-min+1))^sigma.
    */
   public static final class Zipf implements DiscreteRNG {
@@ -94,7 +94,7 @@ public class RandomDistribution {
 
     /**
      * Constructor
-     * 
+     *
      * @param r
      *          The random number generator.
      * @param min
@@ -110,7 +110,7 @@ public class RandomDistribution {
 
     /**
      * Constructor.
-     * 
+     *
      * @param r
      *          The random number generator.
      * @param min
@@ -186,9 +186,9 @@ public class RandomDistribution {
 
   /**
    * Binomial distribution.
-   * 
+   *
    * P(k)=select(n, k)*p^k*(1-p)^(n-k) (k = 0, 1, ..., n)
-   * 
+   *
   * P(k)=select(max-min-1, k-min)*p^(k-min)*(1-p)^(max-k-1)
    */
   public static final class Binomial implements DiscreteRNG {
@@ -204,7 +204,7 @@ public class RandomDistribution {
       }
       return ret;
     }
-    
+
     private static double power(double p, int k) {
       return Math.exp(k * Math.log(p));
     }
@@ -212,7 +212,7 @@ public class RandomDistribution {
     /**
      * Generate random integers from min (inclusive) to max (exclusive)
      * following a Binomial distribution.
-     * 
+     *
      * @param random
      *          The basic random number generator.
      * @param min
@@ -221,7 +221,7 @@ public class RandomDistribution {
      *          maximum integer (exclusive).
      * @param p
      *          parameter.
-     * 
+     *
      */
     public Binomial(Random random, int min, int max, double p) {
       if (min >= max) {

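The Zipf javadoc above pins the distribution down by the ratio
P(i)/P(j) = ((j-min+1)/(i-min+1))^sigma, i.e. weight(k) proportional to
1/(k-min+1)^sigma. A small inverse-CDF sampler that satisfies that ratio
(an illustrative sketch, not the class's actual implementation):

    import java.util.Random;

    class TinyZipf {
      private final int min;
      private final double[] cdf;   // normalized cumulative weights
      private final Random random;

      TinyZipf(Random random, int min, int max, double sigma) {
        this.random = random;
        this.min = min;
        cdf = new double[max - min];
        double sum = 0;
        for (int k = 0; k < cdf.length; k++) {
          sum += 1.0 / Math.pow(k + 1, sigma);  // weight of value min + k
          cdf[k] = sum;
        }
        for (int k = 0; k < cdf.length; k++) {
          cdf[k] /= sum;                        // cdf[last] becomes exactly 1.0
        }
      }

      public int nextInt() {
        double u = random.nextDouble();         // u in [0, 1)
        int k = 0;
        while (cdf[k] < u) {
          k++;                                  // first index with cdf >= u
        }
        return min + k;
      }
    }

The linear scan costs O(max - min) per sample; a binary search over cdf
(java.util.Arrays.binarySearch) would be the obvious refinement.
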
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomSeek.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomSeek.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomSeek.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomSeek.java Fri May  7 19:26:45 2010
@@ -56,14 +56,14 @@ public class RandomSeek {
     return "2" + Integer.toString(7+r.nextInt(2)) + Integer.toString(r.nextInt(100));
     //return new String(r.nextInt(100));
   }
-  
+
   public static void main(String [] argv) throws IOException {
     Configuration conf = new Configuration();
     conf.setInt("io.file.buffer.size", 64*1024);
     RawLocalFileSystem rlfs = new RawLocalFileSystem();
     rlfs.setConf(conf);
     LocalFileSystem lfs = new LocalFileSystem(rlfs);
-    
+
     Path path = new Path("/Users/ryan/rfile.big.txt");
     long start = System.currentTimeMillis();
     SimpleBlockCache cache = new SimpleBlockCache();
@@ -72,11 +72,11 @@ public class RandomSeek {
     reader.loadFileInfo();
     System.out.println(reader.trailer);
     long end = System.currentTimeMillis();
-    
+
     System.out.println("Index read time: " + (end - start));
 
     List<String> keys = slurp("/Users/ryan/xaa.50k");
-    
+
     // Get a scanner that doesn't cache and that uses pread.
     HFileScanner scanner = reader.getScanner(false, true);
     int count;
@@ -108,17 +108,17 @@ public class RandomSeek {
         totalBytes += k.limit();
         totalBytes += v.limit();
       }
-      
+
       if ( count % 1000 == 0 ) {
         end = System.nanoTime();
-        
+
             System.out.println("Cache block count: " + cache.size() + " dumped: "+ cache.dumps);
             //System.out.println("Cache size: " + cache.heapSize());
             double msTime = ((end - start) / 1000000.0);
-            System.out.println("Seeked: "+ count + " in " + msTime + " (ms) " 
+            System.out.println("Seeked: "+ count + " in " + msTime + " (ms) "
                 + (1000.0 / msTime ) + " seeks/ms "
                 + (msTime / 1000.0) + " ms/seek");
-            
+
             start = System.nanoTime();
       }
     }

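One observation on the loop above: start is first taken from
System.currentTimeMillis(), but the per-batch reports subtract it from
System.nanoTime(), so the first "Seeked: ..." line appears to mix clocks.
A sketch of the same per-batch reporting with a single clock (doSeek()
is a placeholder for the HFileScanner seek being measured):

    // Report throughput every 'batch' seeks using System.nanoTime only.
    static void timedSeeks(int total, int batch) {
      long batchStart = System.nanoTime();
      for (int count = 1; count <= total; count++) {
        doSeek();                                // placeholder workload
        if (count % batch == 0) {
          double msTime = (System.nanoTime() - batchStart) / 1000000.0;
          System.out.println("Seeked: " + batch + " in " + msTime + " (ms) "
              + (batch / msTime) + " seeks/ms");
          batchStart = System.nanoTime();        // reset for the next batch
        }
      }
    }
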
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java Fri May  7 19:26:45 2010
@@ -23,7 +23,7 @@ import java.nio.ByteBuffer;
 import junit.framework.TestCase;
 
 public class TestCachedBlockQueue extends TestCase {
-  
+
   public void testQueue() throws Exception {
 
     CachedBlock cb1 = new CachedBlock(1000, "cb1", 1);
@@ -36,9 +36,9 @@ public class TestCachedBlockQueue extend
     CachedBlock cb8 = new CachedBlock(1500, "cb8", 8);
     CachedBlock cb9 = new CachedBlock(1000, "cb9", 9);
     CachedBlock cb10 = new CachedBlock(1500, "cb10", 10);
-    
+
     CachedBlockQueue queue = new CachedBlockQueue(10000,1000);
-    
+
     queue.add(cb1);
     queue.add(cb2);
     queue.add(cb3);
@@ -49,14 +49,14 @@ public class TestCachedBlockQueue extend
     queue.add(cb8);
     queue.add(cb9);
     queue.add(cb10);
-    
+
     // We expect cb1 through cb8 to be in the queue
     long expectedSize = cb1.heapSize() + cb2.heapSize() + cb3.heapSize() +
       cb4.heapSize() + cb5.heapSize() + cb6.heapSize() + cb7.heapSize() +
       cb8.heapSize();
-    
+
     assertEquals(queue.heapSize(), expectedSize);
-    
+
     org.apache.hadoop.hbase.io.hfile.CachedBlock [] blocks = queue.get();
     assertEquals(blocks[0].getName(), "cb1");
     assertEquals(blocks[1].getName(), "cb2");
@@ -66,9 +66,9 @@ public class TestCachedBlockQueue extend
     assertEquals(blocks[5].getName(), "cb6");
     assertEquals(blocks[6].getName(), "cb7");
     assertEquals(blocks[7].getName(), "cb8");
-    
+
   }
-  
+
   public void testQueueSmallBlockEdgeCase() throws Exception {
 
     CachedBlock cb1 = new CachedBlock(1000, "cb1", 1);
@@ -81,9 +81,9 @@ public class TestCachedBlockQueue extend
     CachedBlock cb8 = new CachedBlock(1500, "cb8", 8);
     CachedBlock cb9 = new CachedBlock(1000, "cb9", 9);
     CachedBlock cb10 = new CachedBlock(1500, "cb10", 10);
-    
+
     CachedBlockQueue queue = new CachedBlockQueue(10000,1000);
-    
+
     queue.add(cb1);
     queue.add(cb2);
     queue.add(cb3);
@@ -94,21 +94,21 @@ public class TestCachedBlockQueue extend
     queue.add(cb8);
     queue.add(cb9);
     queue.add(cb10);
-    
+
     CachedBlock cb0 = new CachedBlock(10 + CachedBlock.PER_BLOCK_OVERHEAD, "cb0", 0);
     queue.add(cb0);
-    
+
     // This is older so we must include it, but it will not end up kicking
     // anything out because (heapSize - cb8.heapSize + cb0.heapSize < maxSize)
     // and we must always maintain heapSize >= maxSize once we achieve it.
-    
+
     // We expect cb0 through cb8 to be in the queue
     long expectedSize = cb1.heapSize() + cb2.heapSize() + cb3.heapSize() +
       cb4.heapSize() + cb5.heapSize() + cb6.heapSize() + cb7.heapSize() +
       cb8.heapSize() + cb0.heapSize();
-    
+
     assertEquals(queue.heapSize(), expectedSize);
-    
+
     org.apache.hadoop.hbase.io.hfile.CachedBlock [] blocks = queue.get();
     assertEquals(blocks[0].getName(), "cb0");
     assertEquals(blocks[1].getName(), "cb1");
@@ -119,9 +119,9 @@ public class TestCachedBlockQueue extend
     assertEquals(blocks[6].getName(), "cb6");
     assertEquals(blocks[7].getName(), "cb7");
     assertEquals(blocks[8].getName(), "cb8");
-    
+
   }
-  
+
   private static class CachedBlock extends org.apache.hadoop.hbase.io.hfile.CachedBlock
   {
     public CachedBlock(long heapSize, String name, long accessTime) {
@@ -130,5 +130,5 @@ public class TestCachedBlockQueue extend
           accessTime,false);
     }
   }
-  
+
 }

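The comments in this test pin down the CachedBlockQueue contract: collect
the oldest blocks whose combined heap size reaches maxSize, and once that
threshold is reached, never let the total fall back below it. A hedged
sketch of those semantics (Block and the class name are stand-ins, not
the real implementation):

    import java.util.Comparator;
    import java.util.PriorityQueue;

    class OldestBlocksQueue {
      static class Block {
        final long heapSize;
        final long accessTime;
        Block(long heapSize, long accessTime) {
          this.heapSize = heapSize;
          this.accessTime = accessTime;
        }
      }

      // Newest block at the head, so it is the first displacement candidate.
      private final PriorityQueue<Block> queue =
          new PriorityQueue<Block>(16, new Comparator<Block>() {
            public int compare(Block a, Block b) {
              return a.accessTime > b.accessTime ? -1
                   : a.accessTime < b.accessTime ? 1 : 0;
            }
          });
      private final long maxSize;
      private long heapSize = 0;

      OldestBlocksQueue(long maxSize) { this.maxSize = maxSize; }

      public void add(Block b) {
        if (heapSize < maxSize) {                // still filling toward maxSize
          queue.add(b);
          heapSize += b.heapSize;
          return;
        }
        Block newest = queue.peek();
        if (b.accessTime < newest.accessTime) {  // only older blocks enter
          queue.add(b);
          heapSize += b.heapSize;
          // Drop the newest only if the remainder still covers maxSize.
          if (heapSize - newest.heapSize >= maxSize) {
            queue.poll();
            heapSize -= newest.heapSize;
          }
        }
      }
    }

Run against the edge case above, the tiny cb0 is admitted (it is oldest)
but cb8 survives, because evicting it would drop the total below maxSize.
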
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java Fri May  7 19:26:45 2010
@@ -46,7 +46,7 @@ import org.apache.hadoop.io.RawComparato
  */
 public class TestHFile extends HBaseTestCase {
   static final Log LOG = LogFactory.getLog(TestHFile.class);
-  
+
   private static String ROOT_DIR =
     System.getProperty("test.build.data", "/tmp/TestHFile");
   private final int minBlockSize = 512;
@@ -55,7 +55,7 @@ public class TestHFile extends HBaseTest
   /**
    * Test empty HFile.
    * Test all features work reasonably when hfile is empty of entries.
-   * @throws IOException 
+   * @throws IOException
    */
   public void testEmptyHFile() throws IOException {
     Path f = new Path(ROOT_DIR, getName());
@@ -216,7 +216,7 @@ public class TestHFile extends HBaseTest
     metablocks("none");
     metablocks("gz");
   }
-  
+
   public void testNullMetaBlocks() throws Exception {
     Path mFile = new Path(ROOT_DIR, "nometa.hfile");
     FSDataOutputStream fout = createFSOutput(mFile);
@@ -238,8 +238,8 @@ public class TestHFile extends HBaseTest
     assertTrue(Compression.Algorithm.GZ.ordinal() == 1);
     assertTrue(Compression.Algorithm.NONE.ordinal() == 2);
   }
-  
-  
+
+
   public void testComparator() throws IOException {
     Path mFile = new Path(ROOT_DIR, "meta.tfile");
     FSDataOutputStream fout = createFSOutput(mFile);
@@ -249,7 +249,7 @@ public class TestHFile extends HBaseTest
         public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,
             int l2) {
           return -Bytes.compareTo(b1, s1, l1, b2, s2, l2);
-          
+
         }
         @Override
         public int compare(byte[] o1, byte[] o2) {
@@ -261,7 +261,7 @@ public class TestHFile extends HBaseTest
     writer.append("1".getBytes(), "0".getBytes());
     writer.close();
   }
-  
+
   /**
    * Checks if the HeapSize calculator is within reason
    */
@@ -270,7 +270,7 @@ public class TestHFile extends HBaseTest
     Class cl = null;
     long expected = 0L;
     long actual = 0L;
-    
+
     cl = BlockIndex.class;
     expected = ClassSize.estimateBase(cl, false);
     BlockIndex bi = new BlockIndex(Bytes.BYTES_RAWCOMPARATOR);
@@ -284,5 +284,5 @@ public class TestHFile extends HBaseTest
       assertEquals(expected, actual);
     }
   }
-  
+
 }

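A note on the testComparator() hunk: the anonymous RawComparator reverses
the ordering by negating Bytes.compareTo. That appears safe here because
Bytes.compareTo returns small differences rather than Integer.MIN_VALUE,
but the defensive idiom is to swap the operands instead of negating the
result; a sketch (assuming only that Bytes.compareTo is a consistent
three-way comparison):

    RawComparator<byte[]> reversed = new RawComparator<byte[]>() {
      @Override
      public int compare(byte[] b1, int s1, int l1,
                         byte[] b2, int s2, int l2) {
        return Bytes.compareTo(b2, s2, l2, b1, s1, l1); // operands swapped
      }
      @Override
      public int compare(byte[] o1, byte[] o2) {
        return compare(o1, 0, o1.length, o2, 0, o2.length);
      }
    };
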
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java Fri May  7 19:26:45 2010
@@ -239,7 +239,7 @@ public class TestHFilePerformance extend
           fs.getFileStatus(path).getLen(), null, false);
         reader.loadFileInfo();
         switch (method) {
-        
+
           case 0:
           case 1:
           default:

Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java Fri May  7 19:26:45 2010
@@ -5,9 +5,9 @@
  * licenses this file to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -49,7 +49,7 @@ import org.apache.hadoop.io.BytesWritabl
  * Remove after tfile is committed and use the tfile version of this class
  * instead.</p>
  */
-public class TestHFileSeek extends TestCase { 
+public class TestHFileSeek extends TestCase {
   private MyOptions options;
   private Configuration conf;
   private Path path;
@@ -85,7 +85,7 @@ public class TestHFileSeek extends TestC
         new KVGenerator(rng, true, keyLenGen, valLenGen, wordLenGen,
             options.dictSize);
   }
-  
+
   @Override
   public void tearDown() {
     try {
@@ -95,7 +95,7 @@ public class TestHFileSeek extends TestC
       // Nothing
     }
   }
-  
+
   private static FSDataOutputStream createFSOutput(Path name, FileSystem fs)
     throws IOException {
     if (fs.exists(name)) {
@@ -149,7 +149,7 @@ public class TestHFileSeek extends TestC
     System.out.printf("time: %s...file size: %.2fMB...disk thrpt: %.2fMB/s\n",
         timer.toString(), (double) fsize / 1024 / 1024, fsize / duration);
   }
-  
+
   public void seekTFile() throws IOException {
     int miss = 0;
     long totalBytes = 0;
@@ -186,7 +186,7 @@ public class TestHFileSeek extends TestC
         (double) totalBytes / 1024 / (options.seekCount - miss));
 
   }
-  
+
   public void testSeeks() throws IOException {
     if (options.doCreate()) {
       createTFile();
@@ -200,7 +200,7 @@ public class TestHFileSeek extends TestC
       fs.delete(path, true);
     }
   }
-  
+
   private static class IntegerRange {
     private final int from, to;
 
@@ -233,7 +233,7 @@ public class TestHFileSeek extends TestC
     int dictSize = 1000;
     int minWordLen = 5;
     int maxWordLen = 20;
-   
+
     String rootDir =
         System.getProperty("test.build.data", "/tmp/TestTFileSeek");
     String file = "TestTFileSeek";
@@ -391,7 +391,7 @@ public class TestHFileSeek extends TestC
       if (line.hasOption('o')) {
         fsOutputBufferSize = Integer.parseInt(line.getOptionValue('o'));
       }
-      
+
       if (line.hasOption('n')) {
         seekCount = Integer.parseInt(line.getOptionValue('n'));
       }
@@ -415,7 +415,7 @@ public class TestHFileSeek extends TestC
       if (line.hasOption('r')) {
         rootDir = line.getOptionValue('r');
       }
-      
+
       if (line.hasOption('f')) {
         file = line.getOptionValue('f');
       }
@@ -478,11 +478,11 @@ public class TestHFileSeek extends TestC
       return (op & OP_READ) != 0;
     }
   }
-  
+
   public static void main(String[] argv) throws IOException {
     TestHFileSeek testCase = new TestHFileSeek();
     MyOptions options = new MyOptions(argv);
-    
+
     if (options.proceed == false) {
       return;
     }
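
The option handling above follows the stock Apache Commons CLI pattern:
declare the Options, parse argv, then read flags off the CommandLine. A
self-contained sketch of that pattern using a few of the flags visible in
these hunks (-n, -f, -r); the seekCount default is assumed, while the file
and rootDir defaults come from the MyOptions fields shown earlier:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.GnuParser;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class OptionsSketch {
      public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("n", true, "number of seeks");
        options.addOption("f", true, "data file name");
        options.addOption("r", true, "root directory");

        CommandLine line = new GnuParser().parse(options, args);

        int seekCount = line.hasOption('n')
            ? Integer.parseInt(line.getOptionValue('n'))
            : 1000;                                   // default assumed
        String file = line.hasOption('f')
            ? line.getOptionValue('f') : "TestTFileSeek";
        String rootDir = line.hasOption('r')
            ? line.getOptionValue('r')
            : System.getProperty("test.build.data", "/tmp/TestTFileSeek");
        System.out.println("seeks=" + seekCount + " file=" + file
            + " root=" + rootDir);
      }
    }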