Posted to commits@hbase.apache.org by ji...@apache.org on 2008/08/26 20:20:26 UTC

svn commit: r689162 - in /hadoop/hbase/trunk: ./ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/ipc/ src/java/org/apache/hadoop/hbase/regionserver/ src/java/org/apache/hadoop/hbase/util/ src/test/org/apache/hadoop/hbase/ src/...

Author: jimk
Date: Tue Aug 26 11:20:25 2008
New Revision: 689162

URL: http://svn.apache.org/viewvc?rev=689162&view=rev
Log:
HBASE-841  Consolidate multiple overloaded methods in HRegionInterface, HRegionServer (Jean-Daniel Cryans via Jim Kellerman)
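
In short: the narrower get() overloads are removed and every caller now goes
through the single five-argument form, passing -1 for timestamp and/or
numVersions to ask for the defaults (latest timestamp, one version). A miss
is now reported as null rather than a zero-length array. A minimal
caller-side sketch (the variable names are illustrative, not from the patch):

    Cell[] cells = server.get(regionName, row, column, -1, -1);
    Cell latest = (cells == null) ? null : cells[0];  // null means no match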

Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Tue Aug 26 11:20:25 2008
@@ -48,6 +48,8 @@
               (Sishen Freecity via Stack)
    HBASE-830  Debugging HCM.locateRegionInMeta is painful
    HBASE-784  Base hbase-0.3.0 on hadoop-0.18
+   HBASE-841  Consolidate multiple overloaded methods in HRegionInterface,
+              HRegionServer (Jean-Daniel Cryans via Jim Kellerman)
 
   NEW FEATURES
    HBASE-787  Postgresql to HBase table replication example (Tim Sell via Stack)

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java Tue Aug 26 11:20:25 2008
@@ -386,8 +386,9 @@
     return connection.getRegionServerWithRetries(
         new ServerCallable<Cell>(connection, tableName, row) {
           public Cell call() throws IOException {
-            return server.get(location.getRegionInfo().getRegionName(), row,
-                column);
+            Cell[] result = server.get(location.getRegionInfo().getRegionName(), 
+                row, column, -1, -1);
+            return (result == null)? null : result[0];
           }
         }
     );
@@ -408,7 +409,7 @@
         new ServerCallable<Cell[]>(connection, tableName, row) {
           public Cell[] call() throws IOException {
             return server.get(location.getRegionInfo().getRegionName(), row, 
-                column, numVersions);
+                column, -1, numVersions);
           }
         }
     );

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java Tue Aug 26 11:20:25 2008
@@ -49,33 +49,6 @@
    */
   public HRegionInfo getRegionInfo(final byte [] regionName)
   throws NotServingRegionException;
-
-  /**
-   * Retrieve a single value from the specified region for the specified row
-   * and column keys
-   * 
-   * @param regionName name of region
-   * @param row row key
-   * @param column column key
-   * @return alue for that region/row/column
-   * @throws IOException
-   */
-  public Cell get(final byte [] regionName, final byte [] row, final byte [] column)
-  throws IOException;
-
-  /**
-   * Get the specified number of versions of the specified row and column
-   * 
-   * @param regionName region name
-   * @param row row key
-   * @param column column key
-   * @param numVersions number of versions to return
-   * @return array of values
-   * @throws IOException
-   */
-  public Cell[] get(final byte [] regionName, final byte [] row,
-    final byte [] column, final int numVersions)
-  throws IOException;
   
   /**
    * Get the specified number of versions of the specified row and column with
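
The only get() left on the interface is the timestamp-qualified form. Its
declaration, reconstructed here from the retained HRegionServer
implementation further down in this commit (the javadoc above is cut off by
the diff context), is roughly:

    public Cell[] get(final byte [] regionName, final byte [] row,
      final byte [] column, final long timestamp, final int numVersions)
    throws IOException;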

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java Tue Aug 26 11:20:25 2008
@@ -1119,43 +1119,26 @@
   //////////////////////////////////////////////////////////////////////////////
 
   /**
-   * Fetch a single data item.
-   * @param row
-   * @param column
-   * @return column value
-   * @throws IOException
-   */
-  public Cell get(byte [] row, byte [] column) throws IOException {
-    Cell[] results = get(row, column, Long.MAX_VALUE, 1);
-    return (results == null || results.length == 0)? null: results[0];
-  }
-  /**
-   * Fetch multiple versions of a single data item
-   * 
-   * @param row
-   * @param column
-   * @param numVersions
-   * @return array of values one element per version
-   * @throws IOException
-   */
-  public Cell[] get(byte [] row, byte [] column, int numVersions)
-  throws IOException {
-    return get(row, column, Long.MAX_VALUE, numVersions);
-  }
-
-  /**
    * Fetch multiple versions of a single data item, with timestamp.
    *
    * @param row
    * @param column
    * @param timestamp
    * @param numVersions
-   * @return array of values one element per version that matches the timestamp
+   * @return array of values one element per version that matches the timestamp, 
+   * or null if there are no matches.
    * @throws IOException
    */
   public Cell[] get(byte [] row, byte [] column, long timestamp,
     int numVersions) 
   throws IOException {
+    if (timestamp == -1) {
+      timestamp = Long.MAX_VALUE;
+    }
+    if (numVersions == -1) {
+      numVersions = 1;
+    }
+
     splitsAndClosesLock.readLock().lock();
     try {
       if (this.closed.get()) {
@@ -1166,7 +1149,10 @@
       checkColumn(column);
       // Don't need a row lock for a simple get
       HStoreKey key = new HStoreKey(row, column, timestamp);
-      return getStore(column).get(key, numVersions);
+      Cell[] result = getStore(column).get(key, numVersions);
+      // Guarantee that we return null instead of a zero-length array, 
+      // if there are no results to return.
+      return (result == null || result.length == 0)? null : result;
     } finally {
       splitsAndClosesLock.readLock().unlock();
     }
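
With the sentinel handling above, the following calls are equivalent (a
sketch; the variable names are illustrative):

    Cell[] latest = region.get(row, column, -1, -1);            // defaults applied
    Cell[] same   = region.get(row, column, Long.MAX_VALUE, 1); // explicit defaults
    // Either form returns null on a miss, never a zero-length array.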

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Tue Aug 26 11:20:25 2008
@@ -1008,34 +1008,6 @@
   }
 
   /** {@inheritDoc} */
-  public Cell get(final byte [] regionName, final byte [] row,
-    final byte [] column) 
-  throws IOException {
-    checkOpen();
-    requestCount.incrementAndGet();
-    try {
-      return getRegion(regionName).get(row, column);
-    } catch (IOException e) {
-      checkFileSystem();
-      throw e;
-    }
-  }
-
-  /** {@inheritDoc} */
-  public Cell[] get(final byte [] regionName, final byte [] row,
-    final byte [] column, final int numVersions) 
-  throws IOException {
-    checkOpen();
-    requestCount.incrementAndGet();
-    try {
-      return getRegion(regionName).get(row, column, numVersions);
-    } catch (IOException e) {
-      checkFileSystem();
-      throw e;
-    }
-  }
-
-  /** {@inheritDoc} */
   public Cell[] get(final byte [] regionName, final byte [] row,
     final byte [] column, final long timestamp, final int numVersions) 
   throws IOException {

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java Tue Aug 26 11:20:25 2008
@@ -33,6 +33,7 @@
 import org.apache.hadoop.util.ToolRunner;
 
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.regionserver.HLog;
@@ -140,10 +141,10 @@
    */
   private void mergeTwoMetaRegions() throws IOException {
     HRegion rootRegion = utils.getRootRegion();
-    HRegionInfo info1 = Writables.getHRegionInfo(
-        rootRegion.get(region1, HConstants.COL_REGIONINFO));
-    HRegionInfo info2 = Writables.getHRegionInfo(
-        rootRegion.get(region2, HConstants.COL_REGIONINFO));
+    Cell[] cells1 = rootRegion.get(region1, HConstants.COL_REGIONINFO, -1, -1);
+    HRegionInfo info1 = Writables.getHRegionInfo((cells1 == null)? null : cells1[0]);
+    Cell[] cells2 = rootRegion.get(region2, HConstants.COL_REGIONINFO, -1, -1);
+    HRegionInfo info2 = Writables.getHRegionInfo((cells2 == null)? null : cells2[0]);
     HRegion merged = merge(info1, rootRegion, info2, rootRegion); 
     LOG.info("Adding " + merged.getRegionInfo() + " to " +
         rootRegion.getRegionInfo());
@@ -205,8 +206,8 @@
     LOG.info("Found meta for region1 " + meta1.getRegionName() +
       ", meta for region2 " + meta2.getRegionName());
     HRegion metaRegion1 = this.utils.getMetaRegion(meta1);
-    HRegionInfo info1 = Writables.getHRegionInfo(
-      metaRegion1.get(region1, HConstants.COL_REGIONINFO));
+    Cell[] cells1 = metaRegion1.get(region1, HConstants.COL_REGIONINFO, -1, -1);
+    HRegionInfo info1 = Writables.getHRegionInfo((cells1 == null)? null : cells1[0]);
     if (info1== null) {
       throw new NullPointerException("info1 is null using key " + region1 +
         " in " + meta1);
@@ -218,8 +219,8 @@
     } else {
       metaRegion2 = utils.getMetaRegion(meta2);
     }
-    HRegionInfo info2 = Writables.getHRegionInfo(
-      metaRegion2.get(region2, HConstants.COL_REGIONINFO));
+    Cell[] cells2 = metaRegion2.get(region2, HConstants.COL_REGIONINFO, -1, -1);
+    HRegionInfo info2 = Writables.getHRegionInfo((cells2 == null)? null : cells2[0]);
     if (info2 == null) {
       throw new NullPointerException("info2 is null using key " + meta2);
     }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java Tue Aug 26 11:20:25 2008
@@ -400,7 +400,7 @@
   throws IOException {
     if (LOG.isDebugEnabled()) {
       HRegionInfo h = Writables.getHRegionInfoOrNull(
-        r.get(hri.getRegionName(), HConstants.COL_REGIONINFO).getValue());
+        r.get(hri.getRegionName(), HConstants.COL_REGIONINFO, -1, -1)[0].getValue());
       LOG.debug("Old " + Bytes.toString(HConstants.COL_REGIONINFO) +
         " for " + hri.toString() + " in " + r.toString() + " is: " +
         h.toString());
@@ -410,7 +410,7 @@
     r.batchUpdate(b, null);
     if (LOG.isDebugEnabled()) {
       HRegionInfo h = Writables.getHRegionInfoOrNull(
-          r.get(hri.getRegionName(), HConstants.COL_REGIONINFO).getValue());
+          r.get(hri.getRegionName(), HConstants.COL_REGIONINFO, -1, -1)[0].getValue());
         LOG.debug("New " + Bytes.toString(HConstants.COL_REGIONINFO) +
           " for " + hri.toString() + " in " + r.toString() + " is: " +
           h.toString());

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Tue Aug 26 11:20:25 2008
@@ -422,13 +422,14 @@
 
     /** {@inheritDoc} */
     public Cell get(byte [] row, byte [] column) throws IOException {
-      return this.region.get(row, column);
+      Cell[] result = this.region.get(row, column, -1, -1);
+      return (result == null)? null : result[0];
     }
 
     /** {@inheritDoc} */
     public Cell[] get(byte [] row, byte [] column, int versions)
     throws IOException {
-      return this.region.get(row, column, versions);
+      return this.region.get(row, column, -1, versions);
     }
 
     /** {@inheritDoc} */

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java Tue Aug 26 11:20:25 2008
@@ -100,7 +100,7 @@
     // available.
     addContent(new HRegionIncommon(r), Bytes.toString(COLUMN_FAMILY));
     Cell[] cellValues = 
-      r.get(STARTROW, COLUMN_FAMILY_TEXT, 100 /*Too many*/);
+      r.get(STARTROW, COLUMN_FAMILY_TEXT, -1, 100 /*Too many*/);
     // Assert that I can get 3 versions since it is the max I should get
     assertTrue(cellValues.length == 3);
     r.flushcache();
@@ -114,7 +114,7 @@
     byte [] secondRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING);
     // Increment the least significant character so we get to next row.
     secondRowBytes[START_KEY_BYTES.length - 1]++;
-    cellValues = r.get(secondRowBytes, COLUMN_FAMILY_TEXT, 100/*Too many*/);
+    cellValues = r.get(secondRowBytes, COLUMN_FAMILY_TEXT, -1, 100/*Too many*/);
     LOG.info("Count of " + Bytes.toString(secondRowBytes) + ": " + cellValues.length);
     // Commented out because fails on an hp+ubuntu single-processor w/ 1G and
     // "Intel(R) Pentium(R) 4 CPU 3.20GHz" though passes on all local
@@ -131,10 +131,10 @@
     // Now, before compacting, remove all instances of the first row so can
     // verify that it is removed as we compact.
     // Assert all delted.
-    assertNull(r.get(STARTROW, COLUMN_FAMILY_TEXT, 100 /*Too many*/));
+    assertNull(r.get(STARTROW, COLUMN_FAMILY_TEXT, -1, 100 /*Too many*/));
     r.flushcache();
     assertEquals(r.getStore(COLUMN_FAMILY_TEXT).getStorefiles().size(), 2);
-    assertNull(r.get(STARTROW, COLUMN_FAMILY_TEXT, 100 /*Too many*/));
+    assertNull(r.get(STARTROW, COLUMN_FAMILY_TEXT, -1, 100 /*Too many*/));
     // Add a bit of data and flush it so we for sure have the compaction limit
     // for store files.  Usually by this time we will have but if compaction
     // included the flush that ran 'concurrently', there may be just the
@@ -146,7 +146,7 @@
     r.compactStores();
     assertEquals(r.getStore(COLUMN_FAMILY_TEXT).getStorefiles().size(), 2);
     // Assert that the first row is still deleted.
-    cellValues = r.get(STARTROW, COLUMN_FAMILY_TEXT, 100 /*Too many*/);
+    cellValues = r.get(STARTROW, COLUMN_FAMILY_TEXT, -1, 100 /*Too many*/);
     assertNull(cellValues);
     // Make sure the store files do have some 'aaa' keys in them.
     boolean containsStartRow = false;

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java Tue Aug 26 11:20:25 2008
@@ -82,11 +82,11 @@
       
       // Now delete all then retry
       region.deleteAll(Bytes.toBytes(T00), System.currentTimeMillis(), null);
-      Cell [] cells = region.get(Bytes.toBytes(T00), column,
+      Cell [] cells = region.get(Bytes.toBytes(T00), column, -1,
         HColumnDescriptor.DEFAULT_VERSIONS);
       assertTrue(cells == null);
       region.flushcache();
-      cells = region.get(Bytes.toBytes(T00), column,
+      cells = region.get(Bytes.toBytes(T00), column, -1,
           HColumnDescriptor.DEFAULT_VERSIONS);
       assertTrue(cells == null);
       
@@ -123,11 +123,11 @@
       final byte [] column)
   throws IOException {
     byte [] r = Bytes.toBytes(row);
-    Cell [] cells = region.get(r, column, 100);
+    Cell [] cells = region.get(r, column, -1, 100);
     assertTrue(cells.length == HColumnDescriptor.DEFAULT_VERSIONS);
-    cells = region.get(r, column, 1);
+    cells = region.get(r, column, -1, 1);
     assertTrue(cells.length == 1);
-    cells = region.get(r, column, HConstants.ALL_VERSIONS);
+    cells = region.get(r, column, -1, HConstants.ALL_VERSIONS);
     assertTrue(cells.length == HColumnDescriptor.DEFAULT_VERSIONS);
   }
   

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java Tue Aug 26 11:20:25 2008
@@ -215,7 +215,7 @@
   private void assertGet(final HRegion r, final byte [] family, final byte [] k)
   throws IOException {
     // Now I have k, get values out and assert they are as expected.
-    Cell[] results = r.get(k, family, Integer.MAX_VALUE);
+    Cell[] results = r.get(k, family, -1, Integer.MAX_VALUE);
     for (int j = 0; j < results.length; j++) {
       byte [] tmp = results[j].getValue();
       // Row should be equal to value every time.

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java?rev=689162&r1=689161&r2=689162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java Tue Aug 26 11:20:25 2008
@@ -179,7 +179,7 @@
   throws IOException {
     for (int i = 0; i < upperbound; i++) {
       for (int j = 0; j < rows[i].length; j++) {
-        byte[] bytes = merged.get(rows[i][j], COLUMN_NAME).getValue();
+        byte[] bytes = merged.get(rows[i][j], COLUMN_NAME, -1, -1)[0].getValue();
         assertNotNull(rows[i][j].toString(), bytes);
         assertTrue(Bytes.equals(bytes, rows[i][j]));
       }
@@ -195,7 +195,7 @@
     // contain the right data.
     for (int i = 0; i < regions.length; i++) {
       for (int j = 0; j < rows[i].length; j++) {
-        byte[] bytes = regions[i].get(rows[i][j], COLUMN_NAME).getValue();
+        byte[] bytes = regions[i].get(rows[i][j], COLUMN_NAME, -1, -1)[0].getValue();
         assertNotNull(bytes);
         assertTrue(Bytes.equals(bytes, rows[i][j]));
       }