You are viewing a plain text version of this content. (The canonical hyperlink was present in the original HTML page but has been dropped in this plain-text rendering.)
Posted to commits@hbase.apache.org by la...@apache.org on 2012/01/05 00:16:54 UTC

svn commit: r1227382 - in /hbase/trunk/src: main/java/org/apache/hadoop/hbase/client/ main/java/org/apache/hadoop/hbase/regionserver/ test/java/org/apache/hadoop/hbase/client/

Author: larsh
Date: Wed Jan  4 23:16:54 2012
New Revision: 1227382

URL: http://svn.apache.org/viewvc?rev=1227382&view=rev
Log:
HBASE-2947  MultiIncrement/MultiAppend (JGray and Lars H)

Modified:
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Append.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Increment.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Append.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Append.java?rev=1227382&r1=1227381&r2=1227382&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Append.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Append.java Wed Jan  4 23:16:54 2012
@@ -41,7 +41,7 @@ import org.apache.hadoop.io.Writable;
  * row to append to. At least one column to append must be specified using the
  * {@link #add(byte[], byte[], byte[])} method.
  */
-public class Append extends Mutation implements Writable {
+public class Append extends Mutation implements Row {
   // TODO: refactor to derive from Put?
   private static final String RETURN_RESULTS = "_rr_";
   private static final byte APPEND_VERSION = (byte)1;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Increment.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Increment.java?rev=1227382&r1=1227381&r2=1227382&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Increment.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Increment.java Wed Jan  4 23:16:54 2012
@@ -43,7 +43,7 @@ import org.apache.hadoop.io.Writable;
  * to increment.  At least one column to increment must be specified using the
  * {@link #addColumn(byte[], byte[], long)} method.
  */
-public class Increment implements Writable {
+public class Increment implements Row {
   private static final byte INCREMENT_VERSION = (byte)2;
 
   private byte [] row = null;
@@ -328,4 +328,9 @@ public class Increment implements Writab
     }
     out.writeBoolean(writeToWAL);
   }
+
+  @Override
+  public int compareTo(Row i) {
+    return Bytes.compareTo(this.getRow(), i.getRow());
+  }
 }

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=1227382&r1=1227381&r2=1227382&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Wed Jan  4 23:16:54 2012
@@ -3234,10 +3234,11 @@ public class HRegionServer implements HR
 
         try {
           if (action instanceof Delete) {
-            delete(regionName, (Delete) action);
+            delete(regionName, (Delete)action);
             response.add(regionName, originalIndex, new Result());
           } else if (action instanceof Get) {
-            response.add(regionName, originalIndex, get(regionName, (Get) action));
+            response.add(regionName, originalIndex,
+                get(regionName, (Get)action));
           } else if (action instanceof Put) {
             puts.add(a);  // wont throw.
           } else if (action instanceof Exec) {
@@ -3245,11 +3246,17 @@ public class HRegionServer implements HR
             response.add(regionName, new Pair<Integer, Object>(
                 a.getOriginalIndex(), result.getValue()
             ));
+          } else if (action instanceof Increment) {
+            response.add(regionName, originalIndex,
+                increment(regionName, (Increment)action));
+          } else if (action instanceof Append) {
+            response.add(regionName, originalIndex,
+                append(regionName, (Append)action));
           } else {
             LOG.debug("Error: invalid Action, row must be a Get, Delete, " +
-                "Put or Exec.");
+                "Put, Exec, Increment, or Append.");
             throw new DoNotRetryIOException("Invalid Action, row must be a " +
-                "Get, Delete or Put.");
+                "Get, Delete, Put, Exec, Increment, or Append.");
           }
         } catch (IOException ex) {
           response.add(regionName, originalIndex, ex);

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java?rev=1227382&r1=1227381&r2=1227382&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java Wed Jan  4 23:16:54 2012
@@ -386,6 +386,39 @@ public class TestMultiParallel {
   }
 
   @Test(timeout=300000)
+  public void testBatchWithIncrementAndAppend() throws Exception {
+    LOG.info("test=testBatchWithIncrementAndAppend");
+    final byte[] QUAL1 = Bytes.toBytes("qual1");
+    final byte[] QUAL2 = Bytes.toBytes("qual2");
+    final byte[] QUAL3 = Bytes.toBytes("qual3");
+    final byte[] QUAL4 = Bytes.toBytes("qual4");
+    HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);
+    Delete d = new Delete(ONE_ROW);
+    table.delete(d);
+    Put put = new Put(ONE_ROW);
+    put.add(BYTES_FAMILY, QUAL1, Bytes.toBytes("abc"));
+    put.add(BYTES_FAMILY, QUAL2, Bytes.toBytes(1L));
+    table.put(put);
+
+    Increment inc = new Increment(ONE_ROW);
+    inc.addColumn(BYTES_FAMILY, QUAL2, 1);
+    inc.addColumn(BYTES_FAMILY, QUAL3, 1);
+
+    Append a = new Append(ONE_ROW);
+    a.add(BYTES_FAMILY, QUAL1, Bytes.toBytes("def"));
+    a.add(BYTES_FAMILY, QUAL4, Bytes.toBytes("xyz"));
+    List<Row> actions = new ArrayList<Row>();
+    actions.add(inc);
+    actions.add(a);
+
+    Object[] multiRes = table.batch(actions);
+    validateResult(multiRes[1], QUAL1, Bytes.toBytes("abcdef"));
+    validateResult(multiRes[1], QUAL4, Bytes.toBytes("xyz"));
+    validateResult(multiRes[0], QUAL2, Bytes.toBytes(2L));
+    validateResult(multiRes[0], QUAL3, Bytes.toBytes(1L));
+  }
+
+  @Test(timeout=300000)
   public void testBatchWithMixedActions() throws Exception {
     LOG.info("test=testBatchWithMixedActions");
     HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);