Posted to commits@hbase.apache.org by la...@apache.org on 2012/08/30 20:49:58 UTC

svn commit: r1379058 - in /hbase/trunk/hbase-server/src: main/java/org/apache/hadoop/hbase/regionserver/HRegion.java test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java

Author: larsh
Date: Thu Aug 30 18:49:58 2012
New Revision: 1379058

URL: http://svn.apache.org/viewvc?rev=1379058&view=rev
Log:
HBASE-6291 Don't retry increments on an invalid cell
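
In effect, incrementing a cell whose stored value is not exactly 8 bytes (for example an int written with Bytes.toBytes(5)) now fails immediately with DoNotRetryIOException on both the incrementColumnValue and Increment paths, instead of raising an exception that the client would keep retrying. A minimal client-side sketch of the new behaviour, mirroring the test added below (the row/family/qualifier names are illustrative, and `table` is assumed to be an open HTable for a table that has the chosen family):

    import org.apache.hadoop.hbase.DoNotRetryIOException;
    import org.apache.hadoop.hbase.client.Increment;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    byte[] row = Bytes.toBytes("row");
    byte[] family = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("column");

    // Store a 4-byte int rather than an 8-byte long.
    Put p = new Put(row);
    p.add(family, qualifier, Bytes.toBytes(5));
    table.put(p);

    try {
      table.incrementColumnValue(row, family, qualifier, 5);
    } catch (DoNotRetryIOException expected) {
      // Fails fast; the client does not retry the operation.
    }

    Increment inc = new Increment(row);
    inc.addColumn(family, qualifier, 5);
    try {
      table.increment(inc);
    } catch (DoNotRetryIOException expected) {
      // Same fast failure on the Increment path.
    }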

Modified:
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=1379058&r1=1379057&r2=1379058&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java Thu Aug 30 18:49:58 2012
@@ -4827,7 +4827,13 @@ public class HRegion implements HeapSize
             if (idx < results.size() &&
                 results.get(idx).matchingQualifier(column.getKey())) {
               KeyValue kv = results.get(idx);
-              amount += Bytes.toLong(kv.getBuffer(), kv.getValueOffset(), kv.getValueLength());
+              if(kv.getValueLength() == Bytes.SIZEOF_LONG) {
+                amount += Bytes.toLong(kv.getBuffer(), kv.getValueOffset(), Bytes.SIZEOF_LONG);
+              } else {
+                // throw DoNotRetryIOException instead of IllegalArgumentException
+                throw new DoNotRetryIOException(
+                    "Attempted to increment field that isn't 64 bits wide");
+              }
               idx++;
             }
 
@@ -4876,11 +4882,10 @@ public class HRegion implements HeapSize
       }
     } finally {
       closeRegionOperation();
+      long after = EnvironmentEdgeManager.currentTimeMillis();
+      this.opMetrics.updateIncrementMetrics(increment.getFamilyMap().keySet(), after - before);
     }
 
-    long after = EnvironmentEdgeManager.currentTimeMillis();
-    this.opMetrics.updateIncrementMetrics(increment.getFamilyMap().keySet(), after - before);
-
     if (flush) {
       // Request a cache flush.  Do it outside update lock.
       requestFlush();
@@ -4930,7 +4935,7 @@ public class HRegion implements HeapSize
 
         if (!results.isEmpty()) {
           KeyValue kv = results.get(0);
-          if(kv.getValueLength() == 8){
+          if(kv.getValueLength() == Bytes.SIZEOF_LONG){
             byte [] buffer = kv.getBuffer();
             int valueOffset = kv.getValueOffset();
             result += Bytes.toLong(buffer, valueOffset, Bytes.SIZEOF_LONG);
@@ -4986,7 +4991,7 @@ public class HRegion implements HeapSize
       requestFlush();
     }
     if (wrongLength) {
-      throw new IOException(
+      throw new DoNotRetryIOException(
           "Attempted to increment field that isn't 64 bits wide");
     }
     return result;

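The server-side change above boils down to validating the stored value length before interpreting it as a long, and signalling a non-retriable failure otherwise. A simplified sketch of that check, pulled out of the HRegion context (the helper name readCurrentValue is hypothetical; kv stands in for the KeyValue read back for the incremented column):

    import org.apache.hadoop.hbase.DoNotRetryIOException;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;

    long readCurrentValue(KeyValue kv) throws DoNotRetryIOException {
      // Only an 8-byte value can be treated as a counter.
      if (kv.getValueLength() != Bytes.SIZEOF_LONG) {
        // DoNotRetryIOException tells the client not to retry the increment.
        throw new DoNotRetryIOException(
            "Attempted to increment field that isn't 64 bits wide");
      }
      return Bytes.toLong(kv.getBuffer(), kv.getValueOffset(), Bytes.SIZEOF_LONG);
    }
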
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java?rev=1379058&r1=1379057&r2=1379058&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java Thu Aug 30 18:49:58 2012
@@ -4230,7 +4230,7 @@ public class TestFromClientSide {
  
   @Test
   public void testIncrementWithDeletes() throws Exception {
-    LOG.info("Starting testIncrement");
+    LOG.info("Starting testIncrementWithDeletes");
     final byte [] TABLENAME = Bytes.toBytes("testIncrementWithDeletes");
     HTable ht = TEST_UTIL.createTable(TABLENAME, FAMILY);
     final byte[] COLUMN = Bytes.toBytes("column");
@@ -4250,6 +4250,32 @@ public class TestFromClientSide {
   }
 
   @Test
+  public void testIncrementingInvalidValue() throws Exception {
+    LOG.info("Starting testIncrementingInvalidValue");
+    final byte [] TABLENAME = Bytes.toBytes("testIncrementingInvalidValue");
+    HTable ht = TEST_UTIL.createTable(TABLENAME, FAMILY);
+    final byte[] COLUMN = Bytes.toBytes("column");
+    Put p = new Put(ROW);
+    // write an integer here (not a Long)
+    p.add(FAMILY, COLUMN, Bytes.toBytes(5));
+    ht.put(p);
+    try {
+      ht.incrementColumnValue(ROW, FAMILY, COLUMN, 5);
+      fail("Should have thrown DoNotRetryIOException");
+    } catch (DoNotRetryIOException iox) {
+      // success
+    }
+    Increment inc = new Increment(ROW);
+    inc.addColumn(FAMILY, COLUMN, 5);
+    try {
+      ht.increment(inc);
+      fail("Should have thrown DoNotRetryIOException");
+    } catch (DoNotRetryIOException iox) {
+      // success
+    }
+  }
+
+  @Test
   public void testIncrement() throws Exception {
     LOG.info("Starting testIncrement");
     final byte [] TABLENAME = Bytes.toBytes("testIncrement");