Posted to commits@hbase.apache.org by bu...@apache.org on 2016/09/29 06:56:52 UTC

[07/50] [abbrv] hbase git commit: Revert "HBASE-16604 Scanner retries on IOException can cause the scans to miss data"

Revert "HBASE-16604 Scanner retries on IOException can cause the scans to miss data"

This reverts commit 83cf44cd3f19c841ac53889d09454ed5247ce591.

Reverting because accidental files were committed with this change.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/39db0cac
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/39db0cac
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/39db0cac

Branch: refs/heads/hbase-14439
Commit: 39db0cac78e44a92f7e730244f0e1ea02e81a4c5
Parents: 50b051a
Author: Enis Soztutar <en...@apache.org>
Authored: Fri Sep 23 11:25:23 2016 -0700
Committer: Enis Soztutar <en...@apache.org>
Committed: Fri Sep 23 11:25:23 2016 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/UnknownScannerException.java   |     4 -
 .../hadoop/hbase/client/ClientScanner.java      |     4 +-
 .../hadoop/hbase/client/ScannerCallable.java    |    26 +-
 .../hbase/exceptions/ScannerResetException.java |    50 -
 .../hbase/ipc/MetricsHBaseServerSource.java     |     2 -
 .../hbase/ipc/MetricsHBaseServerSourceImpl.java |     8 -
 .../hadoop/hbase/ipc/MetricsHBaseServer.java    |     3 -
 .../hbase/regionserver/RSRpcServices.java       |    76 +-
 .../hadoop/hbase/HBaseTestingUtility.java       |    27 +-
 .../hadoop/hbase/client/TestFromClientSide.java |    75 +-
 .../hbase/client/TestTableSnapshotScanner.java  |     2 +-
 .../TableSnapshotInputFormatTestBase.java       |     2 +-
 .../mapreduce/TestMultithreadedTableMapper.java |     3 +-
 .../hbase/mapreduce/TestTableMapReduce.java     |     5 +-
 .../hbase/mapreduce/TestTableMapReduceBase.java |     2 +-
 .../regionserver/DelegatingKeyValueScanner.java |   114 -
 .../thrift/HThreadedSelectorServerArgs.java"    |    97 -
 .../hbase/thrift/HbaseHandlerMetricsProxy.java" |    82 -
 .../thrift/HttpAuthenticationException.java"    |    42 -
 .../hbase/thrift/IncrementCoalescer.java"       |   372 -
 .../hbase/thrift/IncrementCoalescerMBean.java"  |    49 -
 .../hbase/thrift/TBoundedThreadPoolServer.java" |   311 -
 .../hadoop/hbase/thrift/ThriftHttpServlet.java" |   226 -
 .../hadoop/hbase/thrift/ThriftMetrics.java"     |    90 -
 .../hadoop/hbase/thrift/ThriftServer.java"      |   245 -
 .../hbase/thrift/ThriftServerRunner.java"       |  1887 -
 .../hadoop/hbase/thrift/ThriftUtilities.java"   |   235 -
 .../hbase/thrift/generated/AlreadyExists.java"  |   402 -
 .../hbase/thrift/generated/BatchMutation.java"  |   570 -
 .../thrift/generated/ColumnDescriptor.java"     |  1239 -
 .../hadoop/hbase/thrift/generated/Hbase.java"   | 58585 -----------------
 .../hadoop/hbase/thrift/generated/IOError.java" |   403 -
 .../thrift/generated/IllegalArgument.java"      |   402 -
 .../hbase/thrift/generated/Mutation.java"       |   732 -
 .../hadoop/hbase/thrift/generated/TAppend.java" |   840 -
 .../hadoop/hbase/thrift/generated/TCell.java"   |   517 -
 .../hadoop/hbase/thrift/generated/TColumn.java" |   521 -
 .../hbase/thrift/generated/TIncrement.java"     |   745 -
 .../hbase/thrift/generated/TRegionInfo.java"    |  1057 -
 .../hbase/thrift/generated/TRowResult.java"     |   745 -
 .../hadoop/hbase/thrift/generated/TScan.java"   |  1406 -
 .../thrift2/ThriftHBaseServiceHandler.java"     |   487 -
 .../hadoop/hbase/thrift2/ThriftServer.java"     |   555 -
 .../hadoop/hbase/thrift2/ThriftUtilities.java"  |   552 -
 .../hbase/thrift2/generated/TAppend.java"       |   954 -
 .../thrift2/generated/TAuthorization.java"      |   445 -
 .../thrift2/generated/TCellVisibility.java"     |   396 -
 .../hbase/thrift2/generated/TColumn.java"       |   631 -
 .../thrift2/generated/TColumnIncrement.java"    |   625 -
 .../hbase/thrift2/generated/TColumnValue.java"  |   851 -
 .../hbase/thrift2/generated/TCompareOp.java"    |    64 -
 .../hbase/thrift2/generated/TDelete.java"       |  1104 -
 .../hbase/thrift2/generated/TDeleteType.java"   |    50 -
 .../hbase/thrift2/generated/TDurability.java"   |    58 -
 .../hadoop/hbase/thrift2/generated/TGet.java"   |  1283 -
 .../hbase/thrift2/generated/THBaseService.java" | 23215 -------
 .../hbase/thrift2/generated/THRegionInfo.java"  |  1039 -
 .../thrift2/generated/THRegionLocation.java"    |   502 -
 .../hbase/thrift2/generated/TIOError.java"      |   401 -
 .../thrift2/generated/TIllegalArgument.java"    |   400 -
 .../hbase/thrift2/generated/TIncrement.java"    |   961 -
 .../hbase/thrift2/generated/TMutation.java"     |   373 -
 .../hadoop/hbase/thrift2/generated/TPut.java"   |  1070 -
 .../hbase/thrift2/generated/TResult.java"       |   569 -
 .../hbase/thrift2/generated/TRowMutations.java" |   559 -
 .../hadoop/hbase/thrift2/generated/TScan.java"  |  1694 -
 .../hbase/thrift2/generated/TServerName.java"   |   599 -
 .../hbase/thrift2/generated/TTimeRange.java"    |   486 -
 .../apache/hadoop/hbase/thrift2/package.html"   |   103 -
 69 files changed, 53 insertions(+), 112176 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java
index 3e7b22d..b951221 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java
@@ -45,8 +45,4 @@ public class UnknownScannerException extends DoNotRetryIOException {
   public UnknownScannerException(String s) {
     super(s);
   }
-
-  public UnknownScannerException(String s, Exception e) {
-    super(s, e);
-  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
index de8bfcc..3e676c7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-import org.apache.hadoop.hbase.exceptions.ScannerResetException;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;
@@ -429,8 +428,7 @@ public abstract class ClientScanner extends AbstractClientScanner {
         if ((cause != null && cause instanceof NotServingRegionException) ||
             (cause != null && cause instanceof RegionServerStoppedException) ||
             e instanceof OutOfOrderScannerNextException ||
-            e instanceof UnknownScannerException ||
-            e instanceof ScannerResetException) {
+            e instanceof UnknownScannerException ) {
           // Pass. It is easier writing the if loop test as list of what is allowed rather than
           // as a list of what is not allowed... so if in here, it means we do not throw.
         } else {

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
index 8345aa1..adf1153 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-import org.apache.hadoop.hbase.exceptions.ScannerResetException;
+import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.RequestConverter;
@@ -102,7 +102,7 @@ public class ScannerCallable extends RegionServerCallable<Result[]> {
    * @param scan the scan to execute
    * @param scanMetrics the ScanMetrics to used, if it is null, ScannerCallable won't collect
    *          metrics
-   * @param rpcControllerFactory factory to use when creating
+   * @param rpcControllerFactory factory to use when creating 
    *        {@link com.google.protobuf.RpcController}
    */
   public ScannerCallable(ClusterConnection connection, TableName tableName, Scan scan,
@@ -174,7 +174,6 @@ public class ScannerCallable extends RegionServerCallable<Result[]> {
     }
   }
 
-  @Override
   protected Result [] rpcCall() throws Exception {
     if (Thread.interrupted()) {
       throw new InterruptedIOException();
@@ -246,19 +245,14 @@ public class ScannerCallable extends RegionServerCallable<Result[]> {
           if (e instanceof RemoteException) {
             ioe = ((RemoteException) e).unwrapRemoteException();
           }
-          if (logScannerActivity) {
-            if (ioe instanceof UnknownScannerException) {
-              try {
-                HRegionLocation location =
-                    getConnection().relocateRegion(getTableName(), scan.getStartRow());
-                LOG.info("Scanner=" + scannerId + " expired, current region location is " +
-                    location.toString());
-              } catch (Throwable t) {
-                LOG.info("Failed to relocate region", t);
-              }
-            } else if (ioe instanceof ScannerResetException) {
-              LOG.info("Scanner=" + scannerId + " has received an exception, and the server "
-                  + "asked us to reset the scanner state.", ioe);
+          if (logScannerActivity && (ioe instanceof UnknownScannerException)) {
+            try {
+              HRegionLocation location =
+                  getConnection().relocateRegion(getTableName(), scan.getStartRow());
+              LOG.info("Scanner=" + scannerId + " expired, current region location is " +
+                  location.toString());
+            } catch (Throwable t) {
+              LOG.info("Failed to relocate region", t);
             }
           }
           // The below convertion of exceptions into DoNotRetryExceptions is a little strange.

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java
deleted file mode 100644
index 7689eb1..0000000
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.exceptions;
-
-import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-
-/**
- * Thrown when the server side has received an Exception, and asks the Client to reset the scanner
- * state by closing the current region scanner, and reopening from the start of last seen row.
- */
-@InterfaceAudience.Public
-@InterfaceStability.Stable
-public class ScannerResetException extends DoNotRetryIOException {
-  private static final long serialVersionUID = -5649728171144849619L;
-
-  /** constructor */
-  public ScannerResetException() {
-    super();
-  }
-
-  /**
-   * Constructor
-   * @param s message
-   */
-  public ScannerResetException(String s) {
-    super(s);
-  }
-
-  public ScannerResetException(String s, Exception e) {
-    super(s, e);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
index cf9c6c7..ffbe6fe 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
@@ -79,7 +79,6 @@ public interface MetricsHBaseServerSource extends BaseSource {
   String EXCEPTIONS_OOO_NAME="exceptions.OutOfOrderScannerNextException";
   String EXCEPTIONS_BUSY_NAME="exceptions.RegionTooBusyException";
   String EXCEPTIONS_UNKNOWN_NAME="exceptions.UnknownScannerException";
-  String EXCEPTIONS_SCANNER_RESET_NAME="exceptions.ScannerResetException";
   String EXCEPTIONS_SANITY_NAME="exceptions.FailedSanityCheckException";
   String EXCEPTIONS_MOVED_NAME="exceptions.RegionMovedException";
   String EXCEPTIONS_NSRE_NAME="exceptions.NotServingRegionException";
@@ -109,7 +108,6 @@ public interface MetricsHBaseServerSource extends BaseSource {
   void movedRegionException();
   void notServingRegionException();
   void unknownScannerException();
-  void scannerResetException();
   void tooBusyException();
   void multiActionTooLargeException();
   void callQueueTooBigException();

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java
index d372b1b..fafa9d0 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java
@@ -45,7 +45,6 @@ public class MetricsHBaseServerSourceImpl extends BaseSourceImpl
   private final MutableFastCounter exceptionsOOO;
   private final MutableFastCounter exceptionsBusy;
   private final MutableFastCounter exceptionsUnknown;
-  private final MutableFastCounter exceptionsScannerReset;
   private final MutableFastCounter exceptionsSanity;
   private final MutableFastCounter exceptionsNSRE;
   private final MutableFastCounter exceptionsMoved;
@@ -79,8 +78,6 @@ public class MetricsHBaseServerSourceImpl extends BaseSourceImpl
         .newCounter(EXCEPTIONS_BUSY_NAME, EXCEPTIONS_TYPE_DESC, 0L);
     this.exceptionsUnknown = this.getMetricsRegistry()
         .newCounter(EXCEPTIONS_UNKNOWN_NAME, EXCEPTIONS_TYPE_DESC, 0L);
-    this.exceptionsScannerReset = this.getMetricsRegistry()
-        .newCounter(EXCEPTIONS_SCANNER_RESET_NAME, EXCEPTIONS_TYPE_DESC, 0L);
     this.exceptionsSanity = this.getMetricsRegistry()
         .newCounter(EXCEPTIONS_SANITY_NAME, EXCEPTIONS_TYPE_DESC, 0L);
     this.exceptionsMoved = this.getMetricsRegistry()
@@ -165,11 +162,6 @@ public class MetricsHBaseServerSourceImpl extends BaseSourceImpl
   }
 
   @Override
-  public void scannerResetException() {
-    exceptionsScannerReset.incr();
-  }
-
-  @Override
   public void tooBusyException() {
     exceptionsBusy.incr();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java
index fe03d4f..838bdf6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
 import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
 import org.apache.hadoop.hbase.exceptions.RegionMovedException;
-import org.apache.hadoop.hbase.exceptions.ScannerResetException;
 
 @InterfaceAudience.Private
 public class MetricsHBaseServer {
@@ -104,8 +103,6 @@ public class MetricsHBaseServer {
         source.tooBusyException();
       } else if (throwable instanceof UnknownScannerException) {
         source.unknownScannerException();
-      } else if (throwable instanceof ScannerResetException) {
-        source.scannerResetException();
       } else if (throwable instanceof RegionMovedException) {
         source.movedRegionException();
       } else if (throwable instanceof NotServingRegionException) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 5ba8afd..6f92f9d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -82,7 +82,6 @@ import org.apache.hadoop.hbase.conf.ConfigurationObserver;
 import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
 import org.apache.hadoop.hbase.exceptions.MergeRegionException;
 import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-import org.apache.hadoop.hbase.exceptions.ScannerResetException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
@@ -2902,22 +2901,13 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
                 isClientCellBlockSupport(context));
           }
         } catch (IOException e) {
-          // The scanner state might be left in a dirty state, so we will tell the Client to
-          // fail this RPC and close the scanner while opening up another one from the start of
-          // row that the client has last seen.
-          closeScanner(region, scanner, scannerName, context);
-
-          // We closed the scanner already. Instead of throwing the IOException, and client
-          // retrying with the same scannerId only to get USE on the next RPC, we directly throw
-          // a special exception to save an RPC.
-          if (VersionInfoUtil.hasMinimumVersion(context.getClientVersionInfo(), 1, 4)) {
-            // 1.4.0+ clients know how to handle
-            throw new ScannerResetException("Scanner is closed on the server-side", e);
-          } else {
-            // older clients do not know about SRE. Just throw USE, which they will handle
-            throw new UnknownScannerException("Throwing UnknownScannerException to reset the client"
-                + " scanner state for clients older than 1.3.", e);
+          // if we have an exception on scanner next and we are using the callSeq
+          // we should rollback because the client will retry with the same callSeq
+          // and get an OutOfOrderScannerNextException if we don't do so.
+          if (rsh != null && request.hasNextCallSeq()) {
+            rsh.rollbackNextCallSeq();
           }
+          throw e;
         } finally {
           if (context != null) {
             context.setCallBack(rsh.shippedCallback);
@@ -2936,8 +2926,29 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
       if (!moreResults || closeScanner) {
         ttl = 0;
         moreResults = false;
-        if (closeScanner(region, scanner, scannerName, context)) {
-          return builder.build(); // bypass
+        if (region != null && region.getCoprocessorHost() != null) {
+          if (region.getCoprocessorHost().preScannerClose(scanner)) {
+            return builder.build(); // bypass
+          }
+        }
+        rsh = scanners.remove(scannerName);
+        if (rsh != null) {
+          if (context != null) {
+            context.setCallBack(rsh.closeCallBack);
+          } else {
+            rsh.s.close();
+          }
+          try {
+            regionServer.leases.cancelLease(scannerName);
+          } catch (LeaseException le) {
+            // No problem, ignore
+            if (LOG.isTraceEnabled()) {
+              LOG.trace("Un-able to cancel lease of scanner. It could already be closed.");
+            }
+          }
+          if (region != null && region.getCoprocessorHost() != null) {
+            region.getCoprocessorHost().postScannerClose(scanner);
+          }
         }
       }
 
@@ -2969,35 +2980,6 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
     }
   }
 
-  private boolean closeScanner(Region region, RegionScanner scanner, String scannerName,
-      RpcCallContext context) throws IOException {
-    if (region != null && region.getCoprocessorHost() != null) {
-      if (region.getCoprocessorHost().preScannerClose(scanner)) {
-        return true; // bypass
-      }
-    }
-    RegionScannerHolder rsh = scanners.remove(scannerName);
-    if (rsh != null) {
-      if (context != null) {
-        context.setCallBack(rsh.closeCallBack);
-      } else {
-        rsh.s.close();
-      }
-      try {
-        regionServer.leases.cancelLease(scannerName);
-      } catch (LeaseException le) {
-        // No problem, ignore
-        if (LOG.isTraceEnabled()) {
-          LOG.trace("Un-able to cancel lease of scanner. It could already be closed.");
-        }
-      }
-      if (region != null && region.getCoprocessorHost() != null) {
-        region.getCoprocessorHost().postScannerClose(scanner);
-      }
-    }
-    return false;
-  }
-
   @Override
   public CoprocessorServiceResponse execRegionServerService(RpcController controller,
       CoprocessorServiceRequest request) throws ServiceException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 829661c..5a4da45 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1413,8 +1413,12 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
   public Table createTable(TableName tableName, byte[][] families,
       int numVersions, byte[] startKey, byte[] endKey, int numRegions)
   throws IOException{
-    HTableDescriptor desc = createTableDescriptor(tableName, families, numVersions);
-
+    HTableDescriptor desc = new HTableDescriptor(tableName);
+    for (byte[] family : families) {
+      HColumnDescriptor hcd = new HColumnDescriptor(family)
+          .setMaxVersions(numVersions);
+      desc.addFamily(hcd);
+    }
     getAdmin().createTable(desc, startKey, endKey, numRegions);
     // HBaseAdmin only waits for regions to appear in hbase:meta we
     // should wait until they are assigned
@@ -1777,22 +1781,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
         MAXVERSIONS, HConstants.FOREVER, HColumnDescriptor.DEFAULT_KEEP_DELETED);
   }
 
-  public HTableDescriptor createTableDescriptor(final TableName tableName,
-      byte[] family) {
-    return createTableDescriptor(tableName, new byte[][] {family}, 1);
-  }
-
-  public HTableDescriptor createTableDescriptor(final TableName tableName,
-      byte[][] families, int maxVersions) {
-    HTableDescriptor desc = new HTableDescriptor(tableName);
-    for (byte[] family : families) {
-      HColumnDescriptor hcd = new HColumnDescriptor(family)
-          .setMaxVersions(maxVersions);
-      desc.addFamily(hcd);
-    }
-    return desc;
-  }
-
   /**
    * Create an HRegion that writes to the local tmp dirs
    * @param desc
@@ -2010,7 +1998,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
       put.setDurability(writeToWAL ? Durability.USE_DEFAULT : Durability.SKIP_WAL);
       for (int i = 0; i < f.length; i++) {
         byte[] value1 = value != null ? value : row;
-        put.addColumn(f[i], f[i], value1);
+        put.addColumn(f[i], null, value1);
       }
       puts.add(put);
     }
@@ -3552,7 +3540,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
     public PortAllocator(Random random) {
       this.random = random;
       this.portChecker = new AvailablePortChecker() {
-        @Override
         public boolean available(int port) {
           try {
             ServerSocket sock = new ServerSocket(port);

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 33a5315..f465625 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -26,6 +26,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -36,12 +37,10 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
-import java.util.NavigableSet;
 import java.util.UUID;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.commons.lang.ArrayUtils;
@@ -64,11 +63,8 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
-import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -95,14 +91,10 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
 import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService;
 import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
-import org.apache.hadoop.hbase.regionserver.DelegatingKeyValueScanner;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
 import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
 import org.apache.hadoop.hbase.regionserver.Region;
-import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreScanner;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -544,71 +536,6 @@ public class TestFromClientSide {
     assertEquals(rowCount - endKeyCount, countGreater);
   }
 
-  /**
-   * This is a coprocessor to inject a test failure so that a store scanner.reseek() call will
-   * fail with an IOException() on the first call.
-   */
-  public static class ExceptionInReseekRegionObserver extends BaseRegionObserver {
-    static AtomicLong reqCount = new AtomicLong(0);
-    class MyStoreScanner extends StoreScanner {
-      public MyStoreScanner(Store store, ScanInfo scanInfo, Scan scan, NavigableSet<byte[]> columns,
-          long readPt) throws IOException {
-        super(store, scanInfo, scan, columns, readPt);
-      }
-
-      @Override
-      protected List<KeyValueScanner> selectScannersFrom(
-          List<? extends KeyValueScanner> allScanners) {
-        List<KeyValueScanner> scanners = super.selectScannersFrom(allScanners);
-        List<KeyValueScanner> newScanners = new ArrayList<>(scanners.size());
-        for (KeyValueScanner scanner : scanners) {
-          newScanners.add(new DelegatingKeyValueScanner(scanner) {
-            @Override
-            public boolean reseek(Cell key) throws IOException {
-              if (reqCount.incrementAndGet() == 1) {
-                throw new IOException("Injected exception");
-              }
-              return super.reseek(key);
-            }
-          });
-        }
-        return newScanners;
-      }
-    }
-
-    @Override
-    public KeyValueScanner preStoreScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
-        Store store, Scan scan, NavigableSet<byte[]> targetCols, KeyValueScanner s,
-        final long readPt) throws IOException {
-      return new MyStoreScanner(store, store.getScanInfo(), scan, targetCols, readPt);
-    }
-  }
-
-  /**
-   * Tests the case where a Scan can throw an IOException in the middle of the seek / reseek
-   * leaving the server side RegionScanner to be in dirty state. The client has to ensure that the
-   * ClientScanner does not get an exception and also sees all the data.
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  @Test
-  public void testClientScannerIsResetWhenScanThrowsIOException()
-  throws IOException, InterruptedException {
-    TEST_UTIL.getConfiguration().setBoolean("hbase.client.log.scanner.activity", true);
-    TableName name = TableName.valueOf("testClientScannerIsResetWhenScanThrowsIOException");
-
-    HTableDescriptor htd = TEST_UTIL.createTableDescriptor(name, FAMILY);
-    htd.addCoprocessor(ExceptionInReseekRegionObserver.class.getName());
-    TEST_UTIL.getAdmin().createTable(htd);
-    try (Table t = TEST_UTIL.getConnection().getTable(name)) {
-      int rowCount = TEST_UTIL.loadTable(t, FAMILY, false);
-      TEST_UTIL.getAdmin().flush(name);
-      int actualRowCount = countRows(t, new Scan().addColumn(FAMILY, FAMILY));
-      assertEquals(rowCount, actualRowCount);
-    }
-    assertTrue(ExceptionInReseekRegionObserver.reqCount.get() > 0);
-  }
-
   /*
    * @param key
    * @return Scan with RowFilter that does LESS than passed key.

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java
index 8b9428f..0f0baff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java
@@ -184,7 +184,7 @@ public class TestTableSnapshotScanner {
     }
 
     for (int j = 0; j < FAMILIES.length; j++) {
-      byte[] actual = result.getValue(FAMILIES[j], FAMILIES[j]);
+      byte[] actual = result.getValue(FAMILIES[j], null);
       Assert.assertArrayEquals("Row in snapshot does not match, expected:" + Bytes.toString(row)
           + " ,actual:" + Bytes.toString(actual), row, actual);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
index 66d290a..26e5897 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
@@ -181,7 +181,7 @@ public abstract class TableSnapshotInputFormatTestBase {
     }
 
     for (int j = 0; j < FAMILIES.length; j++) {
-      byte[] actual = result.getValue(FAMILIES[j], FAMILIES[j]);
+      byte[] actual = result.getValue(FAMILIES[j], null);
       Assert.assertArrayEquals("Row in snapshot does not match, expected:" + Bytes.toString(row)
         + " ,actual:" + Bytes.toString(actual), row, actual);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
index 694a359..5110ef7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
@@ -97,7 +97,6 @@ public class TestMultithreadedTableMapper {
      * @param context
      * @throws IOException
      */
-    @Override
     public void map(ImmutableBytesWritable key, Result value,
         Context context)
             throws IOException, InterruptedException {
@@ -111,7 +110,7 @@ public class TestMultithreadedTableMapper {
             Bytes.toString(INPUT_FAMILY) + "'.");
       }
       // Get the original value and reverse it
-      String originalValue = Bytes.toString(value.getValue(INPUT_FAMILY, INPUT_FAMILY));
+      String originalValue = Bytes.toString(value.getValue(INPUT_FAMILY, null));
       StringBuilder newValue = new StringBuilder(originalValue);
       newValue.reverse();
       // Now set the value to be collected

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
index 690e776..fa5b9a4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
@@ -57,7 +57,6 @@ import org.junit.experimental.categories.Category;
 public class TestTableMapReduce extends TestTableMapReduceBase {
   private static final Log LOG = LogFactory.getLog(TestTableMapReduce.class);
 
-  @Override
   protected Log getLog() { return LOG; }
 
   /**
@@ -73,7 +72,6 @@ public class TestTableMapReduce extends TestTableMapReduceBase {
      * @param context
      * @throws IOException
      */
-    @Override
     public void map(ImmutableBytesWritable key, Result value,
       Context context)
     throws IOException, InterruptedException {
@@ -88,7 +86,7 @@ public class TestTableMapReduce extends TestTableMapReduceBase {
       }
 
       // Get the original value and reverse it
-      String originalValue = Bytes.toString(value.getValue(INPUT_FAMILY, INPUT_FAMILY));
+      String originalValue = Bytes.toString(value.getValue(INPUT_FAMILY, null));
       StringBuilder newValue = new StringBuilder(originalValue);
       newValue.reverse();
       // Now set the value to be collected
@@ -98,7 +96,6 @@ public class TestTableMapReduce extends TestTableMapReduceBase {
     }
   }
 
-  @Override
   protected void runTestOnTable(Table table) throws IOException {
     Job job = null;
     try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
index 27bf063..e78bf4f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
@@ -126,7 +126,7 @@ public abstract class TestTableMapReduceBase {
 
     // Get the original value and reverse it
 
-    String originalValue = Bytes.toString(value.getValue(INPUT_FAMILY, INPUT_FAMILY));
+    String originalValue = Bytes.toString(value.getValue(INPUT_FAMILY, null));
     StringBuilder newValue = new StringBuilder(originalValue);
     newValue.reverse();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DelegatingKeyValueScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DelegatingKeyValueScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DelegatingKeyValueScanner.java
deleted file mode 100644
index 51a2a97..0000000
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DelegatingKeyValueScanner.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.regionserver;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
-import org.apache.hadoop.hbase.regionserver.Store;
-
-public class DelegatingKeyValueScanner implements KeyValueScanner {
-  protected KeyValueScanner delegate;
-
-  public DelegatingKeyValueScanner(KeyValueScanner delegate) {
-    this.delegate = delegate;
-  }
-
-  @Override
-  public void shipped() throws IOException {
-    delegate.shipped();
-  }
-
-  @Override
-  public Cell peek() {
-    return delegate.peek();
-  }
-
-  @Override
-  public Cell next() throws IOException {
-    return delegate.next();
-  }
-
-  @Override
-  public boolean seek(Cell key) throws IOException {
-    return delegate.seek(key);
-  }
-
-  @Override
-  public boolean reseek(Cell key) throws IOException {
-    return delegate.reseek(key);
-  }
-
-  @Override
-  public long getScannerOrder() {
-    return delegate.getScannerOrder();
-  }
-
-  @Override
-  public void close() {
-    delegate.close();
-  }
-
-  @Override
-  public boolean shouldUseScanner(Scan scan, Store store, long oldestUnexpiredTS) {
-    return delegate.shouldUseScanner(scan, store, oldestUnexpiredTS);
-  }
-
-  @Override
-  public boolean requestSeek(Cell kv, boolean forward, boolean useBloom) throws IOException {
-    return delegate.requestSeek(kv, forward, useBloom);
-  }
-
-  @Override
-  public boolean realSeekDone() {
-    return delegate.realSeekDone();
-  }
-
-  @Override
-  public void enforceSeek() throws IOException {
-    delegate.enforceSeek();
-  }
-
-  @Override
-  public boolean isFileScanner() {
-    return delegate.isFileScanner();
-  }
-
-  @Override
-  public boolean backwardSeek(Cell key) throws IOException {
-    return delegate.backwardSeek(key);
-  }
-
-  @Override
-  public boolean seekToPreviousRow(Cell key) throws IOException {
-    return delegate.seekToPreviousRow(key);
-  }
-
-  @Override
-  public boolean seekToLastRow() throws IOException {
-    return delegate.seekToLastRow();
-  }
-
-  @Override
-  public Cell getNextIndexedKey() {
-    return delegate.getNextIndexedKey();
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/HttpAuthenticationException.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescerMBean.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/TAppend.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/TColumn.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAppend.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAuthorization.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TCellVisibility.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TCompareOp.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDeleteType.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDurability.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/THRegionInfo.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/THRegionLocation.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TMutation.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TRowMutations.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TServerName.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTimeRange.java"
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/39db0cac/"hbase-thrift\036src/main/java/org/apache/hadoop/hbase/thrift2/package.html"
----------------------------------------------------------------------