Posted to commits@hbase.apache.org by nd...@apache.org on 2015/05/13 20:37:58 UTC

hbase git commit: HBASE-13201 Remove HTablePool from thrift-server (Solomon Duskis)

Repository: hbase
Updated Branches:
  refs/heads/branch-1.0 6cfc04d5a -> 7dfd43990


HBASE-13201 Remove HTablePool from thrift-server (Solomon Duskis)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7dfd4399
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7dfd4399
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7dfd4399

Branch: refs/heads/branch-1.0
Commit: 7dfd4399053d06bad7d0d5dd3e538fdc5c4c5029
Parents: 6cfc04d
Author: Nick Dimiduk <nd...@apache.org>
Authored: Fri Mar 27 10:22:53 2015 -0700
Committer: Nick Dimiduk <nd...@apache.org>
Committed: Wed May 13 11:31:53 2015 -0700

----------------------------------------------------------------------
 .../thrift2/ThriftHBaseServiceHandler.java      | 69 ++++++++------------
 1 file changed, 27 insertions(+), 42 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/7dfd4399/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
index b055918..2a90765 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
@@ -18,7 +18,18 @@
  */
 package org.apache.hadoop.hbase.thrift2;
 
-import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.*;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.appendFromThrift;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.deleteFromThrift;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.deletesFromThrift;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.getFromThrift;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.getsFromThrift;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.incrementFromThrift;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.putFromThrift;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.putsFromThrift;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultFromHBase;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultsFromHBase;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.rowMutationsFromThrift;
+import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.scanFromThrift;
 import static org.apache.thrift.TBaseHelper.byteBufferToByteArray;
 
 import java.io.IOException;
@@ -30,42 +41,40 @@ import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.HTableFactory;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.thrift.ThriftMetrics;
-import org.apache.hadoop.hbase.thrift2.generated.*;
+import org.apache.hadoop.hbase.thrift2.generated.TAppend;
+import org.apache.hadoop.hbase.thrift2.generated.TDelete;
+import org.apache.hadoop.hbase.thrift2.generated.TGet;
+import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
+import org.apache.hadoop.hbase.thrift2.generated.TIOError;
+import org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument;
+import org.apache.hadoop.hbase.thrift2.generated.TIncrement;
+import org.apache.hadoop.hbase.thrift2.generated.TPut;
+import org.apache.hadoop.hbase.thrift2.generated.TResult;
+import org.apache.hadoop.hbase.thrift2.generated.TRowMutations;
+import org.apache.hadoop.hbase.thrift2.generated.TScan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ConnectionCache;
 import org.apache.thrift.TException;
 
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-
 /**
  * This class is a glue object that connects Thrift RPC calls to the HBase client API primarily
  * defined in the HTableInterface.
  */
 @InterfaceAudience.Private
-@SuppressWarnings("deprecation")
 public class ThriftHBaseServiceHandler implements THBaseService.Iface {
 
   // TODO: Size of pool configurable
-  private final Cache<String, HTablePool> htablePools;
-  private final Callable<? extends HTablePool> htablePoolCreater;
   private static final Log LOG = LogFactory.getLog(ThriftHBaseServiceHandler.class);
 
   // nextScannerId and scannerMap are used to manage scanner state
@@ -75,8 +84,6 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
       new ConcurrentHashMap<Integer, ResultScanner>();
 
   private final ConnectionCache connectionCache;
-  private final HTableFactory tableFactory;
-  private final int maxPoolSize;
 
   static final String CLEANUP_INTERVAL = "hbase.thrift.connection.cleanup-interval";
   static final String MAX_IDLETIME = "hbase.thrift.connection.max-idletime";
@@ -123,34 +130,13 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
     int maxIdleTime = conf.getInt(MAX_IDLETIME, 10 * 60 * 1000);
     connectionCache = new ConnectionCache(
       conf, userProvider, cleanInterval, maxIdleTime);
-    tableFactory = new HTableFactory() {
-      @Override
-      public HTableInterface createHTableInterface(Configuration config,
-          byte[] tableName) {
-        try {
-          return connectionCache.getTable(Bytes.toString(tableName));
-        } catch (IOException ioe) {
-          throw new RuntimeException(ioe);
-        }
-      }
-    };
-    htablePools = CacheBuilder.newBuilder().expireAfterAccess(
-      maxIdleTime, TimeUnit.MILLISECONDS).softValues().concurrencyLevel(4).build();
-    maxPoolSize = conf.getInt("hbase.thrift.htablepool.size.max", 1000);
-    htablePoolCreater = new Callable<HTablePool>() {
-      public HTablePool call() {
-        return new HTablePool(conf, maxPoolSize, tableFactory);
-      }
-    };
   }
 
   private Table getTable(ByteBuffer tableName) {
-    String currentUser = connectionCache.getEffectiveUser();
     try {
-      HTablePool htablePool = htablePools.get(currentUser, htablePoolCreater);
-      return htablePool.getTable(byteBufferToByteArray(tableName));
-    } catch (ExecutionException ee) {
-      throw new RuntimeException(ee);
+      return connectionCache.getTable(Bytes.toString(byteBufferToByteArray(tableName)));
+    } catch (IOException e) {
+      throw new RuntimeException(e);
     }
   }
 
@@ -424,5 +410,4 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
       closeTable(htable);
     }
   }
-
 }
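
----------------------------------------------------------------------

For context, a minimal sketch of the table-handling pattern the handler is left with after this patch, assuming only the APIs visible in the diff above (ConnectionCache.getTable, Bytes.toString, TBaseHelper.byteBufferToByteArray, Table); the class and the exampleExists method are illustrative names and are not part of the commit:

import static org.apache.thrift.TBaseHelper.byteBufferToByteArray;

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ConnectionCache;

/** Illustrative sketch of the post-HBASE-13201 table handling; not part of the commit. */
public class TableAccessSketch {

  private final ConnectionCache connectionCache;

  public TableAccessSketch(ConnectionCache connectionCache) {
    // The real handler builds its ConnectionCache in its constructor from the
    // Configuration, UserProvider and cleanup/idle-time settings shown in the diff.
    this.connectionCache = connectionCache;
  }

  // Mirrors the simplified getTable(...) introduced by this commit: no HTablePool,
  // just a direct per-call lookup against the per-user ConnectionCache.
  private Table getTable(ByteBuffer tableName) {
    try {
      return connectionCache.getTable(Bytes.toString(byteBufferToByteArray(tableName)));
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  // Stands in for the handler's existing closeTable(...) helper.
  private void closeTable(Table table) throws IOException {
    table.close();
  }

  // Hypothetical caller showing the get / use / close-in-finally shape that the
  // handler's RPC methods keep after the change.
  public boolean exampleExists(ByteBuffer tableName, byte[] row) throws IOException {
    Table htable = getTable(tableName);
    try {
      return htable.exists(new Get(row));
    } finally {
      closeTable(htable);
    }
  }
}

The net effect, matching the diffstat above, is that table lifetime is owned entirely by the ConnectionCache, and no per-user pool of HTableInterface instances has to be created, sized, or expired by the Thrift handler itself.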