You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by jx...@apache.org on 2013/08/29 18:28:08 UTC

svn commit: r1518693 - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/client/ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ hbase-client/src/test/java/org/apache/hadoop/hbase/client/ hbase-protocol/src/main/java/org/apach...

Author: jxiang
Date: Thu Aug 29 16:28:07 2013
New Revision: 1518693

URL: http://svn.apache.org/r1518693
Log:
HBASE-9321 Contention getting the current user in RpcClient.writeRequest

Removed:
    hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionWrapper.java
Modified:
    hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
    hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
    hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
    hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java
    hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
    hbase/trunk/hbase-protocol/src/main/protobuf/RPC.proto
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java

Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java Thu Aug 29 16:28:07 2013
@@ -317,7 +317,7 @@ public class HConnectionManager {
    */
   public static HConnection createConnection(Configuration conf)
   throws IOException {
-    return createConnection(conf, false, null);
+    return createConnection(conf, false, null, User.getCurrent());
   }
 
   /**
@@ -342,17 +342,69 @@ public class HConnectionManager {
    */
   public static HConnection createConnection(Configuration conf, ExecutorService pool)
   throws IOException {
-    return createConnection(conf, false, pool);
+    return createConnection(conf, false, pool, User.getCurrent());
+  }
+
+  /**
+   * Create a new HConnection instance using the passed <code>conf</code> instance.
+   * <p>Note: This bypasses the usual HConnection life cycle management done by
+   * {@link #getConnection(Configuration)}. The caller is responsible for
+   * calling {@link HConnection#close()} on the returned connection instance.
+   * This is the recommended way to create HConnections.
+   * {@code
+   * User user = ...;
+   * HConnection connection = HConnectionManager.createConnection(conf, user);
+   * HTableInterface table = connection.getTable("mytable");
+   * table.get(...);
+   * ...
+   * table.close();
+   * connection.close();
+   * }
+   * @param conf configuration
+   * @param user the user the connection is for
+   * @return HConnection object for <code>conf</code>
+   * @throws IOException if the connection could not be created
+   */
+  public static HConnection createConnection(Configuration conf, User user)
+  throws IOException {
+    return createConnection(conf, false, null, user);
+  }
+
+  /**
+   * Create a new HConnection instance using the passed <code>conf</code> instance.
+   * <p>Note: This bypasses the usual HConnection life cycle management done by
+   * {@link #getConnection(Configuration)}. The caller is responsible for
+   * calling {@link HConnection#close()} on the returned connection instance.
+   * This is the recommended way to create HConnections.
+   * {@code
+   * ExecutorService pool = ...;
+   * HConnection connection = HConnectionManager.createConnection(conf, pool, user);
+   * HTableInterface table = connection.getTable("mytable");
+   * table.get(...);
+   * ...
+   * table.close();
+   * connection.close();
+   * }
+   * @param conf configuration
+   * @param pool the thread pool to use for batch operation in HTables used via this HConnection
+   * @param user the user the connection is for
+   * @return HConnection object for <code>conf</code>
+   * @throws IOException if the connection could not be created
+   */
+  public static HConnection createConnection(Configuration conf, ExecutorService pool, User user)
+  throws IOException {
+    return createConnection(conf, false, pool, user);
   }
 
   @Deprecated
   static HConnection createConnection(final Configuration conf, final boolean managed)
       throws IOException {
-    return createConnection(conf, managed, null);
+    return createConnection(conf, managed, null, User.getCurrent());
   }
 
   @Deprecated
-  static HConnection createConnection(final Configuration conf, final boolean managed, final ExecutorService pool)
+  static HConnection createConnection(final Configuration conf, final boolean managed,
+      final ExecutorService pool, final User user)
   throws IOException {
     String className = conf.get("hbase.client.connection.impl",
       HConnectionManager.HConnectionImplementation.class.getName());
@@ -365,9 +417,10 @@ public class HConnectionManager {
     try {
       // Default HCM#HCI is not accessible; make it so before invoking.
       Constructor<?> constructor =
-        clazz.getDeclaredConstructor(Configuration.class, boolean.class, ExecutorService.class);
+        clazz.getDeclaredConstructor(Configuration.class,
+          boolean.class, ExecutorService.class, User.class);
       constructor.setAccessible(true);
-      return (HConnection) constructor.newInstance(conf, managed, pool);
+      return (HConnection) constructor.newInstance(conf, managed, pool, user);
     } catch (Exception e) {
       throw new IOException(e);
     }
@@ -583,13 +636,15 @@ public class HConnectionManager {
     // indicates whether this connection's life cycle is managed (by us)
     private boolean managed;
 
+    private User user;
+
     /**
      * Cluster registry of basic info such as clusterid and meta region location.
      */
      Registry registry;
 
      HConnectionImplementation(Configuration conf, boolean managed) throws IOException {
-       this(conf, managed, null);
+       this(conf, managed, null, null);
      }
      
     /**
@@ -603,8 +658,10 @@ public class HConnectionManager {
      * are shared, we have reference counting going on and will only do full cleanup when no more
      * users of an HConnectionImplementation instance.
      */
-    HConnectionImplementation(Configuration conf, boolean managed, ExecutorService pool) throws IOException {
+    HConnectionImplementation(Configuration conf, boolean managed,
+        ExecutorService pool, User user) throws IOException {
       this(conf);
+      this.user = user;
       this.batchPool = pool;
       this.managed = managed;
       this.registry = setupRegistry();
@@ -1574,7 +1631,7 @@ public class HConnectionManager {
             stub = stubs.get(key);
             if (stub == null) {
               BlockingRpcChannel channel = rpcClient.createBlockingRpcChannel(sn,
-                  User.getCurrent(), rpcTimeout);
+                user, rpcTimeout);
               stub = makeStub(channel);
               isMasterRunning();
               stubs.put(key, stub);
@@ -1723,7 +1780,7 @@ public class HConnectionManager {
         stub = (AdminService.BlockingInterface)this.stubs.get(key);
         if (stub == null) {
           BlockingRpcChannel channel = this.rpcClient.createBlockingRpcChannel(serverName,
-            User.getCurrent(), this.rpcTimeout);
+            user, this.rpcTimeout);
           stub = AdminService.newBlockingStub(channel);
           this.stubs.put(key, stub);
         }
@@ -1744,7 +1801,7 @@ public class HConnectionManager {
         stub = (ClientService.BlockingInterface)this.stubs.get(key);
         if (stub == null) {
           BlockingRpcChannel channel = this.rpcClient.createBlockingRpcChannel(sn,
-            User.getCurrent(), this.rpcTimeout);
+            user, this.rpcTimeout);
           stub = ClientService.newBlockingStub(channel);
           // In old days, after getting stub/proxy, we'd make a call.  We are not doing that here.
           // Just fail on first actual call rather than in here on setup.

Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java Thu Aug 29 16:28:07 2013
@@ -1011,24 +1011,6 @@ public class RpcClient {
           builder.setTraceInfo(RPCTInfo.newBuilder().
             setParentId(s.getSpanId()).setTraceId(s.getTraceId()));
         }
-        UserGroupInformation ticket = remoteId.getTicket().getUGI();
-        if (ticket != null) {
-          UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
-          String effectiveUser = currentUser == null ? null : currentUser.getShortUserName();
-          if (effectiveUser != null && !effectiveUser.equals(ticket.getShortUserName())) {
-            if (ticket.getRealUser() != null) {
-              LOG.warn("Current user " + effectiveUser
-                + " is different from the ticket user " + ticket.getShortUserName()
-                + ". But the ticket is already a proxy user with real user "
-                + ticket.getRealUser().getShortUserName());
-            } else {
-              // If the ticket is not a proxy user, and the current user
-              // is not the same as the ticket user, pass the current user
-              // over to the server as the actual effective user.
-              builder.setEffectiveUser(effectiveUser);
-            }
-          }
-        }
         builder.setMethodName(call.md.getName());
         builder.setRequestParam(call.param != null);
         ByteBuffer cellBlock = ipcUtil.buildCellBlock(this.codec, this.compressor, call.cells);

Modified: hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java (original)
+++ hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java Thu Aug 29 16:28:07 2013
@@ -17,7 +17,8 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.net.SocketTimeoutException;
@@ -26,21 +27,22 @@ import java.util.concurrent.ExecutorServ
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.SmallTests;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService.BlockingInterface;
 import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Before;
-import org.junit.Test;
 import org.junit.Ignore;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
 
@@ -249,7 +251,7 @@ public class TestClientNoCluster {
     final ClientService.BlockingInterface stub;
 
     RegionServerStoppedOnScannerOpenConnection(Configuration conf, boolean managed,
-        ExecutorService pool) throws IOException {
+        ExecutorService pool, User user) throws IOException {
       super(conf, managed);
       // Mock up my stub so open scanner returns a scanner id and then on next, we throw
       // exceptions for three times and then after that, we return no more to scan.
@@ -280,7 +282,7 @@ public class TestClientNoCluster {
   extends HConnectionManager.HConnectionImplementation {
     final ClientService.BlockingInterface stub;
 
-    RpcTimeoutConnection(Configuration conf, boolean managed, ExecutorService pool)
+    RpcTimeoutConnection(Configuration conf, boolean managed, ExecutorService pool, User user)
     throws IOException {
       super(conf, managed);
       // Mock up my stub so an exists call -- which turns into a get -- throws an exception

Modified: hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java (original)
+++ hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java Thu Aug 29 16:28:07 2013
@@ -3662,33 +3662,6 @@ public final class RPCProtos {
      * </pre>
      */
     org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder();
-
-    // optional string effective_user = 6;
-    /**
-     * <code>optional string effective_user = 6;</code>
-     *
-     * <pre>
-     * If present, the request is made on behalf of this user
-     * </pre>
-     */
-    boolean hasEffectiveUser();
-    /**
-     * <code>optional string effective_user = 6;</code>
-     *
-     * <pre>
-     * If present, the request is made on behalf of this user
-     * </pre>
-     */
-    java.lang.String getEffectiveUser();
-    /**
-     * <code>optional string effective_user = 6;</code>
-     *
-     * <pre>
-     * If present, the request is made on behalf of this user
-     * </pre>
-     */
-    com.google.protobuf.ByteString
-        getEffectiveUserBytes();
   }
   /**
    * Protobuf type {@code RequestHeader}
@@ -3786,11 +3759,6 @@ public final class RPCProtos {
               bitField0_ |= 0x00000010;
               break;
             }
-            case 50: {
-              bitField0_ |= 0x00000020;
-              effectiveUser_ = input.readBytes();
-              break;
-            }
           }
         }
       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -3978,68 +3946,12 @@ public final class RPCProtos {
       return cellBlockMeta_;
     }
 
-    // optional string effective_user = 6;
-    public static final int EFFECTIVE_USER_FIELD_NUMBER = 6;
-    private java.lang.Object effectiveUser_;
-    /**
-     * <code>optional string effective_user = 6;</code>
-     *
-     * <pre>
-     * If present, the request is made on behalf of this user
-     * </pre>
-     */
-    public boolean hasEffectiveUser() {
-      return ((bitField0_ & 0x00000020) == 0x00000020);
-    }
-    /**
-     * <code>optional string effective_user = 6;</code>
-     *
-     * <pre>
-     * If present, the request is made on behalf of this user
-     * </pre>
-     */
-    public java.lang.String getEffectiveUser() {
-      java.lang.Object ref = effectiveUser_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        com.google.protobuf.ByteString bs = 
-            (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          effectiveUser_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <code>optional string effective_user = 6;</code>
-     *
-     * <pre>
-     * If present, the request is made on behalf of this user
-     * </pre>
-     */
-    public com.google.protobuf.ByteString
-        getEffectiveUserBytes() {
-      java.lang.Object ref = effectiveUser_;
-      if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b = 
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        effectiveUser_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
     private void initFields() {
       callId_ = 0;
       traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance();
       methodName_ = "";
       requestParam_ = false;
       cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance();
-      effectiveUser_ = "";
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
@@ -4068,9 +3980,6 @@ public final class RPCProtos {
       if (((bitField0_ & 0x00000010) == 0x00000010)) {
         output.writeMessage(5, cellBlockMeta_);
       }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        output.writeBytes(6, getEffectiveUserBytes());
-      }
       getUnknownFields().writeTo(output);
     }
 
@@ -4100,10 +4009,6 @@ public final class RPCProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeMessageSize(5, cellBlockMeta_);
       }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(6, getEffectiveUserBytes());
-      }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
       return size;
@@ -4152,11 +4057,6 @@ public final class RPCProtos {
         result = result && getCellBlockMeta()
             .equals(other.getCellBlockMeta());
       }
-      result = result && (hasEffectiveUser() == other.hasEffectiveUser());
-      if (hasEffectiveUser()) {
-        result = result && getEffectiveUser()
-            .equals(other.getEffectiveUser());
-      }
       result = result &&
           getUnknownFields().equals(other.getUnknownFields());
       return result;
@@ -4190,10 +4090,6 @@ public final class RPCProtos {
         hash = (37 * hash) + CELL_BLOCK_META_FIELD_NUMBER;
         hash = (53 * hash) + getCellBlockMeta().hashCode();
       }
-      if (hasEffectiveUser()) {
-        hash = (37 * hash) + EFFECTIVE_USER_FIELD_NUMBER;
-        hash = (53 * hash) + getEffectiveUser().hashCode();
-      }
       hash = (29 * hash) + getUnknownFields().hashCode();
       memoizedHashCode = hash;
       return hash;
@@ -4327,8 +4223,6 @@ public final class RPCProtos {
           cellBlockMetaBuilder_.clear();
         }
         bitField0_ = (bitField0_ & ~0x00000010);
-        effectiveUser_ = "";
-        bitField0_ = (bitField0_ & ~0x00000020);
         return this;
       }
 
@@ -4385,10 +4279,6 @@ public final class RPCProtos {
         } else {
           result.cellBlockMeta_ = cellBlockMetaBuilder_.build();
         }
-        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
-          to_bitField0_ |= 0x00000020;
-        }
-        result.effectiveUser_ = effectiveUser_;
         result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
@@ -4422,11 +4312,6 @@ public final class RPCProtos {
         if (other.hasCellBlockMeta()) {
           mergeCellBlockMeta(other.getCellBlockMeta());
         }
-        if (other.hasEffectiveUser()) {
-          bitField0_ |= 0x00000020;
-          effectiveUser_ = other.effectiveUser_;
-          onChanged();
-        }
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
@@ -4896,104 +4781,6 @@ public final class RPCProtos {
         return cellBlockMetaBuilder_;
       }
 
-      // optional string effective_user = 6;
-      private java.lang.Object effectiveUser_ = "";
-      /**
-       * <code>optional string effective_user = 6;</code>
-       *
-       * <pre>
-       * If present, the request is made on behalf of this user
-       * </pre>
-       */
-      public boolean hasEffectiveUser() {
-        return ((bitField0_ & 0x00000020) == 0x00000020);
-      }
-      /**
-       * <code>optional string effective_user = 6;</code>
-       *
-       * <pre>
-       * If present, the request is made on behalf of this user
-       * </pre>
-       */
-      public java.lang.String getEffectiveUser() {
-        java.lang.Object ref = effectiveUser_;
-        if (!(ref instanceof java.lang.String)) {
-          java.lang.String s = ((com.google.protobuf.ByteString) ref)
-              .toStringUtf8();
-          effectiveUser_ = s;
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <code>optional string effective_user = 6;</code>
-       *
-       * <pre>
-       * If present, the request is made on behalf of this user
-       * </pre>
-       */
-      public com.google.protobuf.ByteString
-          getEffectiveUserBytes() {
-        java.lang.Object ref = effectiveUser_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b = 
-              com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          effectiveUser_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * <code>optional string effective_user = 6;</code>
-       *
-       * <pre>
-       * If present, the request is made on behalf of this user
-       * </pre>
-       */
-      public Builder setEffectiveUser(
-          java.lang.String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000020;
-        effectiveUser_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional string effective_user = 6;</code>
-       *
-       * <pre>
-       * If present, the request is made on behalf of this user
-       * </pre>
-       */
-      public Builder clearEffectiveUser() {
-        bitField0_ = (bitField0_ & ~0x00000020);
-        effectiveUser_ = getDefaultInstance().getEffectiveUser();
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional string effective_user = 6;</code>
-       *
-       * <pre>
-       * If present, the request is made on behalf of this user
-       * </pre>
-       */
-      public Builder setEffectiveUserBytes(
-          com.google.protobuf.ByteString value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000020;
-        effectiveUser_ = value;
-        onChanged();
-        return this;
-      }
-
       // @@protoc_insertion_point(builder_scope:RequestHeader)
     }
 
@@ -6010,16 +5797,15 @@ public final class RPCProtos {
       "\001(\r\"|\n\021ExceptionResponse\022\034\n\024exception_cl" +
       "ass_name\030\001 \001(\t\022\023\n\013stack_trace\030\002 \001(\t\022\020\n\010h" +
       "ostname\030\003 \001(\t\022\014\n\004port\030\004 \001(\005\022\024\n\014do_not_re",
-      "try\030\005 \001(\010\"\254\001\n\rRequestHeader\022\017\n\007call_id\030\001" +
+      "try\030\005 \001(\010\"\224\001\n\rRequestHeader\022\017\n\007call_id\030\001" +
       " \001(\r\022\035\n\ntrace_info\030\002 \001(\0132\t.RPCTInfo\022\023\n\013m" +
       "ethod_name\030\003 \001(\t\022\025\n\rrequest_param\030\004 \001(\010\022" +
       "\'\n\017cell_block_meta\030\005 \001(\0132\016.CellBlockMeta" +
-      "\022\026\n\016effective_user\030\006 \001(\t\"q\n\016ResponseHead" +
-      "er\022\017\n\007call_id\030\001 \001(\r\022%\n\texception\030\002 \001(\0132\022" +
-      ".ExceptionResponse\022\'\n\017cell_block_meta\030\003 " +
-      "\001(\0132\016.CellBlockMetaB<\n*org.apache.hadoop" +
-      ".hbase.protobuf.generatedB\tRPCProtosH\001\240\001" +
-      "\001"
+      "\"q\n\016ResponseHeader\022\017\n\007call_id\030\001 \001(\r\022%\n\te" +
+      "xception\030\002 \001(\0132\022.ExceptionResponse\022\'\n\017ce" +
+      "ll_block_meta\030\003 \001(\0132\016.CellBlockMetaB<\n*o" +
+      "rg.apache.hadoop.hbase.protobuf.generate" +
+      "dB\tRPCProtosH\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -6055,7 +5841,7 @@ public final class RPCProtos {
           internal_static_RequestHeader_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_RequestHeader_descriptor,
-              new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", "EffectiveUser", });
+              new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", });
           internal_static_ResponseHeader_descriptor =
             getDescriptor().getMessageTypes().get(5);
           internal_static_ResponseHeader_fieldAccessorTable = new

Modified: hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java (original)
+++ hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java Thu Aug 29 16:28:07 2013
@@ -646,7 +646,7 @@ public final class WALProtos {
      *
      * <pre>
      *
-     *cluster_ids field contains the list of clusters that have
+     *This field contains the list of clusters that have
      *consumed the change
      * </pre>
      */
@@ -657,7 +657,7 @@ public final class WALProtos {
      *
      * <pre>
      *
-     *cluster_ids field contains the list of clusters that have
+     *This field contains the list of clusters that have
      *consumed the change
      * </pre>
      */
@@ -667,7 +667,7 @@ public final class WALProtos {
      *
      * <pre>
      *
-     *cluster_ids field contains the list of clusters that have
+     *This field contains the list of clusters that have
      *consumed the change
      * </pre>
      */
@@ -677,7 +677,7 @@ public final class WALProtos {
      *
      * <pre>
      *
-     *cluster_ids field contains the list of clusters that have
+     *This field contains the list of clusters that have
      *consumed the change
      * </pre>
      */
@@ -688,7 +688,7 @@ public final class WALProtos {
      *
      * <pre>
      *
-     *cluster_ids field contains the list of clusters that have
+     *This field contains the list of clusters that have
      *consumed the change
      * </pre>
      */
@@ -1020,7 +1020,7 @@ public final class WALProtos {
      *
      * <pre>
      *
-     *cluster_ids field contains the list of clusters that have
+     *This field contains the list of clusters that have
      *consumed the change
      * </pre>
      */
@@ -1032,7 +1032,7 @@ public final class WALProtos {
      *
      * <pre>
      *
-     *cluster_ids field contains the list of clusters that have
+     *This field contains the list of clusters that have
      *consumed the change
      * </pre>
      */
@@ -1045,7 +1045,7 @@ public final class WALProtos {
      *
      * <pre>
      *
-     *cluster_ids field contains the list of clusters that have
+     *This field contains the list of clusters that have
      *consumed the change
      * </pre>
      */
@@ -1057,7 +1057,7 @@ public final class WALProtos {
      *
      * <pre>
      *
-     *cluster_ids field contains the list of clusters that have
+     *This field contains the list of clusters that have
      *consumed the change
      * </pre>
      */
@@ -1069,7 +1069,7 @@ public final class WALProtos {
      *
      * <pre>
      *
-     *cluster_ids field contains the list of clusters that have
+     *This field contains the list of clusters that have
      *consumed the change
      * </pre>
      */
@@ -2284,7 +2284,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2300,7 +2300,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2316,7 +2316,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2332,7 +2332,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2355,7 +2355,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2375,7 +2375,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2397,7 +2397,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2420,7 +2420,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2440,7 +2440,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2460,7 +2460,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2480,7 +2480,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2499,7 +2499,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2518,7 +2518,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2531,7 +2531,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2547,7 +2547,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2564,7 +2564,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2577,7 +2577,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */
@@ -2591,7 +2591,7 @@ public final class WALProtos {
        *
        * <pre>
        *
-       *cluster_ids field contains the list of clusters that have
+       *This field contains the list of clusters that have
        *consumed the change
        * </pre>
        */

Modified: hbase/trunk/hbase-protocol/src/main/protobuf/RPC.proto
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/protobuf/RPC.proto?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/protobuf/RPC.proto (original)
+++ hbase/trunk/hbase-protocol/src/main/protobuf/RPC.proto Thu Aug 29 16:28:07 2013
@@ -119,8 +119,6 @@ message RequestHeader {
   optional bool request_param = 4;
   // If present, then an encoded data block follows.
   optional CellBlockMeta cell_block_meta = 5;
-  // If present, the request is made on behalf of this user
-  optional string effective_user = 6;
   // TODO: Have client specify priority
 }
 

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java Thu Aug 29 16:28:07 2013
@@ -259,11 +259,10 @@ public class RpcServer implements RpcSer
     protected long size;                          // size of current call
     protected boolean isError;
     protected TraceInfo tinfo;
-    protected String effectiveUser;
 
     Call(int id, final BlockingService service, final MethodDescriptor md, RequestHeader header,
          Message param, CellScanner cellScanner, Connection connection, Responder responder,
-         long size, TraceInfo tinfo, String effectiveUser) {
+         long size, TraceInfo tinfo) {
       this.id = id;
       this.service = service;
       this.md = md;
@@ -278,7 +277,6 @@ public class RpcServer implements RpcSer
       this.isError = false;
       this.size = size;
       this.tinfo = tinfo;
-      this.effectiveUser = effectiveUser;
     }
 
     @Override
@@ -1127,13 +1125,13 @@ public class RpcServer implements RpcSer
     private static final int AUTHROIZATION_FAILED_CALLID = -1;
     private final Call authFailedCall =
       new Call(AUTHROIZATION_FAILED_CALLID, this.service, null,
-        null, null, null, this, null, 0, null, null);
+        null, null, null, this, null, 0, null);
     private ByteArrayOutputStream authFailedResponse =
         new ByteArrayOutputStream();
     // Fake 'call' for SASL context setup
     private static final int SASL_CALLID = -33;
     private final Call saslCall =
-      new Call(SASL_CALLID, this.service, null, null, null, null, this, null, 0, null, null);
+      new Call(SASL_CALLID, this.service, null, null, null, null, this, null, 0, null);
 
     public UserGroupInformation attemptingUser = null; // user name before auth
 
@@ -1488,7 +1486,7 @@ public class RpcServer implements RpcSer
 
     private int doBadPreambleHandling(final String msg, final Exception e) throws IOException {
       LOG.warn(msg);
-      Call fakeCall = new Call(-1, null, null, null, null, null, this, responder, -1, null, null);
+      Call fakeCall = new Call(-1, null, null, null, null, null, this, responder, -1, null);
       setupResponse(null, fakeCall, e, msg);
       responder.doRespond(fakeCall);
       // Returning -1 closes out the connection.
@@ -1640,7 +1638,7 @@ public class RpcServer implements RpcSer
       if ((totalRequestSize + callQueueSize.get()) > maxQueueSize) {
         final Call callTooBig =
           new Call(id, this.service, null, null, null, null, this,
-            responder, totalRequestSize, null, null);
+            responder, totalRequestSize, null);
         ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream();
         setupResponse(responseBuffer, callTooBig, new CallQueueTooBigException(),
           "Call queue is full, is ipc.server.max.callqueue.size too small?");
@@ -1650,7 +1648,6 @@ public class RpcServer implements RpcSer
       MethodDescriptor md = null;
       Message param = null;
       CellScanner cellScanner = null;
-      String effectiveUser = null;
       try {
         if (header.hasRequestParam() && header.getRequestParam()) {
           md = this.service.getDescriptorForType().findMethodByName(header.getMethodName());
@@ -1669,15 +1666,12 @@ public class RpcServer implements RpcSer
           cellScanner = ipcUtil.createCellScanner(this.codec, this.compressionCodec,
             buf, offset, buf.length);
         }
-        if (header.hasEffectiveUser()) {
-          effectiveUser = header.getEffectiveUser();
-        }
       } catch (Throwable t) {
         String msg = "Unable to read call parameter from client " + getHostAddress();
         LOG.warn(msg, t);
         final Call readParamsFailedCall =
           new Call(id, this.service, null, null, null, null, this,
-            responder, totalRequestSize, null, null);
+            responder, totalRequestSize, null);
         ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream();
         setupResponse(responseBuffer, readParamsFailedCall, t,
           msg + "; " + t.getMessage());
@@ -1690,8 +1684,7 @@ public class RpcServer implements RpcSer
           : null;
       Call call = new Call(id, this.service, md, header, param, cellScanner, this, responder,
               totalRequestSize,
-              traceInfo,
-              effectiveUser);
+              traceInfo);
       callQueueSize.add(totalRequestSize);
       scheduler.dispatch(new CallRunner(call));
     }
@@ -1801,21 +1794,8 @@ public class RpcServer implements RpcSer
           if (call.tinfo != null) {
             traceScope = Trace.startSpan(call.toTraceString(), call.tinfo);
           }
-          User user;
-          if (call.effectiveUser == null) {
-            user = User.create(call.connection.user);
-          } else {
-            UserGroupInformation ugi = UserGroupInformation.createProxyUser(
-              call.effectiveUser, call.connection.user);
-            ProxyUsers.authorize(ugi, call.connection.getHostAddress(), conf);
-            if (LOG.isDebugEnabled()) {
-              LOG.debug("Authorized " + call.connection.user
-                + " to impersonate " + call.effectiveUser);
-            }
-            user = User.create(ugi);
-          }
-          RequestContext.set(user, getRemoteIp(), call.connection.service);
-
+          RequestContext.set(User.create(call.connection.user), getRemoteIp(),
+            call.connection.service);
           // make the call
           resultPair = call(call.service, call.md, call.param, call.cellScanner, call.timestamp,
               status);

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java Thu Aug 29 16:28:07 2013
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -41,7 +40,6 @@ import org.apache.hadoop.hbase.util.Info
 import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.util.VersionInfo;
 import org.apache.hadoop.net.DNS;
-import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.mortbay.jetty.Connector;
 import org.mortbay.jetty.Server;
@@ -86,7 +84,6 @@ public class RESTServer implements Const
 
     VersionInfo.logVersion();
     FilterHolder authFilter = null;
-    UserGroupInformation realUser = null;
     Configuration conf = HBaseConfiguration.create();
     Class<? extends ServletContainer> containerClass = ServletContainer.class;
 
@@ -102,7 +99,6 @@ public class RESTServer implements Const
       Preconditions.checkArgument(principalConfig != null && !principalConfig.isEmpty(),
         REST_KERBEROS_PRINCIPAL + " should be set if security is enabled");
       User.login(conf, REST_KEYTAB_FILE, REST_KERBEROS_PRINCIPAL, machineName);
-      realUser = User.getCurrent().getUGI();
       if (conf.get(REST_AUTHENTICATION_TYPE) != null) {
         containerClass = RESTServletContainer.class;
         authFilter = new FilterHolder();
@@ -111,6 +107,7 @@ public class RESTServer implements Const
       }
     }
 
+    UserGroupInformation realUser = User.getCurrent().getUGI();
     RESTServlet servlet = RESTServlet.getInstance(conf, realUser);
 
     Options options = new Options();

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java Thu Aug 29 16:28:07 2013
@@ -19,52 +19,139 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
-import java.security.PrivilegedAction;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.locks.Lock;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Chore;
+import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HConnectionWrapper;
 import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.KeyLocker;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
 
 /**
  * Singleton class encapsulating global REST servlet state and functions.
  */
 @InterfaceAudience.Private
 public class RESTServlet implements Constants {
+  private static Logger LOG = Logger.getLogger(RESTServlet.class);
   private static RESTServlet INSTANCE;
   private final Configuration conf;
-  private final HTablePool pool;
   private final MetricsREST metrics = new MetricsREST();
-  private final HBaseAdmin admin;
-  private final UserGroupInformation ugi;
+  private final Map<String, ConnectionInfo>
+    connections = new ConcurrentHashMap<String, ConnectionInfo>();
+  private final KeyLocker<String> locker = new KeyLocker<String>();
+  private final UserGroupInformation realUser;
+
+  static final String CLEANUP_INTERVAL = "hbase.rest.connection.cleanup-interval";
+  static final String MAX_IDLETIME = "hbase.rest.connection.max-idletime";
+
+  static final String NULL_USERNAME = "--NULL--";
+
+  private final ThreadLocal<String> effectiveUser = new ThreadLocal<String>() {
+    protected String initialValue() {
+      return NULL_USERNAME;
+    }
+  };
+
+  // A chore to clean up idle connections.
+  private final Chore connectionCleaner;
+  private final Stoppable stoppable;
+
+  class ConnectionInfo {
+    final HConnection connection;
+    final String userName;
+
+    volatile HBaseAdmin admin;
+    private long lastAccessTime;
+    private boolean closed;
+
+    ConnectionInfo(HConnection conn, String user) {
+      lastAccessTime = EnvironmentEdgeManager.currentTimeMillis();
+      connection = conn;
+      closed = false;
+      userName = user;
+    }
+
+    synchronized boolean updateAccessTime() {
+      if (closed) {
+        return false;
+      }
+      if (connection.isAborted() || connection.isClosed()) {
+        LOG.info("Unexpected: cached HConnection is aborted/closed, removed from cache");
+        connections.remove(userName);
+        return false;
+      }
+      lastAccessTime = EnvironmentEdgeManager.currentTimeMillis();
+      return true;
+    }
+
+    synchronized boolean timedOut(int maxIdleTime) {
+      long timeoutTime = lastAccessTime + maxIdleTime;
+      if (EnvironmentEdgeManager.currentTimeMillis() > timeoutTime) {
+        connections.remove(userName);
+        closed = true;
+        // Report the timeout so ConnectionCleaner.chore() closes the
+        // admin/connection; returning false here would leak the connection.
+        return true;
+      }
+      return false;
+    }
+  }
+
+  class ConnectionCleaner extends Chore {
+    private final int maxIdleTime;
+
+    public ConnectionCleaner(int cleanInterval, int maxIdleTime) {
+      super("REST-ConnectionCleaner", cleanInterval, stoppable);
+      this.maxIdleTime = maxIdleTime;
+    }
+
+    @Override
+    protected void chore() {
+      for (Map.Entry<String, ConnectionInfo> entry: connections.entrySet()) {
+        ConnectionInfo connInfo = entry.getValue();
+        if (connInfo.timedOut(maxIdleTime)) {
+          if (connInfo.admin != null) {
+            try {
+              connInfo.admin.close();
+            } catch (Throwable t) {
+              LOG.info("Got exception in closing idle admin", t);
+            }
+          }
+          try {
+            connInfo.connection.close();
+          } catch (Throwable t) {
+            LOG.info("Got exception in closing idle connection", t);
+          }
+        }
+      }
+    }
+  }
 
   /**
    * @return the RESTServlet singleton instance
-   * @throws IOException
    */
-  public synchronized static RESTServlet getInstance() throws IOException {
+  public synchronized static RESTServlet getInstance() {
     assert(INSTANCE != null);
     return INSTANCE;
   }
 
   /**
    * @param conf Existing configuration to use in rest servlet
+   * @param realUser the login user
    * @return the RESTServlet singleton instance
-   * @throws IOException
    */
-  public synchronized static RESTServlet getInstance(Configuration conf)
-  throws IOException {
-    return getInstance(conf, null);
-  }
-
   public synchronized static RESTServlet getInstance(Configuration conf,
-      UserGroupInformation ugi) throws IOException {
+      UserGroupInformation realUser) {
     if (INSTANCE == null) {
-      INSTANCE = new RESTServlet(conf, ugi);
+      INSTANCE = new RESTServlet(conf, realUser);
     }
     return INSTANCE;
   }
@@ -76,54 +163,50 @@ public class RESTServlet implements Cons
   /**
    * Constructor with existing configuration
    * @param conf existing configuration
-   * @throws IOException
+   * @param realUser the login user
    */
   RESTServlet(final Configuration conf,
-      final UserGroupInformation ugi) throws IOException {
+      final UserGroupInformation realUser) {
+    stoppable = new Stoppable() {
+      private volatile boolean isStopped = false;
+      @Override public void stop(String why) { isStopped = true;}
+      @Override public boolean isStopped() {return isStopped;}
+    };
+
+    int cleanInterval = conf.getInt(CLEANUP_INTERVAL, 10 * 1000);
+    int maxIdleTime = conf.getInt(MAX_IDLETIME, 10 * 60 * 1000);
+    connectionCleaner = new ConnectionCleaner(cleanInterval, maxIdleTime);
+    Threads.setDaemonThreadRunning(connectionCleaner.getThread());
+
+    this.realUser = realUser;
     this.conf = conf;
-    this.ugi = ugi;
-    int maxSize = conf.getInt("hbase.rest.htablepool.size", 10);
-    if (ugi == null) {
-      pool = new HTablePool(conf, maxSize);
-      admin = new HBaseAdmin(conf);
-    } else {
-      admin = new HBaseAdmin(new HConnectionWrapper(ugi,
-        HConnectionManager.getConnection(new Configuration(conf))));
-
-      pool = new HTablePool(conf, maxSize) {
-        /**
-         * A HTablePool adapter. It makes sure the real user is
-         * always used in creating any table so that the HConnection
-         * is not any proxy user in case impersonation with
-         * RESTServletContainer.
-         */
-        @Override
-        protected HTableInterface createHTable(final String tableName) {
-          return ugi.doAs(new PrivilegedAction<HTableInterface>() {
-            @Override
-            public HTableInterface run() {
-              return callCreateHTable(tableName);
-             }
-           });
-          
-        }
+  }
 
-        /**
-         * A helper method used to call super.createHTable.
-         */
-        HTableInterface callCreateHTable(final String tableName) {
-          return super.createHTable(tableName);
+  /**
+   * Caller doesn't close the admin afterwards.
+   * We need to manage it and close it properly.
+   */
+  HBaseAdmin getAdmin() throws IOException {
+    ConnectionInfo connInfo = getCurrentConnection();
+    if (connInfo.admin == null) {
+      Lock lock = locker.acquireLock(effectiveUser.get());
+      try {
+        if (connInfo.admin == null) {
+          connInfo.admin = new HBaseAdmin(connInfo.connection);
         }
-      };
+      } finally {
+        lock.unlock();
+      }
     }
+    return connInfo.admin;
   }
 
-  HBaseAdmin getAdmin() {
-    return admin;
-  }
-
-  HTablePool getTablePool() {
-    return pool;
+  /**
+   * Caller closes the table afterwards.
+   */
+  HTableInterface getTable(String tableName) throws IOException {
+    ConnectionInfo connInfo = getCurrentConnection();
+    return connInfo.connection.getTable(tableName);
   }
 
   Configuration getConfiguration() {
@@ -143,7 +226,31 @@ public class RESTServlet implements Cons
     return getConfiguration().getBoolean("hbase.rest.readonly", false);
   }
 
-  UserGroupInformation getUser() {
-    return ugi;
+  void setEffectiveUser(String effectiveUser) {
+    // request.getRemoteUser() is null for unauthenticated requests; map it to
+    // the NULL_USERNAME sentinel since ConcurrentHashMap rejects null keys.
+    this.effectiveUser.set(effectiveUser == null ? NULL_USERNAME : effectiveUser);
+  }
+
+  private ConnectionInfo getCurrentConnection() throws IOException {
+    String userName = effectiveUser.get();
+    ConnectionInfo connInfo = connections.get(userName);
+    if (connInfo == null || !connInfo.updateAccessTime()) {
+      Lock lock = locker.acquireLock(userName);
+      try {
+        connInfo = connections.get(userName);
+        if (connInfo == null) {
+          UserGroupInformation ugi = realUser;
+          if (!userName.equals(NULL_USERNAME)) {
+            ugi = UserGroupInformation.createProxyUser(userName, realUser);
+          }
+          User user = User.create(ugi);
+          HConnection conn = HConnectionManager.createConnection(conf, user);
+          connInfo = new ConnectionInfo(conn, userName);
+          connections.put(userName, connInfo);
+        }
+      } finally {
+        lock.unlock();
+      }
+    }
+    return connInfo;
   }
 }

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java Thu Aug 29 16:28:07 2013
@@ -19,14 +19,12 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
-import java.security.PrivilegedExceptionAction;
 
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.security.UserGroupInformation;
 
 import com.sun.jersey.spi.container.servlet.ServletContainer;
 
@@ -46,28 +44,7 @@ public class RESTServletContainer extend
   @Override
   public void service(final HttpServletRequest request,
       final HttpServletResponse response) throws ServletException, IOException {
-    UserGroupInformation effectiveUser = UserGroupInformation.createProxyUser(
-      request.getRemoteUser(), RESTServlet.getInstance().getUser());
-    try {
-      effectiveUser.doAs(new PrivilegedExceptionAction<Void>() {
-        @Override
-         public Void run() throws ServletException, IOException {
-            callService(request, response);
-            return null;
-          }
-        });
-    } catch (InterruptedException e) {
-      Thread.currentThread().interrupt();
-      throw new IOException(e);
-    }
-  }
-
-  /**
-   * A helper method used to invoke super.service()
-   * on behalf of an effective user with doAs.
-   */
-  void callService(HttpServletRequest request,
-      HttpServletResponse response) throws ServletException, IOException {
+    RESTServlet.getInstance().setEffectiveUser(request.getRemoteUser());
     super.service(request, response);
   }
 }

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java Thu Aug 29 16:28:07 2013
@@ -32,8 +32,8 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
 import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.UriInfo;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.rest.model.CellModel;
 import org.apache.hadoop.hbase.rest.model.CellSetModel;
@@ -187,7 +186,6 @@ public class RowResource extends Resourc
         .build();
     }
 
-    HTablePool pool = servlet.getTablePool();
     HTableInterface table = null;
     try {
       List<RowModel> rows = model.getRows();
@@ -228,7 +226,7 @@ public class RowResource extends Resourc
           LOG.debug("PUT " + put.toString());
         }
       }
-      table = pool.getTable(tableResource.getName());
+      table = servlet.getTable(tableResource.getName());
       table.put(puts);
       table.flushCommits();
       ResponseBuilder response = Response.ok();
@@ -257,7 +255,6 @@ public class RowResource extends Resourc
         .type(MIMETYPE_TEXT).entity("Forbidden" + CRLF)
         .build();
     }
-    HTablePool pool = servlet.getTablePool();
     HTableInterface table = null;
     try {
       byte[] row = rowspec.getRow();
@@ -291,7 +288,7 @@ public class RowResource extends Resourc
       } else {
         put.add(parts[0], null, timestamp, message);
       }
-      table = pool.getTable(tableResource.getName());
+      table = servlet.getTable(tableResource.getName());
       table.put(put);
       if (LOG.isDebugEnabled()) {
         LOG.debug("PUT " + put.toString());
@@ -387,10 +384,9 @@ public class RowResource extends Resourc
         }
       }
     }
-    HTablePool pool = servlet.getTablePool();
     HTableInterface table = null;
     try {
-      table = pool.getTable(tableResource.getName());
+      table = servlet.getTable(tableResource.getName());
       table.delete(delete);
       servlet.getMetrics().incrementSucessfulDeleteRequests(1);
       if (LOG.isDebugEnabled()) {
@@ -417,7 +413,6 @@ public class RowResource extends Resourc
    * @return Response 200 OK, 304 Not modified, 400 Bad request
    */
   Response checkAndPut(final CellSetModel model) {
-    HTablePool pool = servlet.getTablePool();
     HTableInterface table = null;
     try {
       if (model.getRows().size() != 1) {
@@ -467,7 +462,7 @@ public class RowResource extends Resourc
           .build();
       }
 
-      table = pool.getTable(this.tableResource.getName());
+      table = servlet.getTable(this.tableResource.getName());
       boolean retValue = table.checkAndPut(key, valueToPutParts[0],
         valueToPutParts[1], valueToCheckCell.getValue(), put);
       if (LOG.isDebugEnabled()) {
@@ -498,7 +493,6 @@ public class RowResource extends Resourc
    * @return Response 200 OK, 304 Not modified, 400 Bad request
    */
   Response checkAndDelete(final CellSetModel model) {
-    HTablePool pool = servlet.getTablePool();
     HTableInterface table = null;
     Delete delete = null;
     try {
@@ -539,7 +533,7 @@ public class RowResource extends Resourc
           .build();
       }
 
-      table = pool.getTable(tableResource.getName());
+      table = servlet.getTable(tableResource.getName());
       boolean retValue = table.checkAndDelete(key, parts[0], parts[1],
         valueToDeleteCell.getValue(), delete);
       if (LOG.isDebugEnabled()) {

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java Thu Aug 29 16:28:07 2013
@@ -25,16 +25,14 @@ import java.util.NoSuchElementException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.util.StringUtils;
 
 @InterfaceAudience.Private
 public class RowResultGenerator extends ResultGenerator {
@@ -45,8 +43,7 @@ public class RowResultGenerator extends 
 
   public RowResultGenerator(final String tableName, final RowSpec rowspec,
       final Filter filter) throws IllegalArgumentException, IOException {
-    HTablePool pool = RESTServlet.getInstance().getTablePool(); 
-    HTableInterface table = pool.getTable(tableName);
+    HTableInterface table = RESTServlet.getInstance().getTable(tableName);
     try {
       Get get = new Get(rowspec.getRow());
       if (rowspec.hasColumns()) {

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java Thu Aug 29 16:28:07 2013
@@ -101,18 +101,17 @@ public class ScannerResource extends Res
       URI uri = builder.path(id).build();
       servlet.getMetrics().incrementSucessfulPutRequests(1);
       return Response.created(uri).build();
-    } catch (RuntimeException e) {
+    } catch (Exception e) {
       servlet.getMetrics().incrementFailedPutRequests(1);
-      if (e.getCause() instanceof TableNotFoundException) {
+      if (e instanceof TableNotFoundException) {
         return Response.status(Response.Status.NOT_FOUND)
           .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
           .build();
+      } else if (e instanceof RuntimeException) {
+        return Response.status(Response.Status.BAD_REQUEST)
+          .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+          .build();
       }
-      return Response.status(Response.Status.BAD_REQUEST)
-        .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
-        .build();
-    } catch (Exception e) {
-      servlet.getMetrics().incrementFailedPutRequests(1);
       return Response.status(Response.Status.SERVICE_UNAVAILABLE)
         .type(MIMETYPE_TEXT).entity("Unavailable" + CRLF)
         .build();

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java Thu Aug 29 16:28:07 2013
@@ -28,7 +28,6 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
@@ -64,8 +63,7 @@ public class ScannerResultGenerator exte
 
   public ScannerResultGenerator(final String tableName, final RowSpec rowspec,
       final Filter filter, final int caching) throws IllegalArgumentException, IOException {
-    HTablePool pool = RESTServlet.getInstance().getTablePool();
-    HTableInterface table = pool.getTable(tableName);
+    HTableInterface table = RESTServlet.getInstance().getTable(tableName);
     try {
       Scan scan;
       if (rowspec.hasEndRow()) {

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java Thu Aug 29 16:28:07 2013
@@ -31,23 +31,20 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.core.CacheControl;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
 import javax.ws.rs.core.Response.ResponseBuilder;
-
+import javax.ws.rs.core.UriInfo;
 import javax.xml.namespace.QName;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableExistsException;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.rest.model.ColumnSchemaModel;
 import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -77,8 +74,7 @@ public class SchemaResource extends Reso
 
   private HTableDescriptor getTableSchema() throws IOException,
       TableNotFoundException {
-    HTablePool pool = servlet.getTablePool();
-    HTableInterface table = pool.getTable(tableResource.getName());
+    HTableInterface table = servlet.getTable(tableResource.getName());
     try {
       return table.getTableDescriptor();
     } finally {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java Thu Aug 29 16:28:07 2013
@@ -22,6 +22,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.rest.filter.GzipFilter;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.util.StringUtils;
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.servlet.Context;
@@ -47,7 +48,7 @@ public class HBaseRESTTestingUtility {
     }
 
     // Inject the conf for the test by being first to make singleton
-    RESTServlet.getInstance(conf);
+    RESTServlet.getInstance(conf, User.getCurrent().getUGI());
 
     // set up the Jersey servlet container for Jetty
     ServletHolder sh = new ServletHolder(ServletContainer.class);

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java Thu Aug 29 16:28:07 2013
@@ -19,6 +19,9 @@
 
 package org.apache.hadoop.hbase.rest;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
@@ -31,7 +34,13 @@ import javax.xml.bind.Unmarshaller;
 
 import org.apache.commons.httpclient.Header;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.CompatibilityFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
@@ -39,11 +48,10 @@ import org.apache.hadoop.hbase.rest.clie
 import org.apache.hadoop.hbase.rest.model.CellModel;
 import org.apache.hadoop.hbase.rest.model.CellSetModel;
 import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.test.MetricsAssertHelper;
 import org.apache.hadoop.hbase.util.Bytes;
-
-import static org.junit.Assert.*;
-
+import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -546,21 +554,18 @@ public class TestRowResource {
     response = deleteRow(TABLE, ROW_4);
     assertEquals(response.getCode(), 200);
 
-    METRICS_ASSERT.assertCounterGt("requests",
-                                    2l,
-                                    RESTServlet.getInstance(conf).getMetrics().getSource());
-
-    METRICS_ASSERT.assertCounterGt("successfulGet",
-                                   0l,
-                                   RESTServlet.getInstance(conf).getMetrics().getSource());
-
-    METRICS_ASSERT.assertCounterGt("successfulPut",
-                                    0l,
-                                    RESTServlet.getInstance(conf).getMetrics().getSource());
-
-    METRICS_ASSERT.assertCounterGt("successfulDelete",
-                                    0l,
-                                    RESTServlet.getInstance(conf).getMetrics().getSource());
+    UserGroupInformation ugi = User.getCurrent().getUGI();
+    METRICS_ASSERT.assertCounterGt("requests", 2l,
+      RESTServlet.getInstance(conf, ugi).getMetrics().getSource());
+
+    METRICS_ASSERT.assertCounterGt("successfulGet", 0l,
+      RESTServlet.getInstance(conf, ugi).getMetrics().getSource());
+
+    METRICS_ASSERT.assertCounterGt("successfulPut", 0l,
+      RESTServlet.getInstance(conf, ugi).getMetrics().getSource());
+
+    METRICS_ASSERT.assertCounterGt("successfulDelete", 0l,
+      RESTServlet.getInstance(conf, ugi).getMetrics().getSource());
   }
 
   @Test

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java?rev=1518693&r1=1518692&r2=1518693&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java Thu Aug 29 16:28:07 2013
@@ -1933,7 +1933,7 @@ public class TestAccessController extend
     } finally {
       acl.close();
     }
-    HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
+    final HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
     HTableDescriptor htd = new HTableDescriptor(TEST_TABLE2);
     htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
     admin.createTable(htd);
@@ -1944,7 +1944,7 @@ public class TestAccessController extend
     final HRegionServer newRs = newRsThread.getRegionServer();
 
     // Move region to the new RegionServer.
-    HTable table = new HTable(TEST_UTIL.getConfiguration(), TEST_TABLE2);
+    final HTable table = new HTable(TEST_UTIL.getConfiguration(), TEST_TABLE2);
     try {
       NavigableMap<HRegionInfo, ServerName> regions = table
           .getRegionLocations();
@@ -1953,7 +1953,6 @@ public class TestAccessController extend
 
       PrivilegedExceptionAction moveAction = new PrivilegedExceptionAction() {
         public Object run() throws Exception {
-          HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
           admin.move(firstRegion.getKey().getEncodedNameAsBytes(),
               Bytes.toBytes(newRs.getServerName().getServerName()));
           return null;
@@ -1979,7 +1978,6 @@ public class TestAccessController extend
       // permissions.
       PrivilegedExceptionAction putAction = new PrivilegedExceptionAction() {
         public Object run() throws Exception {
-          HTable table = new HTable(TEST_UTIL.getConfiguration(), TEST_TABLE2);
           Put put = new Put(Bytes.toBytes("test"));
           put.add(TEST_FAMILY, Bytes.toBytes("qual"), Bytes.toBytes("value"));
           table.put(put);