Posted to commits@hive.apache.org by ha...@apache.org on 2011/12/29 23:59:03 UTC

svn commit: r1225683 [2/2] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ metastore/if/ metastore/src/gen/thrift/gen-cpp/ metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen/thrift/gen-php...

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1225683&r1=1225682&r2=1225683&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Thu Dec 29 22:59:02 2011
@@ -103,6 +103,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.thrift.TUGIContainingTransport;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
@@ -3445,6 +3446,13 @@ public class HiveMetaStore extends Thrif
 
       return ret;
     }
+
+    @Override
+    public List<String> set_ugi(String username, List<String> groupNames) throws MetaException,
+      TException {
+      Collections.addAll(groupNames, username);
+      return groupNames;
+    }
   }
 
 
@@ -3606,19 +3614,28 @@ public class HiveMetaStore extends Thrif
       TProcessor processor;
       TTransportFactory transFactory;
       if (useSasl) {
+        // we are in secure mode.
          saslServer = bridge.createServer(
            conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_KEYTAB_FILE),
            conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL));
-
         // start delegation token manager
         saslServer.startDelegationTokenSecretManager(conf);
         transFactory = saslServer.createTransportFactory();
-        processor = saslServer.wrapProcessor(new ThriftHiveMetastore.Processor(
+        processor = saslServer.wrapProcessor(new ThriftHiveMetastore.Processor<HMSHandler>(
             new HMSHandler("new db based metaserver", conf)));
+        LOG.info("Starting DB backed MetaStore Server in Secure Mode");
       } else {
-        processor = new ThriftHiveMetastore.Processor(
-            new HMSHandler("new db based metaserver", conf));
+        // we are in non-secure mode.
+        HMSHandler handler = new HMSHandler("new db based metaserver", conf);
+        if (conf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI)) {
+          transFactory = new TUGIContainingTransport.Factory();
+          processor = new TUGIBasedProcessor<HMSHandler>(handler);
+          LOG.info("Starting DB backed MetaStore Server with SetUGI enabled");
+        } else {
-        transFactory = new TTransportFactory();
+          transFactory = new TTransportFactory();
+          processor = new ThriftHiveMetastore.Processor<HMSHandler>(handler);
+          LOG.info("Starting DB backed MetaStore Server");
+        }
       }
 
       TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverTransport)

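The server-side switch in the hunk above is driven entirely by hive.metastore.execute.setugi
(ConfVars.METASTORE_EXECUTE_SET_UGI). A minimal sketch of enabling it programmatically,
assuming a HiveConf is built before the server startup code runs (the class name below is
hypothetical):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

    public class SetUgiConfSketch {
      public static void main(String[] args) {
        // With this flag on (and SASL off), the server startup code picks
        // TUGIContainingTransport.Factory and TUGIBasedProcessor instead of
        // the plain TTransportFactory/Processor pair.
        HiveConf conf = new HiveConf();
        conf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true);
      }
    }
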
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1225683&r1=1225682&r2=1225683&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Thu Dec 29 22:59:02 2011
@@ -28,10 +28,13 @@ import java.lang.reflect.Proxy;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
+import javax.security.auth.login.LoginException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -60,8 +63,10 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TProtocol;
@@ -219,7 +224,8 @@ public class HiveMetaStoreClient impleme
       ((TSocket)transport).setTimeout(1000 * conf.getIntVar(ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT));
 
       // Wrap thrift connection with SASL if enabled.
-      boolean useSasl = conf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL);
+      HadoopShims shim = ShimLoader.getHadoopShims();
+      boolean useSasl = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
       if (useSasl) {
         try {
           HadoopThriftAuthBridge.Client authBridge =
@@ -232,7 +238,7 @@ public class HiveMetaStoreClient impleme
           // submission.
           String tokenSig = conf.get("hive.metastore.token.signature");
           // tokenSig could be null
-          tokenStrForm = ShimLoader.getHadoopShims().getTokenStrForm(tokenSig);
+          tokenStrForm = shim.getTokenStrForm(tokenSig);
 
           if(tokenStrForm != null) {
             // authenticate using delegation tokens via the "DIGEST" mechanism
@@ -264,6 +270,21 @@ public class HiveMetaStoreClient impleme
           LOG.warn("Failed to connect to the MetaStore Server...");
         }
       }
+      if (!useSasl && conf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI)) {
+        // Call set_ugi() only in non-secure mode.
+        try {
+          UserGroupInformation ugi = shim.getUGIForConf(conf);
+          client.set_ugi(ugi.getUserName(), Arrays.asList(ugi.getGroupNames()));
+        } catch (LoginException e) {
+          LOG.warn("Failed to do login. set_ugi() is not successful, continuing without it.", e);
+        } catch (IOException e) {
+          LOG.warn("Failed to find ugi of client. set_ugi() is not successful, " +
+              "continuing without it.", e);
+        } catch (TException e) {
+          LOG.warn("set_ugi() not successful. Likely cause: new client talking to old server. " +
+              "Continuing without it.", e);
+        }
+      }
     }
     if (!isConnected) {
       throw new MetaException("Could not connect to the MetaStore server!");

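A note on the wire contract behind the client block above: the server's set_ugi()
implementation (see the HiveMetaStore.java hunk earlier) simply appends the username to the
group list and echoes it back, and TUGIBasedProcessor (added below) recovers the username by
popping the last element. A self-contained sketch of that round-trip shape, with hypothetical
names:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class SetUgiContractSketch {
      // Mirrors HMSHandler.set_ugi(): the reply is groupNames + [username].
      static List<String> setUgiReply(String username, List<String> groupNames) {
        List<String> reply = new ArrayList<String>(groupNames);
        reply.add(username);
        return reply;
      }

      public static void main(String[] args) {
        List<String> reply = setUgiReply("alice", Arrays.asList("users", "hive"));
        // TUGIBasedProcessor.handleSetUGI() pops the last element to get the
        // username back, leaving just the groups.
        String username = reply.remove(reply.size() - 1);
        System.out.println(username + " " + reply); // alice [users, hive]
      }
    }
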
Added: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java?rev=1225683&view=auto
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java (added)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java Thu Dec 29 22:59:02 2011
@@ -0,0 +1,178 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.security.PrivilegedExceptionAction;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
+import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface;
+import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Processor;
+import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.set_ugi_args;
+import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.set_ugi_result;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.thrift.TUGIContainingTransport;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.thrift.ProcessFunction;
+import org.apache.thrift.TApplicationException;
+import org.apache.thrift.TBase;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TMessage;
+import org.apache.thrift.protocol.TMessageType;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.protocol.TProtocolUtil;
+import org.apache.thrift.protocol.TType;
+
+/** TUGIBasedProcessor is used in non-secure mode for thrift metastore client-server
+ *  communication. It checks whether the first rpc call after connection setup is set_ugi(),
+ *  through which the client sends its ugi to the server. The processor then performs all
+ *  subsequent rpcs on the connection via ugi.doAs(), so every action runs in the client
+ *  user's context. Note that old clients never call set_ugi(), so no ugi reaches the server;
+ *  in that case the server keeps its previous behavior and continues as usual.
+ */
+public class TUGIBasedProcessor<I extends Iface> extends ThriftHiveMetastore.Processor<Iface> {
+
+  private final I iface;
+  private final Map<String, org.apache.thrift.ProcessFunction<I, ? extends TBase<?, ?>>>
+    functions;
+  private final HadoopShims shim;
+
+  @SuppressWarnings("unchecked")
+  public TUGIBasedProcessor(I iface) throws SecurityException, NoSuchFieldException,
+    IllegalArgumentException, IllegalAccessException, NoSuchMethodException,
+    InvocationTargetException {
+    super(iface);
+    this.iface = iface;
+    // TODO get rid of following reflection after THRIFT-1465 is fixed.
+    Method map = Processor.class.getDeclaredMethod("getProcessMap", Map.class);
+    map.setAccessible(true);
+    this.functions = (Map<String, ProcessFunction<I, ? extends TBase<?, ?>>>)
+        map.invoke(null, new HashMap<String, ProcessFunction<I, ? extends TBase<?, ?>>>());
+    shim = ShimLoader.getHadoopShims();
+  }
+
+  @Override
+  public boolean process(final TProtocol in, final TProtocol out) throws TException {
+
+    final TMessage msg = in.readMessageBegin();
+    final ProcessFunction<I, ? extends TBase<?, ?>> fn = functions.get(msg.name);
+    if (fn == null) {
+      TProtocolUtil.skip(in, TType.STRUCT);
+      in.readMessageEnd();
+      TApplicationException x = new TApplicationException(TApplicationException.UNKNOWN_METHOD,
+          "Invalid method name: '"+msg.name+"'");
+      out.writeMessageBegin(new TMessage(msg.name, TMessageType.EXCEPTION, msg.seqid));
+      x.write(out);
+      out.writeMessageEnd();
+      out.getTransport().flush();
+      return true;
+    }
+    TUGIContainingTransport ugiTrans = (TUGIContainingTransport)in.getTransport();
+    // Store ugi in transport if the rpc is set_ugi
+    if (msg.name.equalsIgnoreCase("set_ugi")) {
+      try {
+        handleSetUGI(ugiTrans, fn, msg, in, out);
+      } catch (TException e) {
+        throw e;
+      } catch (Exception e) {
+        throw new TException(e.getCause() == null ? e : e.getCause());
+      }
+      return true;
+    }
+    UserGroupInformation clientUgi = ugiTrans.getClientUGI();
+    if (null == clientUgi) {
+      // No client ugi on the transport at this point means an old client that never called set_ugi().
+      fn.process(msg.seqid, in, out, iface);
+      return true;
+    } else { // Found ugi, perform doAs().
+      PrivilegedExceptionAction<Void> pvea = new PrivilegedExceptionAction<Void>() {
+        public Void run() {
+          try {
+            fn.process(msg.seqid, in, out, iface);
+            return null;
+          } catch (TException te) {
+            throw new RuntimeException(te);
+          }
+        }
+      };
+      try {
+        shim.doAs(clientUgi, pvea);
+        return true;
+      } catch (RuntimeException rte) {
+        if (rte.getCause() instanceof TException) {
+          throw (TException)rte.getCause();
+        }
+        throw rte;
+      } catch (InterruptedException ie) {
+        throw new RuntimeException(ie); // unexpected!
+      } catch (IOException ioe) {
+        throw new RuntimeException(ioe); // unexpected!
+      }
+    }
+  }
+
+  private void handleSetUGI(TUGIContainingTransport ugiTrans,
+      ProcessFunction<I, ? extends TBase<?,?>> fn, TMessage msg, TProtocol iprot, TProtocol oprot)
+      throws TException, SecurityException, NoSuchMethodException, IllegalArgumentException,
+      IllegalAccessException, InvocationTargetException {
+
+    UserGroupInformation clientUgi = ugiTrans.getClientUGI();
+    if (null != clientUgi) {
+      throw new TException(new IllegalStateException("UGI is already set. Resetting is not " +
+          "allowed. Current ugi is: " + clientUgi.getUserName()));
+    }
+
+    // TODO get rid of following reflection after THRIFT-1465 is fixed.
+    Method method = fn.getClass().getDeclaredMethod("getEmptyArgsInstance", new Class<?>[0]);
+    method.setAccessible(true);
+    set_ugi_args args = (set_ugi_args)method.invoke(fn, new Object[0]);
+    try {
+      args.read(iprot);
+    } catch (TProtocolException e) {
+      iprot.readMessageEnd();
+      TApplicationException x = new TApplicationException(TApplicationException.PROTOCOL_ERROR,
+          e.getMessage());
+      oprot.writeMessageBegin(new TMessage(msg.name, TMessageType.EXCEPTION, msg.seqid));
+      x.write(oprot);
+      oprot.writeMessageEnd();
+      oprot.getTransport().flush();
+      return;
+    }
+    iprot.readMessageEnd();
+    // TODO get rid of following reflection after THRIFT-1465 is fixed.
+    method = fn.getClass().getDeclaredMethod("getResult", Iface.class, set_ugi_args.class);
+    method.setAccessible(true);
+    set_ugi_result result = (set_ugi_result)method.invoke(fn, iface, args);
+    List<String> principals = result.getSuccess();
+    // Store the ugi in transport and then continue as usual.
+    ugiTrans.setClientUGI(shim.createRemoteUser(principals.remove(principals.size()-1),
+        principals));
+    oprot.writeMessageBegin(new TMessage(msg.name, TMessageType.REPLY, msg.seqid));
+    result.write(oprot);
+    oprot.writeMessageEnd();
+    oprot.getTransport().flush();
+  }
+}

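One idiom in process() above deserves a comment: a checked TException cannot escape
PrivilegedExceptionAction.run() as declared there, so it is tunnelled out inside a
RuntimeException and unwrapped after doAs() returns. A stripped-down sketch of the same
pattern, with the UGI/shim machinery replaced by a direct call (names hypothetical):

    import java.security.PrivilegedExceptionAction;

    import org.apache.thrift.TException;

    public class ExceptionTunnelSketch {
      static void runUnwrapping(PrivilegedExceptionAction<Void> action) throws TException {
        try {
          action.run(); // stand-in for shim.doAs(clientUgi, action)
        } catch (RuntimeException rte) {
          // Unwrap the TException that run() smuggled out.
          if (rte.getCause() instanceof TException) {
            throw (TException) rte.getCause();
          }
          throw rte;
        } catch (Exception e) {
          throw new RuntimeException(e); // unexpected, as in the processor
        }
      }
    }
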
Modified: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java?rev=1225683&r1=1225682&r2=1225683&view=diff
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java (original)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java Thu Dec 29 22:59:02 2011
@@ -23,7 +23,7 @@ import org.apache.hadoop.hive.conf.HiveC
 
 
 public class TestRemoteHiveMetaStore extends TestHiveMetaStore {
-  private static final String METASTORE_PORT = "29083";
+  protected static final String METASTORE_PORT = "29083";
   private static boolean isServerStarted = false;
 
   public TestRemoteHiveMetaStore() {
@@ -62,14 +62,14 @@ public class TestRemoteHiveMetaStore ext
     // Wait a little bit for the metastore to start. Should probably have
     // a better way of detecting if the metastore has started?
     Thread.sleep(5000);
+    // This is the default case, with setugi off for both client and server.
+    createClient(false);
+  }
 
-    // hive.metastore.local should be defined in HiveConf
+  protected void createClient(boolean setugi) throws Exception {
     hiveConf.setBoolVar(ConfVars.METASTORE_MODE, false);
     hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + METASTORE_PORT);
-    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3);
-    hiveConf.setIntVar(ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY, 60);
-
+    hiveConf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, setugi);
     client = new HiveMetaStoreClient(hiveConf);
   }
-
 }

Added: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnBothClientServer.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnBothClientServer.java?rev=1225683&view=auto
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnBothClientServer.java (added)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnBothClientServer.java Thu Dec 29 22:59:02 2011
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+
+public class TestSetUGIOnBothClientServer extends TestRemoteHiveMetaStore {
+
+  public TestSetUGIOnBothClientServer() {
+    super();
+    isThriftClient = true;
+    // This will turn on setugi on both client and server processes of the test.
+    System.setProperty(ConfVars.METASTORE_EXECUTE_SET_UGI.varname, "true");
+  }
+}

Added: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyClient.java?rev=1225683&view=auto
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyClient.java (added)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyClient.java Thu Dec 29 22:59:02 2011
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+public class TestSetUGIOnOnlyClient extends TestRemoteHiveMetaStore {
+
+  @Override
+  protected void createClient(boolean setugi) throws Exception {
+    // Turn it on for the client only; the server side stays off.
+    super.createClient(true);
+  }
+}

Added: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyServer.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyServer.java?rev=1225683&view=auto
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyServer.java (added)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyServer.java Thu Dec 29 22:59:02 2011
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+public class TestSetUGIOnOnlyServer extends TestSetUGIOnBothClientServer {
+
+  @Override
+  protected void createClient(boolean setugi) throws Exception {
+    // The superclass turns it on for both client and server; turn it off for the client.
+    super.createClient(false);
+  }
+}

Modified: hive/trunk/shims/ivy.xml
URL: http://svn.apache.org/viewvc/hive/trunk/shims/ivy.xml?rev=1225683&r1=1225682&r2=1225683&view=diff
==============================================================================
--- hive/trunk/shims/ivy.xml (original)
+++ hive/trunk/shims/ivy.xml Thu Dec 29 22:59:02 2011
@@ -43,6 +43,8 @@
                 transitive="false"/>
     <dependency org="commons-logging" name="commons-logging-api" rev="${commons-logging-api.version}"
                 transitive="false"/>
+    <dependency org="com.google.guava" name="guava" rev="${guava.version}"
+                transitive="false"/>
     <conflict manager="all" />
   </dependencies>
 </ivy-module>

Modified: hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1225683&r1=1225682&r2=1225683&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Thu Dec 29 22:59:02 2011
@@ -21,9 +21,12 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.List;
 
+import javax.security.auth.Subject;
 import javax.security.auth.login.LoginException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -32,7 +35,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.ClusterStatus;
@@ -54,6 +56,7 @@ import org.apache.hadoop.mapred.lib.Comb
 import org.apache.hadoop.mapred.lib.NullOutputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.tools.HadoopArchives;
@@ -510,6 +513,21 @@ public class Hadoop20Shims implements Ha
   }
 
   @Override
+  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws
+    IOException, InterruptedException {
+    try {
+      Subject.doAs(SecurityUtil.getSubject(ugi), pvea);
+    } catch (PrivilegedActionException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public UserGroupInformation createRemoteUser(String userName, List<String> groupNames) {
+    return new UnixUserGroupInformation(userName, groupNames.toArray(new String[0]));
+  }
+
+  @Override
   public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
     JobTrackerState state;
     switch (clusterStatus.getJobTrackerState()) {

Modified: hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1225683&r1=1225682&r2=1225683&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Thu Dec 29 22:59:02 2011
@@ -21,6 +21,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -30,14 +31,9 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
-import org.apache.hadoop.hive.shims.Hadoop20Shims;
-import org.apache.hadoop.hive.shims.HadoopShims;
-import org.apache.hadoop.hive.shims.Hadoop20Shims.InputSplitShim;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
@@ -517,6 +513,16 @@ public class Hadoop20SShims implements H
   }
   
   @Override
+  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws IOException, InterruptedException {
+    ugi.doAs(pvea);
+  }
+
+  @Override
+  public UserGroupInformation createRemoteUser(String userName, List<String> groupNames) {
+    return UserGroupInformation.createRemoteUser(userName);
+  }
+
+  @Override
   public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
     JobTrackerState state;
     switch (clusterStatus.getJobTrackerState()) {

Modified: hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java?rev=1225683&r1=1225682&r2=1225683&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java (original)
+++ hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java Thu Dec 29 22:59:02 2011
@@ -39,6 +39,7 @@ import org.apache.commons.lang.StringUti
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.SecurityUtil;
@@ -189,131 +190,7 @@ import org.apache.thrift.transport.TTran
         return new String(Base64.encodeBase64(password)).toCharArray();
        }
      }
-    /**
-      * The Thrift SASL transports call Sasl.createSaslServer and Sasl.createSaslClient
-      * inside open(). So, we need to assume the correct UGI when the transport is opened
-      * so that the SASL mechanisms have access to the right principal. This transport
-      * wraps the Sasl transports to set up the right UGI context for open().
-      *
-      * This is used on the client side, where the API explicitly opens a transport to
-      * the server.
-      */
-     private static class TUGIAssumingTransport extends TFilterTransport {
-       private final UserGroupInformation ugi;
-
-       public TUGIAssumingTransport(TTransport wrapped, UserGroupInformation ugi) {
-         super(wrapped);
-         this.ugi = ugi;
-       }
-
-       @Override
-       public void open() throws TTransportException {
-         try {
-           ugi.doAs(new PrivilegedExceptionAction<Void>() {
-             public Void run() {
-               try {
-                 wrapped.open();
-               } catch (TTransportException tte) {
-                 // Wrap the transport exception in an RTE, since UGI.doAs() then goes
-                 // and unwraps this for us out of the doAs block. We then unwrap one
-                 // more time in our catch clause to get back the TTE. (ugh)
-                 throw new RuntimeException(tte);
-               }
-               return null;
-             }
-           });
-         } catch (IOException ioe) {
-           assert false : "Never thrown!";
-           throw new RuntimeException("Received an ioe we never threw!", ioe);
-         } catch (InterruptedException ie) {
-           assert false : "We never expect to see an InterruptedException thrown in this block";
-           throw new RuntimeException("Received an ie we never threw!", ie);
-         } catch (RuntimeException rte) {
-           if (rte.getCause() instanceof TTransportException) {
-             throw (TTransportException)rte.getCause();
-           } else {
-             throw rte;
-           }
-         }
        }
-     }
-    /**
-      * Transport that simply wraps another transport.
-      * This is the equivalent of FilterInputStream for Thrift transports.
-      */
-     private static class TFilterTransport extends TTransport {
-       protected final TTransport wrapped;
-
-       public TFilterTransport(TTransport wrapped) {
-         this.wrapped = wrapped;
-       }
-
-       @Override
-       public void open() throws TTransportException {
-         wrapped.open();
-       }
-
-       @Override
-       public boolean isOpen() {
-         return wrapped.isOpen();
-       }
-
-       @Override
-       public boolean peek() {
-         return wrapped.peek();
-       }
-
-       @Override
-       public void close() {
-         wrapped.close();
-       }
-
-       @Override
-       public int read(byte[] buf, int off, int len) throws TTransportException {
-         return wrapped.read(buf, off, len);
-       }
-
-       @Override
-       public int readAll(byte[] buf, int off, int len) throws TTransportException {
-         return wrapped.readAll(buf, off, len);
-       }
-
-       @Override
-       public void write(byte[] buf) throws TTransportException {
-         wrapped.write(buf);
-       }
-
-       @Override
-       public void write(byte[] buf, int off, int len) throws TTransportException {
-         wrapped.write(buf, off, len);
-       }
-
-       @Override
-       public void flush() throws TTransportException {
-         wrapped.flush();
-       }
-
-       @Override
-       public byte[] getBuffer() {
-         return wrapped.getBuffer();
-       }
-
-       @Override
-       public int getBufferPosition() {
-         return wrapped.getBufferPosition();
-       }
-
-       @Override
-       public int getBytesRemainingInBuffer() {
-         return wrapped.getBytesRemainingInBuffer();
-       }
-
-       @Override
-       public void consumeBuffer(int len) {
-         wrapped.consumeBuffer(len);
-       }
-     }
-   }
 
    public static class Server extends HadoopThriftAuthBridge.Server {
      final UserGroupInformation realUgi;

Added: hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/client/TUGIAssumingTransport.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/client/TUGIAssumingTransport.java?rev=1225683&view=auto
==============================================================================
--- hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/client/TUGIAssumingTransport.java (added)
+++ hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/client/TUGIAssumingTransport.java Thu Dec 29 22:59:02 2011
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.thrift.client;
+
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+
+import org.apache.hadoop.hive.thrift.TFilterTransport;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+
+/**
+ * The Thrift SASL transports call Sasl.createSaslServer and Sasl.createSaslClient
+ * inside open(). So, we need to assume the correct UGI when the transport is opened
+ * so that the SASL mechanisms have access to the right principal. This transport
+ * wraps the Sasl transports to set up the right UGI context for open().
+ *
+ * This is used on the client side, where the API explicitly opens a transport to
+ * the server.
+ */
+public class TUGIAssumingTransport extends TFilterTransport {
+  protected UserGroupInformation ugi;
+
+  public TUGIAssumingTransport(TTransport wrapped, UserGroupInformation ugi) {
+    super(wrapped);
+    this.ugi = ugi;
+  }
+
+  @Override
+  public void open() throws TTransportException {
+    try {
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+        public Void run() {
+          try {
+            wrapped.open();
+          } catch (TTransportException tte) {
+            // Wrap the transport exception in an RTE, since UGI.doAs() then goes
+            // and unwraps this for us out of the doAs block. We then unwrap one
+            // more time in our catch clause to get back the TTE. (ugh)
+            throw new RuntimeException(tte);
+          }
+          return null;
+        }
+      });
+    } catch (IOException ioe) {
+      throw new RuntimeException("Received an ioe we never threw!", ioe);
+    } catch (InterruptedException ie) {
+      throw new RuntimeException("Received an ie we never threw!", ie);
+    } catch (RuntimeException rte) {
+      if (rte.getCause() instanceof TTransportException) {
+        throw (TTransportException) rte.getCause();
+      } else {
+        throw rte;
+      }
+    }
+  }
+}
\ No newline at end of file

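This class is a verbatim extraction of the former inner class, now public so the metastore
client path can reuse it. A minimal usage sketch on the client side, assuming the SASL
transport and the login user's UGI are obtained elsewhere (in practice inside
HadoopThriftAuthBridge20S.Client; the socket below is just a placeholder):

    import org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.thrift.transport.TSocket;
    import org.apache.thrift.transport.TTransport;

    public class UgiAssumingOpenSketch {
      public static void main(String[] args) throws Exception {
        // Stand-in for the real SASL transport built by the auth bridge.
        TTransport underlying = new TSocket("localhost", 9083);
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        // open() now runs inside ugi.doAs(), so Sasl.createSaslClient() sees
        // the right Subject/principal.
        TTransport transport = new TUGIAssumingTransport(underlying, ugi);
        transport.open();
      }
    }
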
Modified: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1225683&r1=1225682&r2=1225683&view=diff
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Thu Dec 29 22:59:02 2011
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.shims;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.List;
 
 import javax.security.auth.login.LoginException;
 
@@ -164,6 +166,24 @@ public interface HadoopShims {
   public UserGroupInformation getUGIForConf(Configuration conf) throws LoginException, IOException;
 
   /**
+   * Used by the metastore server to perform a requested rpc in the client's context.
+   * @param ugi the client's UserGroupInformation
+   * @param pvea the action to perform as the client
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws
+    IOException, InterruptedException;
+
+  /**
+   * Used by the metastore server to create a UGI object for a remote user.
+   * @param userName remote user name
+   * @param groupNames group names associated with the remote user name
+   * @return UGI created for the remote user.
+   */
+  public UserGroupInformation createRemoteUser(String userName, List<String> groupNames);
+
+  /**
    * Get the short name corresponding to the subject in the passed UGI
    *
    * In secure versions of Hadoop, this returns the short name (after

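Callers are expected to obtain these methods through ShimLoader so the implementation
matching the Hadoop version on the classpath is used. Note the asymmetry between the two
implementations above: Hadoop20Shims feeds groupNames into a UnixUserGroupInformation, while
Hadoop20SShims ignores them and lets secure Hadoop resolve groups via its own group mapping.
A hedged usage sketch:

    import java.security.PrivilegedExceptionAction;
    import java.util.Arrays;

    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.security.UserGroupInformation;

    public class ShimDoAsSketch {
      public static void main(String[] args) throws Exception {
        HadoopShims shim = ShimLoader.getHadoopShims();
        UserGroupInformation remoteUser =
            shim.createRemoteUser("alice", Arrays.asList("users"));
        shim.doAs(remoteUser, new PrivilegedExceptionAction<Void>() {
          public Void run() {
            // Anything here (e.g. warehouse FileSystem calls) runs as alice.
            return null;
          }
        });
      }
    }
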
Added: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/TFilterTransport.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/TFilterTransport.java?rev=1225683&view=auto
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/TFilterTransport.java (added)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/TFilterTransport.java Thu Dec 29 22:59:02 2011
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.thrift;
+
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+
+/**
+ * Transport that simply wraps another transport.
+ * This is the equivalent of FilterInputStream for Thrift transports.
+ */
+public class TFilterTransport extends TTransport {
+  protected final TTransport wrapped;
+
+  public TFilterTransport(TTransport wrapped) {
+    this.wrapped = wrapped;
+  }
+
+  @Override
+  public void open() throws TTransportException {
+    wrapped.open();
+  }
+
+  @Override
+  public boolean isOpen() {
+    return wrapped.isOpen();
+  }
+
+  @Override
+  public boolean peek() {
+    return wrapped.peek();
+  }
+
+  @Override
+  public void close() {
+    wrapped.close();
+  }
+
+  @Override
+  public int read(byte[] buf, int off, int len) throws TTransportException {
+    return wrapped.read(buf, off, len);
+  }
+
+  @Override
+  public int readAll(byte[] buf, int off, int len) throws TTransportException {
+    return wrapped.readAll(buf, off, len);
+  }
+
+  @Override
+  public void write(byte[] buf) throws TTransportException {
+    wrapped.write(buf);
+  }
+
+  @Override
+  public void write(byte[] buf, int off, int len) throws TTransportException {
+    wrapped.write(buf, off, len);
+  }
+
+  @Override
+  public void flush() throws TTransportException {
+    wrapped.flush();
+  }
+
+  @Override
+  public byte[] getBuffer() {
+    return wrapped.getBuffer();
+  }
+
+  @Override
+  public int getBufferPosition() {
+    return wrapped.getBufferPosition();
+  }
+
+  @Override
+  public int getBytesRemainingInBuffer() {
+    return wrapped.getBytesRemainingInBuffer();
+  }
+
+  @Override
+  public void consumeBuffer(int len) {
+    wrapped.consumeBuffer(len);
+  }
+}
\ No newline at end of file

Added: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/TUGIContainingTransport.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/TUGIContainingTransport.java?rev=1225683&view=auto
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/TUGIContainingTransport.java (added)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/TUGIContainingTransport.java Thu Dec 29 22:59:02 2011
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.thrift;
+
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportFactory;
+
+import com.google.common.collect.MapMaker;
+
+/**
+ * TUGIContainingTransport associates ugi information with a connection (transport).
+ * It wraps the underlying <code>TSocket</code> transport and annotates it with the ugi.
+ */
+public class TUGIContainingTransport extends TFilterTransport {
+
+  private UserGroupInformation ugi;
+
+  public TUGIContainingTransport(TTransport wrapped, UserGroupInformation ugi) {
+    super(wrapped);
+    this.ugi = ugi;
+  }
+
+  public UserGroupInformation getClientUGI() {
+    return ugi;
+  }
+
+  public void setClientUGI(UserGroupInformation ugi) {
+    this.ugi = ugi;
+  }
+
+  /**
+   * Factory to create TUGIContainingTransport.
+   */
+  public static class Factory extends TTransportFactory {
+
+    // Need a concurrent weak hashmap.
+    private static final ConcurrentMap<TTransport, TUGIContainingTransport> transMap =
+        new MapMaker().weakKeys().makeMap();
+
+    /**
+     * Get a new <code>TUGIContainingTransport</code> instance, or reuse the
+     * existing one if a <code>TUGIContainingTransport</code> has already been
+     * created before using the given <code>TTransport</code> as an underlying
+     * transport. This ensures that a given underlying transport instance
+     * receives the same <code>TUGIContainingTransport</code>.
+     */
+    @Override
+    public TUGIContainingTransport getTransport(TTransport trans) {
+
+      // UGI information is not available at connection setup time, it will be set later
+      // via set_ugi() rpc.
+      transMap.putIfAbsent(trans, new TUGIContainingTransport(trans, null));
+      return transMap.get(trans);
+    }
+  }
+}
\ No newline at end of file
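
Why Guava here (and why ivy.xml above grew a guava dependency): the Factory's map must use
weak keys so that a TUGIContainingTransport, and the UGI it carries, can be garbage-collected
as soon as the underlying transport is; a plain HashMap would leak one entry per closed
connection. A minimal sketch of the MapMaker idiom used above (note that weakKeys() also
switches the map to identity comparison, which is what this transport cache wants):

    import java.util.concurrent.ConcurrentMap;

    import com.google.common.collect.MapMaker;

    public class WeakKeysMapSketch {
      public static void main(String[] args) {
        ConcurrentMap<Object, String> map = new MapMaker().weakKeys().makeMap();
        Object key = new Object();
        map.putIfAbsent(key, "value");
        System.out.println(map.get(key)); // value
        key = null; // once unreachable, the entry becomes collectible
      }
    }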