
svn commit: r1633058 - in /hive/trunk: jdbc/src/java/org/apache/hive/jdbc/ ql/src/java/org/apache/hadoop/hive/ql/util/ service/src/java/org/apache/hive/service/server/ shims/0.20/src/main/java/org/apache/hadoop/hive/shims/ shims/common-secure/src/main/...

Author: vgumashta
Date: Mon Oct 20 06:52:26 2014
New Revision: 1633058

URL: http://svn.apache.org/r1633058
Log:
HIVE-8173: HiveServer2 dynamic service discovery: figure out best ZooKeeper ACLs for security (Vaibhav Gumashta reviewed by Thejas Nair)

Modified:
    hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java
    hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java
    hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java

Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java?rev=1633058&r1=1633057&r2=1633058&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java Mon Oct 20 06:52:26 2014
@@ -295,10 +295,6 @@ public class Utils {
     // key=value pattern
     Pattern pattern = Pattern.compile("([^;]*)=([^;]*)[;]?");
 
-    Map<String, String> sessionVarMap = connParams.getSessionVars();
-    Map<String, String> hiveConfMap = connParams.getHiveConfs();
-    Map<String, String> hiveVarMap = connParams.getHiveVars();
-
     // dbname and session settings
     String sessVars = jdbcURI.getPath();
     if ((sessVars != null) && !sessVars.isEmpty()) {
@@ -315,7 +311,7 @@ public class Utils {
         if (sessVars != null) {
           Matcher sessMatcher = pattern.matcher(sessVars);
           while (sessMatcher.find()) {
-            if (sessionVarMap.put(sessMatcher.group(1), sessMatcher.group(2)) != null) {
+            if (connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2)) != null) {
               throw new JdbcUriParseException("Bad URL format: Multiple values for property "
                   + sessMatcher.group(1));
             }
@@ -332,7 +328,7 @@ public class Utils {
     if (confStr != null) {
       Matcher confMatcher = pattern.matcher(confStr);
       while (confMatcher.find()) {
-        hiveConfMap.put(confMatcher.group(1), confMatcher.group(2));
+        connParams.getHiveConfs().put(confMatcher.group(1), confMatcher.group(2));
       }
     }
 
@@ -341,7 +337,7 @@ public class Utils {
     if (varStr != null) {
       Matcher varMatcher = pattern.matcher(varStr);
       while (varMatcher.find()) {
-        hiveVarMap.put(varMatcher.group(1), varMatcher.group(2));
+        connParams.getHiveVars().put(varMatcher.group(1), varMatcher.group(2));
       }
     }
 
@@ -350,19 +346,19 @@ public class Utils {
     String usageUrlBase = "jdbc:hive2://<host>:<port>/dbName;";
     // Handle deprecation of AUTH_QOP_DEPRECATED
     newUsage = usageUrlBase + JdbcConnectionParams.AUTH_QOP + "=<qop_value>";
-    handleParamDeprecation(sessionVarMap, sessionVarMap, JdbcConnectionParams.AUTH_QOP_DEPRECATED,
-        JdbcConnectionParams.AUTH_QOP, newUsage);
+    handleParamDeprecation(connParams.getSessionVars(), connParams.getSessionVars(),
+        JdbcConnectionParams.AUTH_QOP_DEPRECATED, JdbcConnectionParams.AUTH_QOP, newUsage);
 
     // Handle deprecation of TRANSPORT_MODE_DEPRECATED
     newUsage = usageUrlBase + JdbcConnectionParams.TRANSPORT_MODE + "=<transport_mode_value>";
-    handleParamDeprecation(hiveConfMap, sessionVarMap,
+    handleParamDeprecation(connParams.getHiveConfs(), connParams.getSessionVars(),
         JdbcConnectionParams.TRANSPORT_MODE_DEPRECATED, JdbcConnectionParams.TRANSPORT_MODE,
         newUsage);
 
     // Handle deprecation of HTTP_PATH_DEPRECATED
     newUsage = usageUrlBase + JdbcConnectionParams.HTTP_PATH + "=<http_path_value>";
-    handleParamDeprecation(hiveConfMap, sessionVarMap, JdbcConnectionParams.HTTP_PATH_DEPRECATED,
-        JdbcConnectionParams.HTTP_PATH, newUsage);
+    handleParamDeprecation(connParams.getHiveConfs(), connParams.getSessionVars(),
+        JdbcConnectionParams.HTTP_PATH_DEPRECATED, JdbcConnectionParams.HTTP_PATH, newUsage);
 
     // Extract host, port
     if (connParams.isEmbeddedMode()) {
@@ -374,6 +370,7 @@ public class Utils {
       // Else substitute the dummy authority with a resolved one.
       // In case of dynamic service discovery using ZooKeeper, it picks a server uri from ZooKeeper
       String resolvedAuthorityString = resolveAuthority(connParams);
+      LOG.info("Resolved authority: " + resolvedAuthorityString);
       uri = uri.replace(dummyAuthorityString, resolvedAuthorityString);
       connParams.setJdbcUriString(uri);
       // Create a Java URI from the resolved URI for extracting the host/port
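
[Editor's note] A minimal, standalone sketch (not part of this commit) of how the key=value
pattern above splits a session-variable segment of a JDBC URI. The class name and sample
string are invented for illustration; only the regex and the duplicate-key check mirror
Utils.java.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class KeyValueParseSketch {
      public static void main(String[] args) {
        // Same "key=value[;]" pattern used in Utils above.
        Pattern pattern = Pattern.compile("([^;]*)=([^;]*)[;]?");
        String sessVars = "user=hive;ssl=true"; // placeholder session-variable segment
        Map<String, String> sessionVars = new HashMap<String, String>();
        Matcher sessMatcher = pattern.matcher(sessVars);
        while (sessMatcher.find()) {
          // Duplicate keys are rejected, mirroring the JdbcUriParseException above.
          if (sessionVars.put(sessMatcher.group(1), sessMatcher.group(2)) != null) {
            throw new IllegalArgumentException(
                "Bad URL format: Multiple values for property " + sessMatcher.group(1));
          }
        }
        System.out.println(sessionVars); // prints both parsed entries
      }
    }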

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java?rev=1633058&r1=1633057&r2=1633058&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java Mon Oct 20 06:52:26 2014
@@ -18,12 +18,18 @@
 
 package org.apache.hadoop.hive.ql.util;
 
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.Watcher;
@@ -33,6 +39,7 @@ import org.apache.zookeeper.data.ACL;
 public class ZooKeeperHiveHelper {
   public static final Log LOG = LogFactory.getLog(ZooKeeperHiveHelper.class.getName());
   public static final String ZOOKEEPER_PATH_SEPARATOR = "/";
+  public static final String SASL_LOGIN_CONTEXT_NAME = "HiveZooKeeperClient";
   /**
    * Get the ensemble server addresses from the configuration. The format is: host1:port,
    * host2:port..
@@ -94,4 +101,55 @@ public class ZooKeeperHiveHelper {
     }
   }
 
+  /**
+   * Dynamically sets up the JAAS configuration for the ZooKeeper client.
+   * @param principal Kerberos principal to log in as
+   * @param keyTabFile path to the keytab file for the principal
+   */
+  public static void setUpJaasConfiguration(String principal, String keyTabFile) {
+    JaasConfiguration jaasConf =
+        new JaasConfiguration(ZooKeeperHiveHelper.SASL_LOGIN_CONTEXT_NAME, principal, keyTabFile);
+    // Install the Configuration in the runtime.
+    javax.security.auth.login.Configuration.setConfiguration(jaasConf);
+  }
+
+  /**
+   * A JAAS configuration for ZooKeeper clients, intended for use with SASL/Kerberos authentication.
+   */
+  private static class JaasConfiguration extends javax.security.auth.login.Configuration {
+    // Current installed Configuration
+    private javax.security.auth.login.Configuration baseConfig =
+        javax.security.auth.login.Configuration.getConfiguration();
+    private final String loginContextName;
+    private final String principal;
+    private final String keyTabFile;
+
+    public JaasConfiguration(String hiveLoginContextName, String principal, String keyTabFile) {
+      this.loginContextName = hiveLoginContextName;
+      this.principal = principal;
+      this.keyTabFile = keyTabFile;
+    }
+
+    @Override
+    public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
+      if (loginContextName.equals(appName)) {
+        Map<String, String> krbOptions = new HashMap<String, String>();
+        krbOptions.put("doNotPrompt", "true");
+        krbOptions.put("storeKey", "true");
+        krbOptions.put("useKeyTab", "true");
+        krbOptions.put("principal", principal);
+        krbOptions.put("keyTab", keyTabFile);
+        krbOptions.put("refreshKrb5Config", "true");
+        AppConfigurationEntry hiveZooKeeperClientEntry =
+            new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
+                LoginModuleControlFlag.REQUIRED, krbOptions);
+        return new AppConfigurationEntry[] { hiveZooKeeperClientEntry };
+      }
+      // Try the base config
+      if (baseConfig != null) {
+        return baseConfig.getAppConfigurationEntry(appName);
+      }
+      return null;
+    }
+  }
 }
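
[Editor's note] A rough usage sketch (not part of the commit) showing how the new helper is
wired up before a ZooKeeper client is created. The principal, keytab path, and ensemble
string are placeholder values; only the property key and helper calls come from the diff.

    import org.apache.hadoop.hive.ql.util.ZooKeeperHiveHelper;
    import org.apache.zookeeper.ZooKeeper;
    import org.apache.zookeeper.client.ZooKeeperSaslClient;

    public class SaslZooKeeperClientSketch {
      public static void main(String[] args) throws Exception {
        String principal = "hive/host.example.com@EXAMPLE.COM";  // placeholder
        String keyTabFile = "/etc/security/keytabs/hive.keytab"; // placeholder

        // Point the ZooKeeper client library at the "HiveZooKeeperClient" JAAS section ...
        System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY,
            ZooKeeperHiveHelper.SASL_LOGIN_CONTEXT_NAME);
        // ... and install a JAAS Configuration that defines that section from the keytab.
        ZooKeeperHiveHelper.setUpJaasConfiguration(principal, keyTabFile);

        // Clients created after this point will attempt SASL/Kerberos with the ensemble.
        ZooKeeper zooKeeperClient =
            new ZooKeeper("zk1:2181,zk2:2181,zk3:2181", 60000, // placeholder ensemble
                new ZooKeeperHiveHelper.DummyWatcher());
        System.out.println("Session id: 0x" + Long.toHexString(zooKeeperClient.getSessionId()));
        zooKeeperClient.close();
      }
    }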

Modified: hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java?rev=1633058&r1=1633057&r2=1633058&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java Mon Oct 20 06:52:26 2014
@@ -18,7 +18,9 @@
 
 package org.apache.hive.service.server;
 
+import java.io.IOException;
 import java.nio.charset.Charset;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
 
@@ -36,6 +38,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
 import org.apache.hadoop.hive.ql.util.ZooKeeperHiveHelper;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hive.common.util.HiveVersionInfo;
 import org.apache.hive.service.CompositeService;
@@ -48,7 +51,10 @@ import org.apache.zookeeper.KeeperExcept
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
 import org.apache.zookeeper.ZooDefs.Ids;
+import org.apache.zookeeper.ZooDefs.Perms;
 import org.apache.zookeeper.ZooKeeper;
+import org.apache.zookeeper.client.ZooKeeperSaslClient;
+import org.apache.zookeeper.data.ACL;
 
 /**
  * HiveServer2.
@@ -115,14 +121,19 @@ public class HiveServer2 extends Composi
     String rootNamespace = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE);
     String instanceURI = getServerInstanceURI(hiveConf);
     byte[] znodeDataUTF8 = instanceURI.getBytes(Charset.forName("UTF-8"));
+    // Znode ACLs
+    List<ACL> nodeAcls = new ArrayList<ACL>();
+    setUpAuthAndAcls(hiveConf, nodeAcls);
+    // Create a ZooKeeper client
     zooKeeperClient =
         new ZooKeeper(zooKeeperEnsemble, zooKeeperSessionTimeout,
             new ZooKeeperHiveHelper.DummyWatcher());
-
-    // Create the parent znodes recursively; ignore if the parent already exists
+    // Create the parent znodes recursively; ignore if the parent already exists.
+    // If pre-creating the parent on a kerberized cluster, ensure that the proper ACLs are set,
+    // as explained in {@link #setUpAuthAndAcls(HiveConf, List<ACL>) setUpAuthAndAcls}
     try {
-      ZooKeeperHiveHelper.createPathRecursively(zooKeeperClient, rootNamespace,
-          Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
+      ZooKeeperHiveHelper.createPathRecursively(zooKeeperClient, rootNamespace, nodeAcls,
+          CreateMode.PERSISTENT);
       LOG.info("Created the root name space: " + rootNamespace + " on ZooKeeper for HiveServer2");
     } catch (KeeperException e) {
       if (e.code() != KeeperException.Code.NODEEXISTS) {
@@ -133,12 +144,12 @@ public class HiveServer2 extends Composi
     // Create a znode under the rootNamespace parent for this instance of the server
     // Znode name: serverUri=host:port;version=versionInfo;sequence=sequenceNumber
     try {
-      String znodePath =
+      String pathPrefix =
           ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + rootNamespace
               + ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + "serverUri=" + instanceURI + ";"
               + "version=" + HiveVersionInfo.getVersion() + ";" + "sequence=";
       znodePath =
-          zooKeeperClient.create(znodePath, znodeDataUTF8, Ids.OPEN_ACL_UNSAFE,
+          zooKeeperClient.create(pathPrefix, znodeDataUTF8, nodeAcls,
               CreateMode.EPHEMERAL_SEQUENTIAL);
       setRegisteredWithZooKeeper(true);
       // Set a watch on the znode
@@ -154,6 +165,48 @@ public class HiveServer2 extends Composi
   }
 
   /**
+   * Set up ACLs for znodes based on whether the cluster is secure or not.
+   * On a kerberized cluster, ZooKeeper performs Kerberos-SASL authentication.
+   * We give Read privilege to the world, but Create/Delete/Write/Admin to the authenticated user.
+   * On a non-kerberized cluster, we give Create/Read/Delete/Write/Admin privileges to the world.
+   *
+   * For a kerberized cluster, we also dynamically set up the client's JAAS conf.
+   * @param hiveConf server configuration, used to read the Kerberos principal and keytab
+   * @param nodeAcls list that this method populates with the ACLs to use for the znodes
+   * @throws Exception
+   */
+  private void setUpAuthAndAcls(HiveConf hiveConf, List<ACL> nodeAcls) throws Exception {
+    if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+      String principal =
+          ShimLoader.getHadoopShims().getResolvedPrincipal(
+              hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL));
+      String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
+      if (principal.isEmpty()) {
+        throw new IOException(
+            "HiveServer2 Kerberos principal is empty");
+      }
+      if (keyTabFile.isEmpty()) {
+        throw new IOException(
+            "HiveServer2 Kerberos keytab is empty");
+      }
+      // ZooKeeper property name to pick the correct JAAS conf section
+      System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY,
+          ZooKeeperHiveHelper.SASL_LOGIN_CONTEXT_NAME);
+      // Install the JAAS Configuration for the runtime
+      ZooKeeperHiveHelper.setUpJaasConfiguration(principal, keyTabFile);
+      // Read all to the world
+      nodeAcls.addAll(Ids.READ_ACL_UNSAFE);
+      // Create/Delete/Write/Admin to the authenticated user
+      nodeAcls.add(new ACL(Perms.ALL, Ids.AUTH_IDS));
+    } else {
+      // ACLs for znodes on a non-kerberized cluster
+      // Create/Read/Delete/Write/Admin to the world
+      nodeAcls.addAll(Ids.OPEN_ACL_UNSAFE);
+    }
+  }
+
+  /**
    * The watcher class which sets the de-register flag when the znode corresponding to this server
    * instance is deleted. Additionally, it shuts down the server if there are no more active client
    * sessions at the time of receiving a 'NodeDeleted' notification from ZooKeeper.
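
[Editor's note] A small standalone sketch (not from the commit) of the ACL scheme that
setUpAuthAndAcls implements above; the class and method names here are invented for
illustration.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.zookeeper.ZooDefs.Ids;
    import org.apache.zookeeper.ZooDefs.Perms;
    import org.apache.zookeeper.data.ACL;

    public class ZnodeAclSketch {
      // Build the znode ACL list: world-readable plus full control for the
      // SASL-authenticated identity on secure clusters, wide open otherwise.
      static List<ACL> aclsFor(boolean kerberosEnabled) {
        List<ACL> nodeAcls = new ArrayList<ACL>();
        if (kerberosEnabled) {
          nodeAcls.addAll(Ids.READ_ACL_UNSAFE);           // READ for the world
          nodeAcls.add(new ACL(Perms.ALL, Ids.AUTH_IDS)); // full perms for the authenticated user
        } else {
          nodeAcls.addAll(Ids.OPEN_ACL_UNSAFE);           // everything for the world
        }
        return nodeAcls;
      }

      public static void main(String[] args) {
        System.out.println("secure:   " + aclsFor(true));
        System.out.println("insecure: " + aclsFor(false));
      }
    }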

Modified: hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1633058&r1=1633057&r2=1633058&view=diff
==============================================================================
--- hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Mon Oct 20 06:52:26 2014
@@ -620,6 +620,12 @@ public class Hadoop20Shims implements Ha
   }
 
   @Override
+  public String getResolvedPrincipal(String principal) throws IOException {
+    // Not supported
+    return null;
+  }
+
+  @Override
   public void reLoginUserFromKeytab() throws IOException{
     throwKerberosUnsupportedError();
   }

Modified: hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1633058&r1=1633057&r2=1633058&view=diff
==============================================================================
--- hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Mon Oct 20 06:52:26 2014
@@ -573,6 +573,17 @@ public abstract class HadoopShimsSecure 
     return UserGroupInformation.loginUserFromKeytabAndReturnUGI(hostPrincipal, keytabFile);
   }
 
+  /**
+   * Convert a Kerberos principal name pattern to a valid Kerberos principal name.
+   * @param principal principal name pattern (may contain a _HOST token)
+   * @return the resolved principal name
+   * @throws IOException
+   */
+  @Override
+  public String getResolvedPrincipal(String principal) throws IOException {
+    return SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
+  }
+
   @Override
   public String getTokenFileLocEnvName() {
     return UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION;
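
[Editor's note] A minimal sketch (again, not part of the commit) of what getResolvedPrincipal
does on the secure shim: SecurityUtil.getServerPrincipal replaces the _HOST token in a
principal pattern, and passing "0.0.0.0", as the shim does above, substitutes the local
machine's canonical hostname. The pattern value below is a placeholder.

    import java.io.IOException;

    import org.apache.hadoop.security.SecurityUtil;

    public class ResolvePrincipalSketch {
      public static void main(String[] args) throws IOException {
        String pattern = "hive/_HOST@EXAMPLE.COM"; // placeholder principal pattern
        // "0.0.0.0" tells Hadoop to substitute the local host's fully qualified name for _HOST.
        String resolved = SecurityUtil.getServerPrincipal(pattern, "0.0.0.0");
        System.out.println(resolved); // e.g. hive/myhost.example.com@EXAMPLE.COM
      }
    }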

Modified: hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1633058&r1=1633057&r2=1633058&view=diff
==============================================================================
--- hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Mon Oct 20 06:52:26 2014
@@ -61,6 +61,7 @@ import org.apache.hadoop.mapreduce.Outpu
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 
@@ -322,6 +323,14 @@ public interface HadoopShims {
       String keytabFile) throws IOException;
 
   /**
+   * Convert a Kerberos principal name pattern to a valid Kerberos principal name.
+   * @param principal principal name pattern (may contain a _HOST token)
+   * @return the resolved principal name
+   * @throws IOException
+   */
+  public String getResolvedPrincipal(String principal) throws IOException;
+
+  /**
    * Perform kerberos re-login using the given principal and keytab, to renew
    * the credentials
    * @throws IOException