Posted to commits@hive.apache.org by vi...@apache.org on 2017/10/17 02:43:03 UTC
[1/3] hive git commit: HIVE-17371 : Move tokenstores to metastore module (Vihang Karajgaonkar, reviewed by Alan Gates, Thejas M Nair)
Repository: hive
Updated Branches:
refs/heads/master 9975131cc -> 8fea11769
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenStore.java
index 0cafeff..ed14998 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenStore.java
@@ -21,7 +21,6 @@ import java.io.Closeable;
import java.util.List;
import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge.Server.ServerMode;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
/**
@@ -30,7 +29,6 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecret
* Internal, store specific errors are translated into {@link TokenStoreException}.
*/
public interface DelegationTokenStore extends Configurable, Closeable {
-
/**
* Exception for internal token store errors that typically cannot be handled by the caller.
*/
@@ -111,8 +109,8 @@ public interface DelegationTokenStore extends Configurable, Closeable {
/**
* @param hmsHandler ObjectStore used by DBTokenStore
- * @param smode Indicate whether this is a metastore or hiveserver2 token store
+ * @param serverMode indicates whether this token store is for the Metastore or HiveServer2
*/
- void init(Object hmsHandler, ServerMode smode);
+ void init(Object hmsHandler, HadoopThriftAuthBridge.Server.ServerMode serverMode);
}
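
The hunk above drops the direct ServerMode import, so implementations and callers now spell out the nested enum. A minimal caller sketch (not part of the commit), assuming the standalone-metastore classes are on the classpath:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.metastore.security.MemoryTokenStore;

public class TokenStoreInitSketch {
  public static void main(String[] args) throws Exception {
    MemoryTokenStore store = new MemoryTokenStore();
    store.setConf(new Configuration());
    // DBTokenStore would receive the HMSHandler here; MemoryTokenStore ignores the argument.
    store.init(null, HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
    store.close();
  }
}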
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/MemoryTokenStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/MemoryTokenStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/MemoryTokenStore.java
index c484cd3..c29dc79 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/MemoryTokenStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/MemoryTokenStore.java
@@ -26,7 +26,6 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge.Server.ServerMode;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -130,7 +129,7 @@ public class MemoryTokenStore implements DelegationTokenStore {
}
@Override
- public void init(Object hmsHandler, ServerMode smode) throws TokenStoreException {
+ public void init(Object hmsHandler, HadoopThriftAuthBridge.Server.ServerMode smode) throws TokenStoreException {
// no-op
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/MetastoreDelegationTokenManager.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/MetastoreDelegationTokenManager.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/MetastoreDelegationTokenManager.java
index 2b0110f..3cfdd8a 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/MetastoreDelegationTokenManager.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/MetastoreDelegationTokenManager.java
@@ -26,6 +26,7 @@ import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.UserGroupInformation;
@@ -33,8 +34,20 @@ import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.util.ReflectionUtils;
public class MetastoreDelegationTokenManager {
-
+ public static final String DELEGATION_TOKEN_STORE_ZK_CONNECT_STR =
+ "hive.cluster.delegation.token.store.zookeeper.connectString";
protected DelegationTokenSecretManager secretManager;
+ // Alternate connect string specification configuration
+ public static final String DELEGATION_TOKEN_STORE_ZK_CONNECT_STR_ALTERNATE =
+ "hive.zookeeper.quorum";
+
+ public static final String DELEGATION_TOKEN_STORE_ZK_CONNECT_TIMEOUTMILLIS =
+ "hive.cluster.delegation.token.store.zookeeper.connectTimeoutMillis";
+ public static final String DELEGATION_TOKEN_STORE_ZK_ZNODE =
+ "hive.cluster.delegation.token.store.zookeeper.znode";
+ public static final String DELEGATION_TOKEN_STORE_ZK_ACL =
+ "hive.cluster.delegation.token.store.zookeeper.acl";
+ public static final String DELEGATION_TOKEN_STORE_ZK_ZNODE_DEFAULT = "/hivedelegation";
public MetastoreDelegationTokenManager() {
}
@@ -43,6 +56,10 @@ public class MetastoreDelegationTokenManager {
return secretManager;
}
+ public void startDelegationTokenSecretManager(Configuration conf, Object hms) throws IOException {
+ startDelegationTokenSecretManager(conf, hms, HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
+ }
+
public void startDelegationTokenSecretManager(Configuration conf, Object hms, HadoopThriftAuthBridge.Server.ServerMode smode)
throws IOException {
long secretKeyInterval = MetastoreConf.getTimeVar(conf,
@@ -121,17 +138,7 @@ public class MetastoreDelegationTokenManager {
}
private DelegationTokenStore getTokenStore(Configuration conf) throws IOException {
- String tokenStoreClassName =
- MetastoreConf.getVar(conf, MetastoreConf.ConfVars.DELEGATION_TOKEN_STORE_CLS, "");
- // The second half of this if is to catch cases where users are passing in a HiveConf for
- // configuration. It will have set the default value of
- // "hive.cluster.delegation.token.store .class" to
- // "org.apache.hadoop.hive.thrift.MemoryTokenStore" as part of its construction. But this is
- // the hive-shims version of the memory store. We want to convert this to our default value.
- if (StringUtils.isBlank(tokenStoreClassName) ||
- "org.apache.hadoop.hive.thrift.MemoryTokenStore".equals(tokenStoreClassName)) {
- return new MemoryTokenStore();
- }
+ String tokenStoreClassName = SecurityUtils.getTokenStoreClassName(conf);
try {
Class<? extends DelegationTokenStore> storeClass =
Class.forName(tokenStoreClassName).asSubclass(DelegationTokenStore.class);
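
The new overload above defaults the server mode, so metastore-side callers no longer pass ServerMode explicitly. A hedged sketch (hmsHandler stands in for the handler object passed as Object in the diff):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.security.MetastoreDelegationTokenManager;

public class StartSecretManagerSketch {
  static void start(Configuration conf, Object hmsHandler) throws IOException {
    MetastoreDelegationTokenManager mgr = new MetastoreDelegationTokenManager();
    // Equivalent to startDelegationTokenSecretManager(conf, hmsHandler,
    // HadoopThriftAuthBridge.Server.ServerMode.METASTORE)
    mgr.startDelegationTokenSecretManager(conf, hmsHandler);
  }
}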
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/TokenStoreDelegationTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/TokenStoreDelegationTokenSecretManager.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/TokenStoreDelegationTokenSecretManager.java
index 4abcec7..3f5bd53 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/TokenStoreDelegationTokenSecretManager.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/TokenStoreDelegationTokenSecretManager.java
@@ -33,6 +33,7 @@ import java.util.Map;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
import org.apache.hadoop.security.token.delegation.DelegationKey;
import org.apache.hadoop.security.token.delegation.MetastoreDelegationTokenSupport;
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java
new file mode 100644
index 0000000..42f2f62
--- /dev/null
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java
@@ -0,0 +1,474 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.security;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.framework.api.ACLProvider;
+import org.apache.curator.framework.imps.CuratorFrameworkState;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
+import org.apache.hadoop.security.token.delegation.MetastoreDelegationTokenSupport;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.ZooDefs.Ids;
+import org.apache.zookeeper.ZooDefs.Perms;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * ZooKeeper token store implementation.
+ */
+public class ZooKeeperTokenStore implements DelegationTokenStore {
+
+ private static final Logger LOGGER =
+ LoggerFactory.getLogger(ZooKeeperTokenStore.class.getName());
+
+ protected static final String ZK_SEQ_FORMAT = "%010d";
+ private static final String NODE_KEYS = "/keys";
+ private static final String NODE_TOKENS = "/tokens";
+
+ private String rootNode = "";
+ private volatile CuratorFramework zkSession;
+ private String zkConnectString;
+ private int connectTimeoutMillis;
+ private List<ACL> newNodeAcl = Arrays.asList(new ACL(Perms.ALL, Ids.AUTH_IDS));
+
+ /**
+ * ACLProvider permissions will be used in case parent dirs need to be created
+ */
+ private final ACLProvider aclDefaultProvider = new ACLProvider() {
+
+ @Override
+ public List<ACL> getDefaultAcl() {
+ return newNodeAcl;
+ }
+
+ @Override
+ public List<ACL> getAclForPath(String path) {
+ return getDefaultAcl();
+ }
+ };
+
+
+ private final String WHEN_ZK_DSTORE_MSG = "when zookeeper based delegation token storage is enabled "
+ + "(hive.cluster.delegation.token.store.class=" + ZooKeeperTokenStore.class.getName() + ")";
+
+ private Configuration conf;
+
+ private HadoopThriftAuthBridge.Server.ServerMode serverMode;
+
+ /**
+ * Default constructor for dynamic instantiation w/ Configurable
+ * (ReflectionUtils does not support Configuration constructor injection).
+ */
+ protected ZooKeeperTokenStore() {
+ }
+
+ private CuratorFramework getSession() {
+ if (zkSession == null || zkSession.getState() == CuratorFrameworkState.STOPPED) {
+ synchronized (this) {
+ if (zkSession == null || zkSession.getState() == CuratorFrameworkState.STOPPED) {
+ zkSession =
+ CuratorFrameworkFactory.builder().connectString(zkConnectString)
+ .connectionTimeoutMs(connectTimeoutMillis).aclProvider(aclDefaultProvider)
+ .retryPolicy(new ExponentialBackoffRetry(1000, 3)).build();
+ zkSession.start();
+ }
+ }
+ }
+ return zkSession;
+ }
+
+ private void setupJAASConfig(Configuration conf) throws IOException {
+ if (!UserGroupInformation.getLoginUser().isFromKeytab()) {
+ // The process has not logged in using keytab
+ // this should be a test mode, can't use keytab to authenticate
+ // with zookeeper.
+ LOGGER.warn("Login is not from keytab");
+ return;
+ }
+
+ String principal;
+ String keytab;
+ switch (serverMode) {
+ case METASTORE:
+ principal = getNonEmptyConfVar(conf, "hive.metastore.kerberos.principal");
+ keytab = getNonEmptyConfVar(conf, "hive.metastore.kerberos.keytab.file");
+ break;
+ case HIVESERVER2:
+ principal = getNonEmptyConfVar(conf, "hive.server2.authentication.kerberos.principal");
+ keytab = getNonEmptyConfVar(conf, "hive.server2.authentication.kerberos.keytab");
+ break;
+ default:
+ throw new AssertionError("Unexpected server mode " + serverMode);
+ }
+ SecurityUtils.setZookeeperClientKerberosJaasConfig(principal, keytab);
+ }
+
+ private String getNonEmptyConfVar(Configuration conf, String param) throws IOException {
+ String val = conf.get(param);
+ if (val == null || val.trim().isEmpty()) {
+ throw new IOException("Configuration parameter " + param + " should be set, "
+ + WHEN_ZK_DSTORE_MSG);
+ }
+ return val;
+ }
+
+ /**
+ * Create a path if it does not already exist ("mkdir -p")
+ * @param path string with '/' separator
+ * @param acl list of ACL entries
+ * @throws TokenStoreException
+ */
+ public void ensurePath(String path, List<ACL> acl)
+ throws TokenStoreException {
+ try {
+ CuratorFramework zk = getSession();
+ String node = zk.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT)
+ .withACL(acl).forPath(path);
+ LOGGER.info("Created path: {} ", node);
+ } catch (KeeperException.NodeExistsException e) {
+ // node already exists
+ } catch (Exception e) {
+ throw new TokenStoreException("Error creating path " + path, e);
+ }
+ }
+
+ /**
+ * Parse ACL permission string, from ZooKeeperMain private method
+ * @param permString
+ * @return
+ */
+ public static int getPermFromString(String permString) {
+ int perm = 0;
+ for (int i = 0; i < permString.length(); i++) {
+ switch (permString.charAt(i)) {
+ case 'r':
+ perm |= ZooDefs.Perms.READ;
+ break;
+ case 'w':
+ perm |= ZooDefs.Perms.WRITE;
+ break;
+ case 'c':
+ perm |= ZooDefs.Perms.CREATE;
+ break;
+ case 'd':
+ perm |= ZooDefs.Perms.DELETE;
+ break;
+ case 'a':
+ perm |= ZooDefs.Perms.ADMIN;
+ break;
+ default:
+ LOGGER.error("Unknown perm type: " + permString.charAt(i));
+ }
+ }
+ return perm;
+ }
+
+ /**
+ * Parse comma separated list of ACL entries to secure generated nodes, e.g.
+ * <code>sasl:hive/host1@MY.DOMAIN:cdrwa,sasl:hive/host2@MY.DOMAIN:cdrwa</code>
+ * @param aclString
+ * @return ACL list
+ */
+ public static List<ACL> parseACLs(String aclString) {
+ String[] aclComps = StringUtils.splitByWholeSeparator(aclString, ",");
+ List<ACL> acl = new ArrayList<ACL>(aclComps.length);
+ for (String a : aclComps) {
+ if (StringUtils.isBlank(a)) {
+ continue;
+ }
+ a = a.trim();
+ // from ZooKeeperMain private method
+ int firstColon = a.indexOf(':');
+ int lastColon = a.lastIndexOf(':');
+ if (firstColon == -1 || lastColon == -1 || firstColon == lastColon) {
+ LOGGER.error(a + " does not have the form scheme:id:perm");
+ continue;
+ }
+ ACL newAcl = new ACL();
+ newAcl.setId(new Id(a.substring(0, firstColon), a.substring(
+ firstColon + 1, lastColon)));
+ newAcl.setPerms(getPermFromString(a.substring(lastColon + 1)));
+ acl.add(newAcl);
+ }
+ return acl;
+ }
+
+ private void initClientAndPaths() {
+ if (this.zkSession != null) {
+ this.zkSession.close();
+ }
+ try {
+ ensurePath(rootNode + NODE_KEYS, newNodeAcl);
+ ensurePath(rootNode + NODE_TOKENS, newNodeAcl);
+ } catch (TokenStoreException e) {
+ throw e;
+ }
+ }
+
+ @Override
+ public void setConf(Configuration conf) {
+ if (conf == null) {
+ throw new IllegalArgumentException("conf is null");
+ }
+ this.conf = conf;
+ }
+
+ @Override
+ public Configuration getConf() {
+ return null; // not required
+ }
+
+ private Map<Integer, byte[]> getAllKeys() throws KeeperException, InterruptedException {
+
+ String masterKeyNode = rootNode + NODE_KEYS;
+
+ // get children of key node
+ List<String> nodes = zkGetChildren(masterKeyNode);
+
+ // read each child node, add to results
+ Map<Integer, byte[]> result = new HashMap<Integer, byte[]>();
+ for (String node : nodes) {
+ String nodePath = masterKeyNode + "/" + node;
+ byte[] data = zkGetData(nodePath);
+ if (data != null) {
+ result.put(getSeq(node), data);
+ }
+ }
+ return result;
+ }
+
+ private List<String> zkGetChildren(String path) {
+ CuratorFramework zk = getSession();
+ try {
+ return zk.getChildren().forPath(path);
+ } catch (Exception e) {
+ throw new TokenStoreException("Error getting children for " + path, e);
+ }
+ }
+
+ private byte[] zkGetData(String nodePath) {
+ CuratorFramework zk = getSession();
+ try {
+ return zk.getData().forPath(nodePath);
+ } catch (KeeperException.NoNodeException ex) {
+ return null;
+ } catch (Exception e) {
+ throw new TokenStoreException("Error reading " + nodePath, e);
+ }
+ }
+
+ private int getSeq(String path) {
+ String[] pathComps = path.split("/");
+ return Integer.parseInt(pathComps[pathComps.length-1]);
+ }
+
+ @Override
+ public int addMasterKey(String s) {
+ String keysPath = rootNode + NODE_KEYS + "/";
+ CuratorFramework zk = getSession();
+ String newNode;
+ try {
+ newNode = zk.create().withMode(CreateMode.PERSISTENT_SEQUENTIAL).withACL(newNodeAcl)
+ .forPath(keysPath, s.getBytes());
+ } catch (Exception e) {
+ throw new TokenStoreException("Error creating new node with path " + keysPath, e);
+ }
+ LOGGER.info("Added key {}", newNode);
+ return getSeq(newNode);
+ }
+
+ @Override
+ public void updateMasterKey(int keySeq, String s) {
+ CuratorFramework zk = getSession();
+ String keyPath = rootNode + NODE_KEYS + "/" + String.format(ZK_SEQ_FORMAT, keySeq);
+ try {
+ zk.setData().forPath(keyPath, s.getBytes());
+ } catch (Exception e) {
+ throw new TokenStoreException("Error setting data in " + keyPath, e);
+ }
+ }
+
+ @Override
+ public boolean removeMasterKey(int keySeq) {
+ String keyPath = rootNode + NODE_KEYS + "/" + String.format(ZK_SEQ_FORMAT, keySeq);
+ zkDelete(keyPath);
+ return true;
+ }
+
+ private void zkDelete(String path) {
+ CuratorFramework zk = getSession();
+ try {
+ zk.delete().forPath(path);
+ } catch (KeeperException.NoNodeException ex) {
+ // already deleted
+ } catch (Exception e) {
+ throw new TokenStoreException("Error deleting " + path, e);
+ }
+ }
+
+ @Override
+ public String[] getMasterKeys() {
+ try {
+ Map<Integer, byte[]> allKeys = getAllKeys();
+ String[] result = new String[allKeys.size()];
+ int resultIdx = 0;
+ for (byte[] keyBytes : allKeys.values()) {
+ result[resultIdx++] = new String(keyBytes);
+ }
+ return result;
+ } catch (KeeperException ex) {
+ throw new TokenStoreException(ex);
+ } catch (InterruptedException ex) {
+ throw new TokenStoreException(ex);
+ }
+ }
+
+
+ private String getTokenPath(DelegationTokenIdentifier tokenIdentifier) {
+ try {
+ return rootNode + NODE_TOKENS + "/"
+ + TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier);
+ } catch (IOException ex) {
+ throw new TokenStoreException("Failed to encode token identifier", ex);
+ }
+ }
+
+ @Override
+ public boolean addToken(DelegationTokenIdentifier tokenIdentifier,
+ DelegationTokenInformation token) {
+ byte[] tokenBytes = MetastoreDelegationTokenSupport.encodeDelegationTokenInformation(token);
+ String tokenPath = getTokenPath(tokenIdentifier);
+ CuratorFramework zk = getSession();
+ String newNode;
+ try {
+ newNode = zk.create().withMode(CreateMode.PERSISTENT).withACL(newNodeAcl)
+ .forPath(tokenPath, tokenBytes);
+ } catch (Exception e) {
+ throw new TokenStoreException("Error creating new node with path " + tokenPath, e);
+ }
+
+ LOGGER.info("Added token: {}", newNode);
+ return true;
+ }
+
+ @Override
+ public boolean removeToken(DelegationTokenIdentifier tokenIdentifier) {
+ String tokenPath = getTokenPath(tokenIdentifier);
+ zkDelete(tokenPath);
+ return true;
+ }
+
+ @Override
+ public DelegationTokenInformation getToken(DelegationTokenIdentifier tokenIdentifier) {
+ byte[] tokenBytes = zkGetData(getTokenPath(tokenIdentifier));
+ if(tokenBytes == null) {
+ // The token is already removed.
+ return null;
+ }
+ try {
+ return MetastoreDelegationTokenSupport.decodeDelegationTokenInformation(tokenBytes);
+ } catch (Exception ex) {
+ throw new TokenStoreException("Failed to decode token", ex);
+ }
+ }
+
+ @Override
+ public List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers() {
+ String containerNode = rootNode + NODE_TOKENS;
+ final List<String> nodes = zkGetChildren(containerNode);
+ List<DelegationTokenIdentifier> result = new java.util.ArrayList<DelegationTokenIdentifier>(
+ nodes.size());
+ for (String node : nodes) {
+ DelegationTokenIdentifier id = new DelegationTokenIdentifier();
+ try {
+ TokenStoreDelegationTokenSecretManager.decodeWritable(id, node);
+ result.add(id);
+ } catch (Exception e) {
+ LOGGER.warn("Failed to decode token '{}'", node);
+ }
+ }
+ return result;
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (this.zkSession != null) {
+ this.zkSession.close();
+ }
+ }
+
+ @Override
+ public void init(Object hmsHandler, HadoopThriftAuthBridge.Server.ServerMode sMode) {
+ this.serverMode = sMode;
+ zkConnectString =
+ conf.get(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR, null);
+ if (zkConnectString == null || zkConnectString.trim().isEmpty()) {
+ // try alternate config param
+ zkConnectString =
+ conf.get(
+ MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR_ALTERNATE,
+ null);
+ if (zkConnectString == null || zkConnectString.trim().isEmpty()) {
+ throw new IllegalArgumentException("Zookeeper connect string has to be specifed through "
+ + "either " + MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR
+ + " or "
+ + MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR_ALTERNATE
+ + WHEN_ZK_DSTORE_MSG);
+ }
+ }
+ connectTimeoutMillis =
+ conf.getInt(
+ MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_TIMEOUTMILLIS,
+ CuratorFrameworkFactory.builder().getConnectionTimeoutMs());
+ String aclStr = conf.get(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL, null);
+ if (StringUtils.isNotBlank(aclStr)) {
+ this.newNodeAcl = parseACLs(aclStr);
+ }
+ rootNode =
+ conf.get(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE,
+ MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE_DEFAULT) + serverMode;
+
+ try {
+ // Install the JAAS Configuration for the runtime
+ setupJAASConfig(conf);
+ } catch (IOException e) {
+ throw new TokenStoreException("Error setting up JAAS configuration for zookeeper client "
+ + e.getMessage(), e);
+ }
+ initClientAndPaths();
+ }
+
+}
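
ZooKeeperTokenStore picks up its connect string, root znode, timeout and ACLs from the MetastoreDelegationTokenManager constants added earlier in this commit. A configuration sketch, assuming a reachable ensemble at zk1:2181 (the hostname and ACL principal are placeholders; the store is created reflectively since its constructor is protected):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.metastore.security.MetastoreDelegationTokenManager;
import org.apache.hadoop.hive.metastore.security.ZooKeeperTokenStore;
import org.apache.hadoop.util.ReflectionUtils;

public class ZkTokenStoreConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR, "zk1:2181");
    conf.set(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE, "/hivedelegation");
    // ACL entries use the scheme:id:perm form handled by parseACLs()
    conf.set(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL,
        "sasl:hive/host1@MY.DOMAIN:cdrwa");
    // ReflectionUtils injects conf via setConf() on the Configurable token store
    ZooKeeperTokenStore store = ReflectionUtils.newInstance(ZooKeeperTokenStore.class, conf);
    // init() connects to ZooKeeper and creates the /keys and /tokens paths
    store.init(null, HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
  }
}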
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java
index 9f0ca82..b05c995 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java
@@ -17,10 +17,29 @@
*/
package org.apache.hadoop.hive.metastore.utils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.security.DBTokenStore;
+import org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier;
+import org.apache.hadoop.hive.metastore.security.DelegationTokenSelector;
+import org.apache.hadoop.hive.metastore.security.MemoryTokenStore;
+import org.apache.hadoop.hive.metastore.security.ZooKeeperTokenStore;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.security.token.TokenSelector;
+import org.apache.zookeeper.client.ZooKeeperSaslClient;
+import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.LoginException;
+import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
+
import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
public class SecurityUtils {
public static UserGroupInformation getUGI() throws LoginException, IOException {
@@ -36,5 +55,158 @@ public class SecurityUtils {
}
return UserGroupInformation.getCurrentUser();
}
+ /**
+ * Dynamically sets up the JAAS configuration that uses Kerberos.
+ * @param principal
+ * @param keyTabFile
+ * @throws IOException
+ */
+ public static void setZookeeperClientKerberosJaasConfig(String principal, String keyTabFile) throws IOException {
+ // ZooKeeper property name to pick the correct JAAS conf section
+ final String SASL_LOGIN_CONTEXT_NAME = "HiveZooKeeperClient";
+ System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY, SASL_LOGIN_CONTEXT_NAME);
+
+ principal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
+ JaasConfiguration jaasConf = new JaasConfiguration(SASL_LOGIN_CONTEXT_NAME, principal, keyTabFile);
+
+ // Install the Configuration in the runtime.
+ javax.security.auth.login.Configuration.setConfiguration(jaasConf);
+ }
+
+ /**
+ * A JAAS configuration for ZooKeeper clients intended to use for SASL
+ * Kerberos.
+ */
+ private static class JaasConfiguration extends javax.security.auth.login.Configuration {
+ // Current installed Configuration
+ private static final boolean IBM_JAVA = System.getProperty("java.vendor")
+ .contains("IBM");
+ private final javax.security.auth.login.Configuration baseConfig = javax.security.auth.login.Configuration
+ .getConfiguration();
+ private final String loginContextName;
+ private final String principal;
+ private final String keyTabFile;
+
+ public JaasConfiguration(String hiveLoginContextName, String principal, String keyTabFile) {
+ this.loginContextName = hiveLoginContextName;
+ this.principal = principal;
+ this.keyTabFile = keyTabFile;
+ }
+
+ @Override
+ public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
+ if (loginContextName.equals(appName)) {
+ Map<String, String> krbOptions = new HashMap<String, String>();
+ if (IBM_JAVA) {
+ krbOptions.put("credsType", "both");
+ krbOptions.put("useKeytab", keyTabFile);
+ } else {
+ krbOptions.put("doNotPrompt", "true");
+ krbOptions.put("storeKey", "true");
+ krbOptions.put("useKeyTab", "true");
+ krbOptions.put("keyTab", keyTabFile);
+ }
+ krbOptions.put("principal", principal);
+ krbOptions.put("refreshKrb5Config", "true");
+ AppConfigurationEntry hiveZooKeeperClientEntry = new AppConfigurationEntry(
+ KerberosUtil.getKrb5LoginModuleName(), LoginModuleControlFlag.REQUIRED, krbOptions);
+ return new AppConfigurationEntry[] { hiveZooKeeperClientEntry };
+ }
+ // Try the base config
+ if (baseConfig != null) {
+ return baseConfig.getAppConfigurationEntry(appName);
+ }
+ return null;
+ }
+ }
+
+ /**
+ * Get the string form of the token given a token signature. The signature is used as the value of
+ * the "service" field in the token for lookup (see AbstractDelegationTokenSelector in Hadoop). If
+ * such a token exists in the token cache (credential store) of the job, the lookup returns it.
+ * This is relevant only when running against a "secure" Hadoop release. The method gets hold of
+ * the tokens if they are set up by Hadoop - this should happen on the map/reduce tasks if the
+ * client added the tokens into Hadoop's credential store in the front end during job submission.
+ * It selects the Hive delegation token among the set of tokens and returns its string form.
+ *
+ * @param tokenSignature the token signature used as the service name for lookup
+ * @return the string form of the token found, or null if no matching token exists
+ * @throws IOException
+ */
+ public static String getTokenStrForm(String tokenSignature) throws IOException {
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+ TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector();
+ Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
+ tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
+ return token != null ? token.encodeToUrlString() : null;
+ }
+
+ /**
+ * Create a delegation token object for the given token string and service. Add the token to given
+ * UGI
+ *
+ * @param ugi
+ * @param tokenStr
+ * @param tokenService
+ * @throws IOException
+ */
+ public static void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
+ throws IOException {
+ Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
+ ugi.addToken(delegationToken);
+ }
+
+ /**
+ * Create a new token using the given string and service
+ *
+ * @param tokenStr
+ * @param tokenService
+ * @return
+ * @throws IOException
+ */
+ private static Token<DelegationTokenIdentifier> createToken(String tokenStr, String tokenService)
+ throws IOException {
+ Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
+ delegationToken.decodeFromUrlString(tokenStr);
+ delegationToken.setService(new Text(tokenService));
+ return delegationToken;
+ }
+
+ private static final String DELEGATION_TOKEN_STORE_CLS = "hive.cluster.delegation.token.store.class";
+
+ /**
+ * This method should be used to return the metastore-specific token store class name to maintain
+ * backwards compatibility.
+ *
+ * @param conf - HiveConf object
+ * @return the token store class name from the HiveConf, mapping Hive-specific token store class
+ * names to their metastore module equivalents. For example, if
+ * hive.cluster.delegation.token.store.class is set to
+ * org.apache.hadoop.hive.thrift.MemoryTokenStore, it returns the equivalent token store
+ * class defined in the metastore module, which is
+ * org.apache.hadoop.hive.metastore.security.MemoryTokenStore. Similarly,
+ * org.apache.hadoop.hive.thrift.DBTokenStore maps to
+ * org.apache.hadoop.hive.metastore.security.DBTokenStore and
+ * org.apache.hadoop.hive.thrift.ZooKeeperTokenStore maps to
+ * org.apache.hadoop.hive.metastore.security.ZooKeeperTokenStore.
+ */
+ public static String getTokenStoreClassName(Configuration conf) {
+ String tokenStoreClass = conf.get(DELEGATION_TOKEN_STORE_CLS, "");
+ if (StringUtils.isBlank(tokenStoreClass)) {
+ // default tokenstore is MemoryTokenStore
+ return MemoryTokenStore.class.getName();
+ }
+ switch (tokenStoreClass) {
+ case "org.apache.hadoop.hive.thrift.DBTokenStore":
+ return DBTokenStore.class.getName();
+ case "org.apache.hadoop.hive.thrift.MemoryTokenStore":
+ return MemoryTokenStore.class.getName();
+ case "org.apache.hadoop.hive.thrift.ZooKeeperTokenStore":
+ return ZooKeeperTokenStore.class.getName();
+ default:
+ return tokenStoreClass;
+ }
+ }
}
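
A short illustration of the mapping behavior of getTokenStoreClassName (a sketch, not part of the commit):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.utils.SecurityUtils;

public class TokenStoreMappingSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Old shims-era value, e.g. carried over from an existing HiveConf
    conf.set("hive.cluster.delegation.token.store.class",
        "org.apache.hadoop.hive.thrift.DBTokenStore");
    // Prints org.apache.hadoop.hive.metastore.security.DBTokenStore
    System.out.println(SecurityUtils.getTokenStoreClassName(conf));
    // A blank or unset value falls back to the metastore MemoryTokenStore
    System.out.println(SecurityUtils.getTokenStoreClassName(new Configuration()));
  }
}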
[3/3] hive git commit: HIVE-17371 : Move tokenstores to metastore module (Vihang Karajgaonkar, reviewed by Alan Gates, Thejas M Nair)
Posted by vi...@apache.org.
HIVE-17371 : Move tokenstores to metastore module (Vihang Karajgaonkar, reviewed by Alan Gates, Thejas M Nair)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8fea1176
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8fea1176
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8fea1176
Branch: refs/heads/master
Commit: 8fea117695a6d5257c1f915b5f9f24e119c6523c
Parents: 9975131
Author: Vihang Karajgaonkar <vi...@cloudera.com>
Authored: Mon Sep 18 22:46:09 2017 -0700
Committer: Vihang Karajgaonkar <vi...@cloudera.com>
Committed: Mon Oct 16 19:37:02 2017 -0700
----------------------------------------------------------------------
.../org/apache/hive/beeline/ProxyAuthTest.java | 3 +-
.../apache/hive/hcatalog/common/HCatUtil.java | 2 +-
.../hive/hcatalog/mapreduce/Security.java | 2 +-
.../templeton/tool/TempletonControllerJob.java | 4 +-
.../hive/minikdc/TestHiveAuthFactory.java | 6 +-
.../hive/minikdc/TestJdbcWithDBTokenStore.java | 1 +
.../hive/minikdc/TestJdbcWithMiniKdc.java | 3 +-
.../metastore/security/TestDBTokenStore.java | 95 +++
.../security/TestZooKeeperTokenStore.java | 180 +++++
.../hadoop/hive/thrift/TestDBTokenStore.java | 95 ---
.../hive/thrift/TestZooKeeperTokenStore.java | 179 -----
.../org/apache/hive/jdbc/HiveConnection.java | 7 +-
jdbc/src/java/org/apache/hive/jdbc/Utils.java | 19 +
.../hadoop/hive/metastore/HiveMetaStore.java | 3 +-
.../hive/metastore/HiveMetaStoreClient.java | 5 +-
.../hadoop/hive/metastore/MetaStoreUtils.java | 4 +-
.../hive/service/auth/HiveAuthFactory.java | 26 +-
.../apache/hive/service/auth/HttpAuthUtils.java | 5 +-
.../hive/service/auth/KerberosSaslHelper.java | 9 +-
.../cli/session/HiveSessionImplwithUGI.java | 3 +-
.../hive/service/cli/session/SessionUtils.java | 97 +++
.../hive/thrift/HadoopThriftAuthBridge23.java | 107 ---
.../apache/hadoop/hive/shims/ShimLoader.java | 10 -
.../org/apache/hadoop/hive/shims/Utils.java | 82 +--
.../apache/hadoop/hive/thrift/DBTokenStore.java | 192 ------
.../hive/thrift/DelegationTokenIdentifier.java | 52 --
.../thrift/DelegationTokenSecretManager.java | 125 ----
.../hive/thrift/DelegationTokenSelector.java | 33 -
.../hive/thrift/DelegationTokenStore.java | 118 ----
.../hive/thrift/HadoopThriftAuthBridge.java | 689 -------------------
.../hive/thrift/HiveDelegationTokenManager.java | 172 -----
.../hadoop/hive/thrift/MemoryTokenStore.java | 137 ----
.../TokenStoreDelegationTokenSecretManager.java | 335 ---------
.../hadoop/hive/thrift/ZooKeeperTokenStore.java | 476 -------------
.../hive/metastore/security/DBTokenStore.java | 192 ++++++
.../security/DelegationTokenSelector.java | 33 +
.../security/DelegationTokenStore.java | 6 +-
.../metastore/security/MemoryTokenStore.java | 3 +-
.../MetastoreDelegationTokenManager.java | 31 +-
.../TokenStoreDelegationTokenSecretManager.java | 1 +
.../metastore/security/ZooKeeperTokenStore.java | 474 +++++++++++++
.../hive/metastore/utils/SecurityUtils.java | 172 +++++
42 files changed, 1336 insertions(+), 2852 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
----------------------------------------------------------------------
diff --git a/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java b/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
index 318857d..70fabfd 100644
--- a/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
+++ b/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
@@ -30,6 +30,7 @@ import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.jdbc.HiveConnection;
import org.apache.hive.service.auth.HiveAuthConstants;
+import org.apache.hive.service.cli.session.SessionUtils;
import org.apache.hive.beeline.BeeLine;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.shims.Utils;
@@ -202,7 +203,7 @@ public class ProxyAuthTest {
}
private static void storeTokenInJobConf(String tokenStr) throws Exception {
- Utils.setTokenStr(Utils.getUGI(),
+ SessionUtils.setTokenStr(Utils.getUGI(),
tokenStr, HiveAuthConstants.HS2_CLIENT_TOKEN);
System.out.println("Stored token " + tokenStr);
}
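
As the hunk above shows, client code now goes through SessionUtils for token storage. The same pattern in isolation (tokenStr is a placeholder for an encoded delegation token):

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.auth.HiveAuthConstants;
import org.apache.hive.service.cli.session.SessionUtils;

public class StoreTokenSketch {
  static void store(String tokenStr) throws Exception {
    // Decodes tokenStr, sets the HS2_CLIENT_TOKEN service and adds the token to the UGI
    SessionUtils.setTokenStr(UserGroupInformation.getCurrentUser(), tokenStr,
        HiveAuthConstants.HS2_CLIENT_TOKEN);
  }
}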
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
index 81804cf..4ace16c 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
@@ -58,7 +59,6 @@ import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java
index 3e05a0e..834a281 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java
@@ -28,8 +28,8 @@ import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.security.DelegationTokenSelector;
import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
index f4c4b76..7dc678e 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
@@ -161,8 +161,8 @@ public class TempletonControllerJob extends Configured implements Tool, JobSubmi
if(!secureMetastoreAccess) {
return null;
}
- Token<org.apache.hadoop.hive.thrift.DelegationTokenIdentifier> hiveToken =
- new Token<org.apache.hadoop.hive.thrift.DelegationTokenIdentifier>();
+ Token<org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier> hiveToken =
+ new Token<org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier>();
String metastoreTokenStrForm = buildHcatDelegationToken(user);
hiveToken.decodeFromUrlString(metastoreTokenStrForm);
job.getCredentials().addToken(new
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
----------------------------------------------------------------------
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
index e3a5190..d355777 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
@@ -59,7 +59,7 @@ public class TestHiveAuthFactory {
HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
Assert.assertNotNull(authFactory);
- Assert.assertEquals("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",
+ Assert.assertEquals("org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",
authFactory.getAuthTransFactory().getClass().getName());
}
@@ -79,11 +79,11 @@ public class TestHiveAuthFactory {
Assert.assertNotNull(keyTabFile);
hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, keyTabFile);
- hiveConf.setVar(ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS, "org.apache.hadoop.hive.thrift.DBTokenStore");
+ hiveConf.setVar(ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS, "org.apache.hadoop.hive.metastore.security.DBTokenStore");
HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
Assert.assertNotNull(authFactory);
- Assert.assertEquals("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",
+ Assert.assertEquals("org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",
authFactory.getAuthTransFactory().getClass().getName());
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStore.java
----------------------------------------------------------------------
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStore.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStore.java
index 8de94a9..8695232 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStore.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStore.java
@@ -37,6 +37,7 @@ public class TestJdbcWithDBTokenStore extends TestJdbcWithMiniKdc{
SessionHookTest.class.getName());
HiveConf hiveConf = new HiveConf();
+ // using the old config value tests backwards compatibility
hiveConf.setVar(ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS, "org.apache.hadoop.hive.thrift.DBTokenStore");
miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
miniHS2 = MiniHiveKdc.getMiniHS2WithKerbWithRemoteHMS(miniHiveKdc, hiveConf);
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
----------------------------------------------------------------------
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
index 60dbce1..1ab698f 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
@@ -36,6 +36,7 @@ import org.apache.hive.service.auth.HiveAuthConstants;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.session.HiveSessionHook;
import org.apache.hive.service.cli.session.HiveSessionHookContext;
+import org.apache.hive.service.cli.session.SessionUtils;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -279,7 +280,7 @@ public class TestJdbcWithMiniKdc {
// Store the given token in the UGI
protected void storeToken(String tokenStr, UserGroupInformation ugi)
throws Exception {
- Utils.setTokenStr(ugi,
+ SessionUtils.setTokenStr(ugi,
tokenStr, HiveAuthConstants.HS2_CLIENT_TOKEN);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestDBTokenStore.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestDBTokenStore.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestDBTokenStore.java
new file mode 100644
index 0000000..14c245d
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestDBTokenStore.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.security;
+
+import java.io.IOException;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.security.DBTokenStore;
+import org.apache.hadoop.hive.metastore.security.DelegationTokenStore.TokenStoreException;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
+import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport;
+import org.junit.Assert;
+
+public class TestDBTokenStore extends TestCase{
+
+ public void testDBTokenStore() throws TokenStoreException, MetaException, IOException {
+
+ DelegationTokenStore ts = new DBTokenStore();
+ ts.init(new HMSHandler("Test handler"), HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
+ assertEquals(0, ts.getMasterKeys().length);
+ assertEquals(false,ts.removeMasterKey(-1));
+ try{
+ ts.updateMasterKey(-1, "non-existent-key");
+ fail("Updated non-existent key.");
+ } catch (TokenStoreException e) {
+ assertTrue(e.getCause() instanceof NoSuchObjectException);
+ }
+ int keySeq = ts.addMasterKey("key1Data");
+ int keySeq2 = ts.addMasterKey("key2Data");
+ int keySeq2same = ts.addMasterKey("key2Data");
+ assertEquals("keys sequential", keySeq + 1, keySeq2);
+ assertEquals("keys sequential", keySeq + 2, keySeq2same);
+ assertEquals("expected number of keys", 3, ts.getMasterKeys().length);
+ assertTrue(ts.removeMasterKey(keySeq));
+ assertTrue(ts.removeMasterKey(keySeq2same));
+ assertEquals("expected number of keys", 1, ts.getMasterKeys().length);
+ assertEquals("key2Data",ts.getMasterKeys()[0]);
+ ts.updateMasterKey(keySeq2, "updatedData");
+ assertEquals("updatedData",ts.getMasterKeys()[0]);
+ assertTrue(ts.removeMasterKey(keySeq2));
+
+ // tokens
+ assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
+ DelegationTokenIdentifier tokenId = new DelegationTokenIdentifier(
+ new Text("owner"), new Text("renewer"), new Text("realUser"));
+ assertNull(ts.getToken(tokenId));
+ assertFalse(ts.removeToken(tokenId));
+ DelegationTokenInformation tokenInfo = new DelegationTokenInformation(
+ 99, "password".getBytes());
+ assertTrue(ts.addToken(tokenId, tokenInfo));
+ assertFalse(ts.addToken(tokenId, tokenInfo));
+ DelegationTokenInformation tokenInfoRead = ts.getToken(tokenId);
+ assertEquals(tokenInfo.getRenewDate(), tokenInfoRead.getRenewDate());
+ assertNotSame(tokenInfo, tokenInfoRead);
+ Assert.assertArrayEquals(HiveDelegationTokenSupport
+ .encodeDelegationTokenInformation(tokenInfo),
+ HiveDelegationTokenSupport
+ .encodeDelegationTokenInformation(tokenInfoRead));
+
+ List<DelegationTokenIdentifier> allIds = ts
+ .getAllDelegationTokenIdentifiers();
+ assertEquals(1, allIds.size());
+ Assert.assertEquals(TokenStoreDelegationTokenSecretManager
+ .encodeWritable(tokenId),
+ TokenStoreDelegationTokenSecretManager.encodeWritable(allIds
+ .get(0)));
+
+ assertTrue(ts.removeToken(tokenId));
+ assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
+ assertNull(ts.getToken(tokenId));
+ ts.close();
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestZooKeeperTokenStore.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestZooKeeperTokenStore.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestZooKeeperTokenStore.java
new file mode 100644
index 0000000..7aa8e31
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestZooKeeperTokenStore.java
@@ -0,0 +1,180 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.security;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge.Server.ServerMode;
+import org.apache.hadoop.hive.metastore.security.ZooKeeperTokenStore;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
+import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.data.ACL;
+import org.junit.Assert;
+
+public class TestZooKeeperTokenStore extends TestCase {
+
+ private MiniZooKeeperCluster zkCluster = null;
+ private CuratorFramework zkClient = null;
+ private int zkPort = -1;
+ private ZooKeeperTokenStore ts;
+
+ @Override
+ protected void setUp() throws Exception {
+ File zkDataDir = new File(System.getProperty("test.tmp.dir"));
+ if (this.zkCluster != null) {
+ throw new IOException("Cluster already running");
+ }
+ this.zkCluster = new MiniZooKeeperCluster();
+ this.zkPort = this.zkCluster.startup(zkDataDir);
+ this.zkClient =
+ CuratorFrameworkFactory.builder().connectString("localhost:" + zkPort)
+ .retryPolicy(new ExponentialBackoffRetry(1000, 3)).build();
+ this.zkClient.start();
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ this.zkClient.close();
+ if (ts != null) {
+ ts.close();
+ }
+ this.zkCluster.shutdown();
+ this.zkCluster = null;
+ }
+
+ private Configuration createConf(String zkPath) {
+ Configuration conf = new Configuration();
+ conf.set(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR, "localhost:"
+ + this.zkPort);
+ conf.set(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE, zkPath);
+ return conf;
+ }
+
+ public void testTokenStorage() throws Exception {
+ String ZK_PATH = "/zktokenstore-testTokenStorage";
+ ts = new ZooKeeperTokenStore();
+ Configuration conf = createConf(ZK_PATH);
+ conf.set(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL, "world:anyone:cdrwa");
+ ts.setConf(conf);
+ ts.init(null, HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
+
+
+ String metastore_zk_path = ZK_PATH + ServerMode.METASTORE;
+ int keySeq = ts.addMasterKey("key1Data");
+ byte[] keyBytes = zkClient.getData().forPath(
+ metastore_zk_path + "/keys/" + String.format(ZooKeeperTokenStore.ZK_SEQ_FORMAT, keySeq));
+ assertNotNull(keyBytes);
+ assertEquals(new String(keyBytes), "key1Data");
+
+ int keySeq2 = ts.addMasterKey("key2Data");
+ assertEquals("keys sequential", keySeq + 1, keySeq2);
+ assertEquals("expected number keys", 2, ts.getMasterKeys().length);
+
+ ts.removeMasterKey(keySeq);
+ assertEquals("expected number keys", 1, ts.getMasterKeys().length);
+
+ // tokens
+ DelegationTokenIdentifier tokenId = new DelegationTokenIdentifier(
+ new Text("owner"), new Text("renewer"), new Text("realUser"));
+ DelegationTokenInformation tokenInfo = new DelegationTokenInformation(
+ 99, "password".getBytes());
+ ts.addToken(tokenId, tokenInfo);
+ DelegationTokenInformation tokenInfoRead = ts.getToken(tokenId);
+ assertEquals(tokenInfo.getRenewDate(), tokenInfoRead.getRenewDate());
+ assertNotSame(tokenInfo, tokenInfoRead);
+ Assert.assertArrayEquals(HiveDelegationTokenSupport
+ .encodeDelegationTokenInformation(tokenInfo),
+ HiveDelegationTokenSupport
+ .encodeDelegationTokenInformation(tokenInfoRead));
+
+ List<DelegationTokenIdentifier> allIds = ts.getAllDelegationTokenIdentifiers();
+ assertEquals(1, allIds.size());
+ Assert.assertEquals(TokenStoreDelegationTokenSecretManager
+ .encodeWritable(tokenId),
+ TokenStoreDelegationTokenSecretManager.encodeWritable(allIds
+ .get(0)));
+
+ assertTrue(ts.removeToken(tokenId));
+ assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
+ assertNull(ts.getToken(tokenId));
+ }
+
+ public void testAclNoAuth() throws Exception {
+ String ZK_PATH = "/zktokenstore-testAclNoAuth";
+ Configuration conf = createConf(ZK_PATH);
+ conf.set(
+ MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL,
+ "ip:127.0.0.1:r");
+
+ ts = new ZooKeeperTokenStore();
+ try {
+ ts.setConf(conf);
+ ts.init(null, HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
+ fail("expected ACL exception");
+ } catch (DelegationTokenStore.TokenStoreException e) {
+ assertEquals(KeeperException.NoAuthException.class, e.getCause().getClass());
+ }
+ }
+
+ public void testAclInvalid() throws Exception {
+ String ZK_PATH = "/zktokenstore-testAclInvalid";
+ String aclString = "sasl:hive/host@TEST.DOMAIN:cdrwa, fail-parse-ignored";
+ Configuration conf = createConf(ZK_PATH);
+ conf.set(
+ MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL,
+ aclString);
+
+ List<ACL> aclList = ZooKeeperTokenStore.parseACLs(aclString);
+ assertEquals(1, aclList.size());
+
+ ts = new ZooKeeperTokenStore();
+ try {
+ ts.setConf(conf);
+ ts.init(null, HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
+ fail("expected ACL exception");
+ } catch (DelegationTokenStore.TokenStoreException e) {
+ assertEquals(KeeperException.InvalidACLException.class, e.getCause().getClass());
+ }
+ }
+
+ public void testAclPositive() throws Exception {
+ String ZK_PATH = "/zktokenstore-testAcl";
+ Configuration conf = createConf(ZK_PATH);
+ conf.set(
+ MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL,
+ "ip:127.0.0.1:cdrwa,world:anyone:cdrwa");
+ ts = new ZooKeeperTokenStore();
+ ts.setConf(conf);
+ ts.init(null, HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
+    List<ACL> acl = zkClient.getACL().forPath(
+        ZK_PATH + HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
+ assertEquals(2, acl.size());
+ }
+
+}
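
For readers unfamiliar with the ACL strings exercised by these tests ("world:anyone:cdrwa", "ip:127.0.0.1:r", "sasl:hive/host@TEST.DOMAIN:cdrwa"), the following is a minimal, self-contained sketch of how a scheme:id:perms entry maps onto ZooKeeper ACL objects. It mirrors the behavior the tests expect from ZooKeeperTokenStore.parseACLs (malformed entries are skipped, so testAclInvalid sees exactly one ACL), but it is an illustration, not the class's actual implementation.

import java.util.ArrayList;
import java.util.List;

import org.apache.zookeeper.ZooDefs.Perms;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Id;

public class AclStringSketch {

  /** Parse a comma-separated list of scheme:id:perms entries, e.g. "world:anyone:cdrwa". */
  public static List<ACL> parse(String aclString) {
    List<ACL> acls = new ArrayList<ACL>();
    for (String entry : aclString.split(",")) {
      entry = entry.trim();
      // Split on the first and last colon so ids that themselves contain ':'
      // (e.g. "sasl:hive/host@TEST.DOMAIN:cdrwa") stay intact.
      int first = entry.indexOf(':');
      int last = entry.lastIndexOf(':');
      if (first < 0 || last == first) {
        continue; // malformed entry, skipped - the leniency testAclInvalid relies on
      }
      String scheme = entry.substring(0, first);
      String id = entry.substring(first + 1, last);
      acls.add(new ACL(permsFromString(entry.substring(last + 1)), new Id(scheme, id)));
    }
    return acls;
  }

  private static int permsFromString(String s) {
    int perms = 0;
    for (char c : s.toCharArray()) {
      switch (c) {
        case 'c': perms |= Perms.CREATE; break;
        case 'd': perms |= Perms.DELETE; break;
        case 'r': perms |= Perms.READ; break;
        case 'w': perms |= Perms.WRITE; break;
        case 'a': perms |= Perms.ADMIN; break;
        default: throw new IllegalArgumentException("unknown permission: " + c);
      }
    }
    return perms;
  }
}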
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
deleted file mode 100644
index 4bfa224..0000000
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.thrift;
-
-import java.io.IOException;
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.thrift.DelegationTokenStore.TokenStoreException;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
-import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport;
-import org.junit.Assert;
-
-public class TestDBTokenStore extends TestCase{
-
- public void testDBTokenStore() throws TokenStoreException, MetaException, IOException {
-
- DelegationTokenStore ts = new DBTokenStore();
- ts.init(new HMSHandler("Test handler"), ServerMode.METASTORE);
- assertEquals(0, ts.getMasterKeys().length);
- assertEquals(false,ts.removeMasterKey(-1));
- try{
- ts.updateMasterKey(-1, "non-existent-key");
- fail("Updated non-existent key.");
- } catch (TokenStoreException e) {
- assertTrue(e.getCause() instanceof NoSuchObjectException);
- }
- int keySeq = ts.addMasterKey("key1Data");
- int keySeq2 = ts.addMasterKey("key2Data");
- int keySeq2same = ts.addMasterKey("key2Data");
- assertEquals("keys sequential", keySeq + 1, keySeq2);
- assertEquals("keys sequential", keySeq + 2, keySeq2same);
- assertEquals("expected number of keys", 3, ts.getMasterKeys().length);
- assertTrue(ts.removeMasterKey(keySeq));
- assertTrue(ts.removeMasterKey(keySeq2same));
- assertEquals("expected number of keys", 1, ts.getMasterKeys().length);
- assertEquals("key2Data",ts.getMasterKeys()[0]);
- ts.updateMasterKey(keySeq2, "updatedData");
- assertEquals("updatedData",ts.getMasterKeys()[0]);
- assertTrue(ts.removeMasterKey(keySeq2));
-
- // tokens
- assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
- DelegationTokenIdentifier tokenId = new DelegationTokenIdentifier(
- new Text("owner"), new Text("renewer"), new Text("realUser"));
- assertNull(ts.getToken(tokenId));
- assertFalse(ts.removeToken(tokenId));
- DelegationTokenInformation tokenInfo = new DelegationTokenInformation(
- 99, "password".getBytes());
- assertTrue(ts.addToken(tokenId, tokenInfo));
- assertFalse(ts.addToken(tokenId, tokenInfo));
- DelegationTokenInformation tokenInfoRead = ts.getToken(tokenId);
- assertEquals(tokenInfo.getRenewDate(), tokenInfoRead.getRenewDate());
- assertNotSame(tokenInfo, tokenInfoRead);
- Assert.assertArrayEquals(HiveDelegationTokenSupport
- .encodeDelegationTokenInformation(tokenInfo),
- HiveDelegationTokenSupport
- .encodeDelegationTokenInformation(tokenInfoRead));
-
- List<DelegationTokenIdentifier> allIds = ts
- .getAllDelegationTokenIdentifiers();
- assertEquals(1, allIds.size());
- Assert.assertEquals(TokenStoreDelegationTokenSecretManager
- .encodeWritable(tokenId),
- TokenStoreDelegationTokenSecretManager.encodeWritable(allIds
- .get(0)));
-
- assertTrue(ts.removeToken(tokenId));
- assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
- assertNull(ts.getToken(tokenId));
- ts.close();
- }
-}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
deleted file mode 100644
index 7800416..0000000
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.thrift;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
-import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.data.ACL;
-import org.junit.Assert;
-
-public class TestZooKeeperTokenStore extends TestCase {
-
- private MiniZooKeeperCluster zkCluster = null;
- private CuratorFramework zkClient = null;
- private int zkPort = -1;
- private ZooKeeperTokenStore ts;
-
- @Override
- protected void setUp() throws Exception {
- File zkDataDir = new File(System.getProperty("test.tmp.dir"));
- if (this.zkCluster != null) {
- throw new IOException("Cluster already running");
- }
- this.zkCluster = new MiniZooKeeperCluster();
- this.zkPort = this.zkCluster.startup(zkDataDir);
- this.zkClient =
- CuratorFrameworkFactory.builder().connectString("localhost:" + zkPort)
- .retryPolicy(new ExponentialBackoffRetry(1000, 3)).build();
- this.zkClient.start();
- }
-
- @Override
- protected void tearDown() throws Exception {
- this.zkClient.close();
- if (ts != null) {
- ts.close();
- }
- this.zkCluster.shutdown();
- this.zkCluster = null;
- }
-
- private Configuration createConf(String zkPath) {
- Configuration conf = new Configuration();
- conf.set(HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR, "localhost:"
- + this.zkPort);
- conf.set(HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE, zkPath);
- return conf;
- }
-
- public void testTokenStorage() throws Exception {
- String ZK_PATH = "/zktokenstore-testTokenStorage";
- ts = new ZooKeeperTokenStore();
- Configuration conf = createConf(ZK_PATH);
- conf.set(HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL, "world:anyone:cdrwa");
- ts.setConf(conf);
- ts.init(null, ServerMode.METASTORE);
-
-
- String metastore_zk_path = ZK_PATH + ServerMode.METASTORE;
- int keySeq = ts.addMasterKey("key1Data");
- byte[] keyBytes = zkClient.getData().forPath(
- metastore_zk_path + "/keys/" + String.format(ZooKeeperTokenStore.ZK_SEQ_FORMAT, keySeq));
- assertNotNull(keyBytes);
- assertEquals(new String(keyBytes), "key1Data");
-
- int keySeq2 = ts.addMasterKey("key2Data");
- assertEquals("keys sequential", keySeq + 1, keySeq2);
- assertEquals("expected number keys", 2, ts.getMasterKeys().length);
-
- ts.removeMasterKey(keySeq);
- assertEquals("expected number keys", 1, ts.getMasterKeys().length);
-
- // tokens
- DelegationTokenIdentifier tokenId = new DelegationTokenIdentifier(
- new Text("owner"), new Text("renewer"), new Text("realUser"));
- DelegationTokenInformation tokenInfo = new DelegationTokenInformation(
- 99, "password".getBytes());
- ts.addToken(tokenId, tokenInfo);
- DelegationTokenInformation tokenInfoRead = ts.getToken(tokenId);
- assertEquals(tokenInfo.getRenewDate(), tokenInfoRead.getRenewDate());
- assertNotSame(tokenInfo, tokenInfoRead);
- Assert.assertArrayEquals(HiveDelegationTokenSupport
- .encodeDelegationTokenInformation(tokenInfo),
- HiveDelegationTokenSupport
- .encodeDelegationTokenInformation(tokenInfoRead));
-
- List<DelegationTokenIdentifier> allIds = ts.getAllDelegationTokenIdentifiers();
- assertEquals(1, allIds.size());
- Assert.assertEquals(TokenStoreDelegationTokenSecretManager
- .encodeWritable(tokenId),
- TokenStoreDelegationTokenSecretManager.encodeWritable(allIds
- .get(0)));
-
- assertTrue(ts.removeToken(tokenId));
- assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
- assertNull(ts.getToken(tokenId));
- }
-
- public void testAclNoAuth() throws Exception {
- String ZK_PATH = "/zktokenstore-testAclNoAuth";
- Configuration conf = createConf(ZK_PATH);
- conf.set(
- HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL,
- "ip:127.0.0.1:r");
-
- ts = new ZooKeeperTokenStore();
- try {
- ts.setConf(conf);
- ts.init(null, ServerMode.METASTORE);
- fail("expected ACL exception");
- } catch (DelegationTokenStore.TokenStoreException e) {
- assertEquals(KeeperException.NoAuthException.class, e.getCause().getClass());
- }
- }
-
- public void testAclInvalid() throws Exception {
- String ZK_PATH = "/zktokenstore-testAclInvalid";
- String aclString = "sasl:hive/host@TEST.DOMAIN:cdrwa, fail-parse-ignored";
- Configuration conf = createConf(ZK_PATH);
- conf.set(
- HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL,
- aclString);
-
- List<ACL> aclList = ZooKeeperTokenStore.parseACLs(aclString);
- assertEquals(1, aclList.size());
-
- ts = new ZooKeeperTokenStore();
- try {
- ts.setConf(conf);
- ts.init(null, ServerMode.METASTORE);
- fail("expected ACL exception");
- } catch (DelegationTokenStore.TokenStoreException e) {
- assertEquals(KeeperException.InvalidACLException.class, e.getCause().getClass());
- }
- }
-
- public void testAclPositive() throws Exception {
- String ZK_PATH = "/zktokenstore-testAcl";
- Configuration conf = createConf(ZK_PATH);
- conf.set(
- HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL,
- "ip:127.0.0.1:cdrwa,world:anyone:cdrwa");
- ts = new ZooKeeperTokenStore();
- ts.setConf(conf);
- ts.init(null, ServerMode.METASTORE);
- List<ACL> acl = zkClient.getACL().forPath(ZK_PATH + ServerMode.METASTORE);
- assertEquals(2, acl.size());
- }
-
-}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index a9a4f2c..edf9385 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -26,6 +26,7 @@ import org.apache.hive.service.auth.HiveAuthConstants;
import org.apache.hive.service.auth.KerberosSaslHelper;
import org.apache.hive.service.auth.PlainSaslHelper;
import org.apache.hive.service.auth.SaslQOP;
+import org.apache.hive.service.cli.session.SessionUtils;
import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.hive.service.rpc.thrift.TCancelDelegationTokenReq;
@@ -164,7 +165,7 @@ public class HiveConnection implements java.sql.Connection {
// sess_var_list -> sessConfMap
// hive_conf_list -> hiveConfMap
// hive_var_list -> hiveVarMap
- host = ShimLoader.getHadoopThriftAuthBridge().getCanonicalHostName(connParams.getHost());
+ host = Utils.getCanonicalHostName(connParams.getHost());
port = connParams.getPort();
sessConfMap = connParams.getSessionVars();
isEmbeddedMode = connParams.isEmbeddedMode();
@@ -229,7 +230,7 @@ public class HiveConnection implements java.sql.Connection {
}
// Update with new values
jdbcUriString = connParams.getJdbcUriString();
- host = ShimLoader.getHadoopThriftAuthBridge().getCanonicalHostName(connParams.getHost());
+ host = Utils.getCanonicalHostName(connParams.getHost());
port = connParams.getPort();
} else {
errMsg = warnMsg;
@@ -652,7 +653,7 @@ public class HiveConnection implements java.sql.Connection {
if (JdbcConnectionParams.AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(JdbcConnectionParams.AUTH_TYPE))) {
// check delegation token in job conf if any
try {
- tokenStr = org.apache.hadoop.hive.shims.Utils.getTokenStrForm(HiveAuthConstants.HS2_CLIENT_TOKEN);
+ tokenStr = SessionUtils.getTokenStrForm(HiveAuthConstants.HS2_CLIENT_TOKEN);
} catch (IOException e) {
throw new SQLException("Error reading token ", e);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/jdbc/src/java/org/apache/hive/jdbc/Utils.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/Utils.java b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
index 574fb7e..855de88 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/Utils.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
@@ -18,7 +18,9 @@
package org.apache.hive.jdbc;
+import java.net.InetAddress;
import java.net.URI;
+import java.net.UnknownHostException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -663,4 +665,21 @@ public class Utils {
}
return null;
}
+
+ /**
+   * Resolves the canonical hostname for the given hostname (possibly a CNAME).
+   * This allows service principals to be built from simplified CNAMEs.
+   * @param hostName The hostname to canonicalize.
+   * @return The canonical hostname if the name resolves; otherwise the original hostname.
+ */
+ public static String getCanonicalHostName(String hostName) {
+ try {
+ return InetAddress.getByName(hostName).getCanonicalHostName();
+    } catch (UnknownHostException exception) {
+ LOG.warn("Could not retrieve canonical hostname for " + hostName, exception);
+ return hostName;
+ }
+ }
+
}
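
A brief illustration of the new helper's behavior; the hostnames below are hypothetical stand-ins, and actual results depend on the resolver:

import org.apache.hive.jdbc.Utils;

public class CanonicalHostNameDemo {
  public static void main(String[] args) {
    // Hypothetical DNS setup: "hs2.example.com" is a CNAME for
    // "hs2-primary.example.com", so the canonical name comes back when the
    // alias resolves, letting a Kerberos service principal be derived from it.
    System.out.println(Utils.getCanonicalHostName("hs2.example.com"));
    // An unresolvable name is logged and returned unchanged.
    System.out.println(Utils.getCanonicalHostName("no-such-host.invalid"));
  }
}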
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 12faf82..23033fa 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -7676,8 +7676,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
conf.getVar(HiveConf.ConfVars.METASTORE_CLIENT_KERBEROS_PRINCIPAL));
// Start delegation token manager
delegationTokenManager = new MetastoreDelegationTokenManager();
- delegationTokenManager.startDelegationTokenSecretManager(conf, baseHandler,
- HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
+ delegationTokenManager.startDelegationTokenSecretManager(conf, baseHandler, HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
saslServer.setSecretManager(delegationTokenManager.getSecretManager());
transFactory = saslServer.createTransportFactory(
MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 95e3d75..3f5f80e 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -66,6 +66,7 @@ import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.metastore.txn.TxnUtils;
+import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
@@ -242,7 +243,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
});
String delegationTokenPropString = "DelegationTokenForHiveMetaStoreServer";
String delegationTokenStr = getDelegationToken(proxyUser, proxyUser);
- Utils.setTokenStr(UserGroupInformation.getCurrentUser(), delegationTokenStr,
+ SecurityUtils.setTokenStr(UserGroupInformation.getCurrentUser(), delegationTokenStr,
delegationTokenPropString);
this.conf.setVar(ConfVars.METASTORE_TOKEN_SIGNATURE, delegationTokenPropString);
close();
@@ -455,7 +456,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
// submission.
String tokenSig = conf.getVar(ConfVars.METASTORE_TOKEN_SIGNATURE);
// tokenSig could be null
- tokenStrForm = Utils.getTokenStrForm(tokenSig);
+ tokenStrForm = SecurityUtils.getTokenStrForm(tokenSig);
if(tokenStrForm != null) {
LOG.info("HMSC::open(): Found delegation token. Creating DIGEST-based thrift connection.");
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 5354e70..ef097ac 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -63,7 +63,7 @@ import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.metastore.api.Decimal;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -1638,7 +1638,7 @@ public class MetaStoreUtils {
+ hadoopRpcProtectionVal + " to " + hadoopRpcProtectionAuth + " because SSL is enabled");
conf.set(CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION, hadoopRpcProtectionAuth);
}
- return ShimLoader.getHadoopThriftAuthBridge().getHadoopSaslProperties(conf);
+ return HadoopThriftAuthBridge.getBridge().getHadoopSaslProperties(conf);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 0f4a4d8..6528ca0 100644
--- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -27,15 +27,21 @@ import javax.security.auth.login.LoginException;
import javax.security.sasl.AuthenticationException;
import javax.security.sasl.Sasl;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.conf.HiveConfUtil;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.hive.thrift.DBTokenStore;
-import org.apache.hadoop.hive.thrift.HiveDelegationTokenManager;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
+import org.apache.hadoop.hive.metastore.security.DBTokenStore;
+import org.apache.hadoop.hive.metastore.security.DelegationTokenStore;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.metastore.security.MemoryTokenStore;
+import org.apache.hadoop.hive.metastore.security.MetastoreDelegationTokenManager;
+import org.apache.hadoop.hive.metastore.security.ZooKeeperTokenStore;
+import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
@@ -61,7 +67,7 @@ public class HiveAuthFactory {
private final String transportMode;
private final HiveConf conf;
private String hadoopAuth;
- private HiveDelegationTokenManager delegationTokenManager = null;
+ private MetastoreDelegationTokenManager delegationTokenManager = null;
public HiveAuthFactory(HiveConf conf) throws TTransportException {
this.conf = conf;
@@ -82,16 +88,16 @@ public class HiveAuthFactory {
}
if (isSASLWithKerberizedHadoop()) {
saslServer =
- ShimLoader.getHadoopThriftAuthBridge().createServer(
+ HadoopThriftAuthBridge.getBridge().createServer(
conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL),
conf.getVar(ConfVars.HIVE_SERVER2_CLIENT_KERBEROS_PRINCIPAL));
// Start delegation token manager
- delegationTokenManager = new HiveDelegationTokenManager();
+ delegationTokenManager = new MetastoreDelegationTokenManager();
try {
Object baseHandler = null;
- String tokenStoreClass = conf.getVar(HiveConf.ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS);
+ String tokenStoreClass = SecurityUtils.getTokenStoreClassName(conf);
if (tokenStoreClass.equals(DBTokenStore.class.getName())) {
// IMetaStoreClient is needed to access token store if DBTokenStore is to be used. It
@@ -105,7 +111,8 @@ public class HiveAuthFactory {
baseHandler = Hive.class;
}
- delegationTokenManager.startDelegationTokenSecretManager(conf, baseHandler, ServerMode.HIVESERVER2);
+ delegationTokenManager.startDelegationTokenSecretManager(conf, baseHandler,
+ HadoopThriftAuthBridge.Server.ServerMode.HIVESERVER2);
saslServer.setSecretManager(delegationTokenManager.getSecretManager());
}
catch (IOException e) {
@@ -325,5 +332,4 @@ public class HiveAuthFactory {
"Failed to validate proxy privilege of " + realUser + " for " + proxyUser, "08S01", e);
}
}
-
}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
index 8b5661a..7b3040d 100644
--- a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
+++ b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
@@ -32,6 +32,7 @@ import java.util.StringTokenizer;
import javax.security.auth.Subject;
import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.http.protocol.BasicHttpContext;
@@ -66,7 +67,7 @@ public final class HttpAuthUtils {
public static String getKerberosServiceTicket(String principal, String host,
String serverHttpUrl, boolean assumeSubject) throws Exception {
String serverPrincipal =
- ShimLoader.getHadoopThriftAuthBridge().getServerPrincipal(principal, host);
+ HadoopThriftAuthBridge.getBridge().getServerPrincipal(principal, host);
if (assumeSubject) {
// With this option, we're assuming that the external application,
// using the JDBC driver has done a JAAS kerberos login already
@@ -79,7 +80,7 @@ public final class HttpAuthUtils {
} else {
// JAAS login from ticket cache to setup the client UserGroupInformation
UserGroupInformation clientUGI =
- ShimLoader.getHadoopThriftAuthBridge().getCurrentUGIWithConf("kerberos");
+ HadoopThriftAuthBridge.getBridge().getCurrentUGIWithConf("kerberos");
return clientUGI.doAs(new HttpKerberosClientAction(serverPrincipal, serverHttpUrl));
}
}
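
The pattern above recurs throughout this patch: call sites that previously went through ShimLoader.getHadoopThriftAuthBridge() now reach the bridge through the metastore module's static accessor. A condensed sketch of the new client-side access pattern; the principal and host are hypothetical placeholders:

import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.security.UserGroupInformation;

public class BridgeAccessSketch {
  public static void main(String[] args) throws Exception {
    // The bridge now comes straight from the metastore security package;
    // no ShimLoader indirection remains.
    HadoopThriftAuthBridge bridge = HadoopThriftAuthBridge.getBridge();

    // Both calls appear in the hunk above.
    String serverPrincipal =
        bridge.getServerPrincipal("hive/_HOST@EXAMPLE.COM", "hs2.example.com");
    UserGroupInformation clientUGI = bridge.getCurrentUGIWithConf("kerberos");
    System.out.println(serverPrincipal + " / " + clientUGI.getUserName());
  }
}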
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java b/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
index ea2c689..3f549ba 100644
--- a/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
+++ b/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
@@ -22,9 +22,8 @@ import java.util.Map;
import javax.security.sasl.SaslException;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge.Server;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.hive.service.rpc.thrift.TCLIService.Iface;
@@ -53,7 +52,7 @@ public final class KerberosSaslHelper {
return createSubjectAssumedTransport(principal, underlyingTransport, saslProps);
} else {
HadoopThriftAuthBridge.Client authBridge =
- ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
+ HadoopThriftAuthBridge.getBridge().createClientWithConf("kerberos");
return authBridge.createClientTransport(principal, host, "KERBEROS", null,
underlyingTransport, saslProps);
}
@@ -78,7 +77,7 @@ public final class KerberosSaslHelper {
public static TTransport getTokenTransport(String tokenStr, String host,
TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
HadoopThriftAuthBridge.Client authBridge =
- ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
+ HadoopThriftAuthBridge.getBridge().createClientWithConf("kerberos");
try {
return authBridge.createClientTransport(null, host, "DIGEST", tokenStr, underlyingTransport,
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
index 00a7e74..8000a5b 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.HiveSQLException;
@@ -113,7 +112,7 @@ public class HiveSessionImplwithUGI extends HiveSessionImpl {
if (hmsDelegationTokenStr != null) {
getHiveConf().setVar(HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE, HS2TOKEN);
try {
- Utils.setTokenStr(sessionUgi, hmsDelegationTokenStr, HS2TOKEN);
+ SessionUtils.setTokenStr(sessionUgi, hmsDelegationTokenStr, HS2TOKEN);
} catch (IOException e) {
throw new HiveSQLException("Couldn't setup delegation token in the ugi: " + e, e);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/service/src/java/org/apache/hive/service/cli/session/SessionUtils.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/session/SessionUtils.java b/service/src/java/org/apache/hive/service/cli/session/SessionUtils.java
new file mode 100644
index 0000000..00d3112
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/cli/session/SessionUtils.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.cli.session;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier;
+import org.apache.hadoop.hive.metastore.security.DelegationTokenSelector;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.security.token.TokenSelector;
+
+public class SessionUtils {
+  /**
+   * Get the string form of the token given a token signature. The signature is used as the value of
+   * the "service" field in the token for lookup. Ref: AbstractDelegationTokenSelector in Hadoop. If
+   * such a token exists in the token cache (credential store) of the job, the lookup returns it.
+   * This is relevant only when running against a "secure" hadoop release. The method gets hold of
+   * the tokens if they are set up by hadoop - this should happen on the map/reduce tasks if the
+   * client added the tokens into hadoop's credential store in the front end during job submission.
+   * The method selects the hive delegation token among the set of tokens and returns its string
+   * form.
+   *
+   * @param tokenSignature the token signature used as the "service" field in the lookup; may be null
+   * @return the string form of the token found, or null if none matches
+   * @throws IOException
+   */
+ public static String getTokenStrForm(String tokenSignature) throws IOException {
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+ TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector();
+
+ Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
+ tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
+ return token != null ? token.encodeToUrlString() : null;
+ }
+
+ /**
+   * Create a delegation token object for the given token string and service, and add the token
+   * to the given UGI.
+   *
+   * @param ugi the UserGroupInformation to receive the token
+   * @param tokenStr the URL-safe encoded token string
+   * @param tokenService the service name to set on the token
+   * @throws IOException
+ */
+ public static void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
+ throws IOException {
+ Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
+ ugi.addToken(delegationToken);
+ }
+
+ /**
+   * Add a given service to the delegation token string.
+   *
+   * @param tokenStr the URL-safe encoded token string
+   * @param tokenService the service name to set on the token
+   * @return the URL-safe encoded token string with the service field set
+ * @throws IOException
+ */
+ public static String addServiceToToken(String tokenStr, String tokenService) throws IOException {
+ Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
+ return delegationToken.encodeToUrlString();
+ }
+
+ /**
+   * Create a new token by decoding the given string and setting its service.
+   *
+   * @param tokenStr the URL-safe encoded token string
+   * @param tokenService the service name to set on the token
+   * @return the decoded token with its service set
+ * @throws IOException
+ */
+ private static Token<DelegationTokenIdentifier> createToken(String tokenStr, String tokenService)
+ throws IOException {
+ Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
+ delegationToken.decodeFromUrlString(tokenStr);
+ delegationToken.setService(new Text(tokenService));
+ return delegationToken;
+ }
+}
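
Since SessionUtils is new in this patch, here is a short round-trip sketch of its two public entry points. The signature value follows HiveMetaStoreClient's usage earlier in this diff; the args[0] input is a stand-in for a real URL-safe encoded delegation token obtained elsewhere:

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.cli.session.SessionUtils;

public class SessionUtilsSketch {
  public static void main(String[] args) throws Exception {
    String tokenStr = args[0]; // assumed: an encoded token, e.g. from getDelegationToken(...)
    String signature = "DelegationTokenForHiveMetaStoreServer"; // same value used in this patch

    // Attach the token to the current UGI under the given service signature...
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    SessionUtils.setTokenStr(ugi, tokenStr, signature);

    // ...then a lookup by the same signature finds it in the credential cache.
    String found = SessionUtils.getTokenStrForm(signature);
    System.out.println(found != null);
  }
}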
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java
----------------------------------------------------------------------
diff --git a/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java b/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java
deleted file mode 100644
index 5e21c9f..0000000
--- a/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.thrift;
-
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.SaslRpcServer;
-
-/**
- * Functions that bridge Thrift's SASL transports to Hadoop's SASL callback
- * handlers and authentication classes.
- *
- * This is a 0.23/2.x specific implementation
- */
-public class HadoopThriftAuthBridge23 extends HadoopThriftAuthBridge {
-
- private static Field SASL_PROPS_FIELD;
- private static Class<?> SASL_PROPERTIES_RESOLVER_CLASS;
- private static Method RES_GET_INSTANCE_METHOD;
- private static Method GET_DEFAULT_PROP_METHOD;
- static {
- SASL_PROPERTIES_RESOLVER_CLASS = null;
- SASL_PROPS_FIELD = null;
- final String SASL_PROP_RES_CLASSNAME = "org.apache.hadoop.security.SaslPropertiesResolver";
- try {
- SASL_PROPERTIES_RESOLVER_CLASS = Class.forName(SASL_PROP_RES_CLASSNAME);
-
- } catch (ClassNotFoundException e) {
- }
-
- if (SASL_PROPERTIES_RESOLVER_CLASS != null) {
- // found the class, so this would be hadoop version 2.4 or newer (See
- // HADOOP-10221, HADOOP-10451)
- try {
- RES_GET_INSTANCE_METHOD = SASL_PROPERTIES_RESOLVER_CLASS.getMethod("getInstance",
- Configuration.class);
- GET_DEFAULT_PROP_METHOD = SASL_PROPERTIES_RESOLVER_CLASS.getMethod("getDefaultProperties");
- } catch (Exception e) {
- // this must be hadoop 2.4 , where getDefaultProperties was protected
- }
- }
-
- if (SASL_PROPERTIES_RESOLVER_CLASS == null || GET_DEFAULT_PROP_METHOD == null) {
- // this must be a hadoop 2.4 version or earlier.
- // Resorting to the earlier method of getting the properties, which uses SASL_PROPS field
- try {
- SASL_PROPS_FIELD = SaslRpcServer.class.getField("SASL_PROPS");
- } catch (NoSuchFieldException e) {
- // Older version of hadoop should have had this field
- throw new IllegalStateException("Error finding hadoop SASL_PROPS field in "
- + SaslRpcServer.class.getSimpleName(), e);
- }
- }
- }
-
- /**
- * Read and return Hadoop SASL configuration which can be configured using
- * "hadoop.rpc.protection"
- *
- * @param conf
- * @return Hadoop SASL configuration
- */
- @SuppressWarnings("unchecked")
- @Override
- public Map<String, String> getHadoopSaslProperties(Configuration conf) {
- if (SASL_PROPS_FIELD != null) {
- // hadoop 2.4 and earlier way of finding the sasl property settings
- // Initialize the SaslRpcServer to ensure QOP parameters are read from
- // conf
- SaslRpcServer.init(conf);
- try {
- return (Map<String, String>) SASL_PROPS_FIELD.get(null);
- } catch (Exception e) {
- throw new IllegalStateException("Error finding hadoop SASL properties", e);
- }
- }
- // 2.5 and later way of finding sasl property
- try {
- Configurable saslPropertiesResolver = (Configurable) RES_GET_INSTANCE_METHOD.invoke(null,
- conf);
- saslPropertiesResolver.setConf(conf);
- return (Map<String, String>) GET_DEFAULT_PROP_METHOD.invoke(saslPropertiesResolver);
- } catch (Exception e) {
- throw new IllegalStateException("Error finding hadoop SASL properties", e);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
index f15e7ff..1ea8b50 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hive.shims;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hadoop.util.VersionInfo;
import org.apache.log4j.AppenderSkeleton;
import org.slf4j.Logger;
@@ -37,7 +36,6 @@ public abstract class ShimLoader {
private static volatile HadoopShims hadoopShims;
private static JettyShims jettyShims;
private static AppenderSkeleton eventCounter;
- private static HadoopThriftAuthBridge hadoopThriftAuthBridge;
private static SchedulerShim schedulerShim;
/**
@@ -103,14 +101,6 @@ public abstract class ShimLoader {
return eventCounter;
}
- public static synchronized HadoopThriftAuthBridge getHadoopThriftAuthBridge() {
- if (hadoopThriftAuthBridge == null) {
- hadoopThriftAuthBridge = loadShims(HADOOP_THRIFT_AUTH_BRIDGE_CLASSES,
- HadoopThriftAuthBridge.class);
- }
- return hadoopThriftAuthBridge;
- }
-
public static synchronized SchedulerShim getSchedulerShims() {
if (schedulerShim == null) {
schedulerShim = createShim(SCHEDULER_SHIM_CLASSE, SchedulerShim.class);
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
index 3c93186..d9ae336 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
@@ -26,8 +26,8 @@ import java.util.Map;
import java.util.Set;
import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.LoginException;
import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
+import javax.security.auth.login.LoginException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
@@ -37,19 +37,12 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
-import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosUtil;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.security.token.TokenSelector;
import org.apache.zookeeper.client.ZooKeeperSaslClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class Utils {
@@ -73,72 +66,6 @@ public class Utils {
}
/**
- * Get the string form of the token given a token signature.
- * The signature is used as the value of the "service" field in the token for lookup.
- * Ref: AbstractDelegationTokenSelector in Hadoop. If there exists such a token
- * in the token cache (credential store) of the job, the lookup returns that.
- * This is relevant only when running against a "secure" hadoop release
- * The method gets hold of the tokens if they are set up by hadoop - this should
- * happen on the map/reduce tasks if the client added the tokens into hadoop's
- * credential store in the front end during job submission. The method will
- * select the hive delegation token among the set of tokens and return the string
- * form of it
- * @param tokenSignature
- * @return the string form of the token found
- * @throws IOException
- */
- public static String getTokenStrForm(String tokenSignature) throws IOException {
- UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
- TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector();
-
- Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
- tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
- return token != null ? token.encodeToUrlString() : null;
- }
-
- /**
- * Create a delegation token object for the given token string and service.
- * Add the token to given UGI
- * @param ugi
- * @param tokenStr
- * @param tokenService
- * @throws IOException
- */
- public static void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
- throws IOException {
- Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
- ugi.addToken(delegationToken);
- }
-
- /**
- * Add a given service to delegation token string.
- * @param tokenStr
- * @param tokenService
- * @return
- * @throws IOException
- */
- public static String addServiceToToken(String tokenStr, String tokenService)
- throws IOException {
- Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
- return delegationToken.encodeToUrlString();
- }
-
- /**
- * Create a new token using the given string and service
- * @param tokenStr
- * @param tokenService
- * @return
- * @throws IOException
- */
- private static Token<DelegationTokenIdentifier> createToken(String tokenStr, String tokenService)
- throws IOException {
- Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
- delegationToken.decodeFromUrlString(tokenStr);
- delegationToken.setService(new Text(tokenService));
- return delegationToken;
- }
-
- /**
* Dynamically sets up the JAAS configuration that uses kerberos
* @param principal
* @param keyTabFile
@@ -189,7 +116,7 @@ public class Utils {
krbOptions.put("useKeyTab", "true");
krbOptions.put("keyTab", keyTabFile);
}
- krbOptions.put("principal", principal);
+ krbOptions.put("principal", principal);
krbOptions.put("refreshKrb5Config", "true");
AppConfigurationEntry hiveZooKeeperClientEntry = new AppConfigurationEntry(
KerberosUtil.getKrb5LoginModuleName(), LoginModuleControlFlag.REQUIRED, krbOptions);
@@ -203,6 +130,7 @@ public class Utils {
}
}
+
public static final String XSRF_CUSTOM_HEADER_PARAM = "custom-header";
public static final String XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM = "methods-to-ignore";
private static final String XSRF_HEADER_DEFAULT = "X-XSRF-HEADER";
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DBTokenStore.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DBTokenStore.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DBTokenStore.java
deleted file mode 100644
index 1b54946..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DBTokenStore.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.thrift;
-
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
-import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class DBTokenStore implements DelegationTokenStore {
- private static final Logger LOG = LoggerFactory.getLogger(DBTokenStore.class);
- private Configuration conf;
-
- @Override
- public int addMasterKey(String s) throws TokenStoreException {
- if (LOG.isTraceEnabled()) {
- LOG.trace("addMasterKey: s = " + s);
- }
- return (Integer)invokeOnTokenStore("addMasterKey", new Object[]{s},String.class);
- }
-
- @Override
- public void updateMasterKey(int keySeq, String s) throws TokenStoreException {
- if (LOG.isTraceEnabled()) {
- LOG.trace("updateMasterKey: s = " + s + ", keySeq = " + keySeq);
- }
- invokeOnTokenStore("updateMasterKey", new Object[] {Integer.valueOf(keySeq), s},
- Integer.class, String.class);
- }
-
- @Override
- public boolean removeMasterKey(int keySeq) {
- return (Boolean)invokeOnTokenStore("removeMasterKey", new Object[] {Integer.valueOf(keySeq)},
- Integer.class);
- }
-
- @Override
- public String[] getMasterKeys() throws TokenStoreException {
- return (String[])invokeOnTokenStore("getMasterKeys", new Object[0]);
- }
-
- @Override
- public boolean addToken(DelegationTokenIdentifier tokenIdentifier,
- DelegationTokenInformation token) throws TokenStoreException {
-
- try {
- String identifier = TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier);
- String tokenStr = Base64.encodeBase64URLSafeString(
- HiveDelegationTokenSupport.encodeDelegationTokenInformation(token));
- boolean result = (Boolean)invokeOnTokenStore("addToken", new Object[] {identifier, tokenStr},
- String.class, String.class);
- if (LOG.isTraceEnabled()) {
- LOG.trace("addToken: tokenIdentifier = " + tokenIdentifier + ", added = " + result);
- }
- return result;
- } catch (IOException e) {
- throw new TokenStoreException(e);
- }
- }
-
- @Override
- public DelegationTokenInformation getToken(DelegationTokenIdentifier tokenIdentifier)
- throws TokenStoreException {
- try {
- String tokenStr = (String)invokeOnTokenStore("getToken", new Object[] {
- TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier)}, String.class);
- DelegationTokenInformation result = null;
- if (StringUtils.isNotEmpty(tokenStr)) {
- result = HiveDelegationTokenSupport.decodeDelegationTokenInformation(Base64.decodeBase64(tokenStr));
- }
- if (LOG.isTraceEnabled()) {
- LOG.trace("getToken: tokenIdentifier = " + tokenIdentifier + ", result = " + result);
- }
- return result;
- } catch (IOException e) {
- throw new TokenStoreException(e);
- }
- }
-
- @Override
- public boolean removeToken(DelegationTokenIdentifier tokenIdentifier) throws TokenStoreException{
- try {
- boolean result = (Boolean)invokeOnTokenStore("removeToken", new Object[] {
- TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier)}, String.class);
- if (LOG.isTraceEnabled()) {
- LOG.trace("removeToken: tokenIdentifier = " + tokenIdentifier + ", removed = " + result);
- }
- return result;
- } catch (IOException e) {
- throw new TokenStoreException(e);
- }
- }
-
- @Override
- public List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers() throws TokenStoreException{
-
- List<String> tokenIdents = (List<String>)invokeOnTokenStore("getAllTokenIdentifiers", new Object[0]);
- List<DelegationTokenIdentifier> delTokenIdents = new ArrayList<DelegationTokenIdentifier>(tokenIdents.size());
-
- for (String tokenIdent : tokenIdents) {
- DelegationTokenIdentifier delToken = new DelegationTokenIdentifier();
- try {
- TokenStoreDelegationTokenSecretManager.decodeWritable(delToken, tokenIdent);
- } catch (IOException e) {
- throw new TokenStoreException(e);
- }
- delTokenIdents.add(delToken);
- }
- return delTokenIdents;
- }
-
- private Object handler;
- private ServerMode smode;
-
- @Override
- public void init(Object handler, ServerMode smode) throws TokenStoreException {
- this.handler = handler;
- this.smode = smode;
- }
-
- private Object invokeOnTokenStore(String methName, Object[] params, Class<?> ... paramTypes)
- throws TokenStoreException{
- Object tokenStore;
- try {
- switch (smode) {
- case METASTORE :
- tokenStore = handler.getClass().getMethod("getMS").invoke(handler);
- break;
- case HIVESERVER2 :
- Object hiveObject = ((Class<?>)handler)
- .getMethod("get", org.apache.hadoop.conf.Configuration.class, java.lang.Class.class)
- .invoke(handler, conf, DBTokenStore.class);
- tokenStore = ((Class<?>)handler).getMethod("getMSC").invoke(hiveObject);
- break;
- default:
- throw new TokenStoreException(new Exception("unknown server mode"));
- }
- return tokenStore.getClass().getMethod(methName, paramTypes).invoke(tokenStore, params);
- } catch (IllegalArgumentException e) {
- throw new TokenStoreException(e);
- } catch (SecurityException e) {
- throw new TokenStoreException(e);
- } catch (IllegalAccessException e) {
- throw new TokenStoreException(e);
- } catch (InvocationTargetException e) {
- throw new TokenStoreException(e.getCause());
- } catch (NoSuchMethodException e) {
- throw new TokenStoreException(e);
- }
- }
-
- @Override
- public void setConf(Configuration conf) {
- this.conf = conf;
- }
-
- @Override
- public Configuration getConf() {
- return conf;
- }
-
- @Override
- public void close() throws IOException {
- // No-op.
- }
-
-}
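
The reflective dispatch in invokeOnTokenStore above (carried over into the relocated org.apache.hadoop.hive.metastore.security.DBTokenStore) is easiest to see with the reflection spelled out for one mode. A minimal sketch of the METASTORE branch, with a hypothetical handler stand-in:

import java.lang.reflect.Method;

public class ReflectiveDispatchSketch {
  // Hypothetical stand-in for the HMSHandler case: any object exposing getMS()
  // whose result has the token-store methods works, which is why DBTokenStore
  // holds the handler as a plain Object and avoids a compile-time dependency.
  public static int addMasterKey(Object handler, String keyData) throws Exception {
    Object rawStore = handler.getClass().getMethod("getMS").invoke(handler);
    Method addKey = rawStore.getClass().getMethod("addMasterKey", String.class);
    return (Integer) addKey.invoke(rawStore, keyData);
  }
}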
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java
deleted file mode 100644
index 4ca3c0b..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.thrift;
-
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
-
-/**
- * A delegation token identifier that is specific to Hive.
- */
-public class DelegationTokenIdentifier
- extends AbstractDelegationTokenIdentifier {
- public static final Text HIVE_DELEGATION_KIND = new Text("HIVE_DELEGATION_TOKEN");
-
- /**
- * Create an empty delegation token identifier for reading into.
- */
- public DelegationTokenIdentifier() {
- }
-
- /**
- * Create a new delegation token identifier
- * @param owner the effective username of the token owner
- * @param renewer the username of the renewer
- * @param realUser the real username of the token owner
- */
- public DelegationTokenIdentifier(Text owner, Text renewer, Text realUser) {
- super(owner, renewer, realUser);
- }
-
- @Override
- public Text getKind() {
- return HIVE_DELEGATION_KIND;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
deleted file mode 100644
index 5299e18..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.thrift;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
-
-/**
- * A Hive specific delegation token secret manager.
- * The secret manager is responsible for generating and accepting the password
- * for each token.
- */
-public class DelegationTokenSecretManager
- extends AbstractDelegationTokenSecretManager<DelegationTokenIdentifier> {
-
- /**
- * Create a secret manager
-   * @param delegationKeyUpdateInterval the number of milliseconds between
-   *        rolling new secret keys.
- * @param delegationTokenMaxLifetime the maximum lifetime of the delegation
- * tokens
- * @param delegationTokenRenewInterval how often the tokens must be renewed
- * @param delegationTokenRemoverScanInterval how often the tokens are scanned
- * for expired tokens
- */
- public DelegationTokenSecretManager(long delegationKeyUpdateInterval,
- long delegationTokenMaxLifetime,
- long delegationTokenRenewInterval,
- long delegationTokenRemoverScanInterval) {
- super(delegationKeyUpdateInterval, delegationTokenMaxLifetime,
- delegationTokenRenewInterval, delegationTokenRemoverScanInterval);
- }
-
- @Override
- public DelegationTokenIdentifier createIdentifier() {
- return new DelegationTokenIdentifier();
- }
-
- /**
-   * Verify a delegation token given in its URL-safe string form.
-   * @param tokenStrForm the encoded token string
-   * @return the short user name of the token owner
-   * @throws IOException if the token cannot be decoded or is invalid
- */
- public synchronized String verifyDelegationToken(String tokenStrForm) throws IOException {
- Token<DelegationTokenIdentifier> t = new Token<DelegationTokenIdentifier>();
- t.decodeFromUrlString(tokenStrForm);
-
- DelegationTokenIdentifier id = getTokenIdentifier(t);
- verifyToken(id, t.getPassword());
- return id.getUser().getShortUserName();
- }
-
- protected DelegationTokenIdentifier getTokenIdentifier(Token<DelegationTokenIdentifier> token)
- throws IOException {
- // turn bytes back into identifier for cache lookup
- ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
- DataInputStream in = new DataInputStream(buf);
- DelegationTokenIdentifier id = createIdentifier();
- id.readFields(in);
- return id;
- }
-
- public synchronized void cancelDelegationToken(String tokenStrForm) throws IOException {
- Token<DelegationTokenIdentifier> t= new Token<DelegationTokenIdentifier>();
- t.decodeFromUrlString(tokenStrForm);
- String user = UserGroupInformation.getCurrentUser().getUserName();
- cancelToken(t, user);
- }
-
- public synchronized long renewDelegationToken(String tokenStrForm) throws IOException {
- Token<DelegationTokenIdentifier> t= new Token<DelegationTokenIdentifier>();
- t.decodeFromUrlString(tokenStrForm);
- String user = UserGroupInformation.getCurrentUser().getUserName();
- return renewToken(t, user);
- }
-
- public synchronized String getDelegationToken(String renewer) throws IOException {
- UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
- Text owner = new Text(ugi.getUserName());
- Text realUser = null;
- if (ugi.getRealUser() != null) {
- realUser = new Text(ugi.getRealUser().getUserName());
- }
- DelegationTokenIdentifier ident =
- new DelegationTokenIdentifier(owner, new Text(renewer), realUser);
- Token<DelegationTokenIdentifier> t = new Token<DelegationTokenIdentifier>(
- ident, this);
- return t.encodeToUrlString();
- }
-
- public String getUserFromToken(String tokenStr) throws IOException {
- Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
- delegationToken.decodeFromUrlString(tokenStr);
-
- ByteArrayInputStream buf = new ByteArrayInputStream(delegationToken.getIdentifier());
- DataInputStream in = new DataInputStream(buf);
- DelegationTokenIdentifier id = createIdentifier();
- id.readFields(in);
- return id.getUser().getShortUserName();
- }
-}
-
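To make the life cycle above concrete, here is a hedged sketch of a full issue/verify/renew/cancel round trip. All interval values are illustrative (the constructor takes milliseconds), and the class name SecretManagerDemo is hypothetical:

    import java.io.IOException;

    import org.apache.hadoop.security.UserGroupInformation;

    public class SecretManagerDemo {
      public static void roundTrip() throws IOException {
        DelegationTokenSecretManager mgr = new DelegationTokenSecretManager(
            24L * 60 * 60 * 1000,      // roll a new master key daily
            7L * 24 * 60 * 60 * 1000,  // tokens live at most 7 days
            24L * 60 * 60 * 1000,      // tokens must be renewed at least daily
            60L * 60 * 1000);          // scan for expired tokens hourly
        mgr.startThreads();  // inherited: initializes keys, starts the remover thread
        try {
          // Issue the token to ourselves so the same caller may renew and cancel it.
          String renewer = UserGroupInformation.getCurrentUser().getShortUserName();
          String tokenStr = mgr.getDelegationToken(renewer);
          String owner = mgr.verifyDelegationToken(tokenStr);   // short user name
          long newExpiry = mgr.renewDelegationToken(tokenStr);  // epoch millis
          mgr.cancelDelegationToken(tokenStr);
        } finally {
          mgr.stopThreads();
        }
      }
    }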
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector.java
deleted file mode 100644
index f6e2420..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.thrift;
-
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
-
-/**
- * A delegation token selector that is specialized for Hive.
- */
-
-public class DelegationTokenSelector
- extends AbstractDelegationTokenSelector<DelegationTokenIdentifier>{
-
- public DelegationTokenSelector() {
- super(DelegationTokenIdentifier.HIVE_DELEGATION_KIND);
- }
-}
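On the client side, the selector is what digs the matching token out of a UGI's credentials by service tag. A short sketch, again assuming the same package; the method name and service argument are placeholders:

    import java.io.IOException;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.token.Token;

    public class SelectorDemo {
      // Returns the Hive delegation token registered under the given service
      // tag in the current user's credentials, or null if none matches.
      public static Token<DelegationTokenIdentifier> findHiveToken(String service)
          throws IOException {
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        return new DelegationTokenSelector().selectToken(new Text(service), ugi.getTokens());
      }
    }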
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java
deleted file mode 100644
index 867b4ed..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.thrift;
-
-import java.io.Closeable;
-import java.util.List;
-
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
-
-/**
- * Interface for pluggable token store that can be implemented with shared external
- * storage for load balancing and high availability (for example using ZooKeeper).
- * Internal, store specific errors are translated into {@link TokenStoreException}.
- */
-public interface DelegationTokenStore extends Configurable, Closeable {
-
- /**
- * Exception for internal token store errors that typically cannot be handled by the caller.
- */
- public static class TokenStoreException extends RuntimeException {
- private static final long serialVersionUID = -8693819817623074083L;
-
- public TokenStoreException(Throwable cause) {
- super(cause);
- }
-
- public TokenStoreException(String message, Throwable cause) {
- super(message, cause);
- }
- }
-
- /**
- * Add new master key. The token store assigns and returns the sequence number.
- * Caller needs to use the identifier to update the key (since it is embedded in the key).
- *
-   * @param s encoded master key to add
-   * @return store-assigned sequence number for the new key
- */
- int addMasterKey(String s) throws TokenStoreException;
-
- /**
-   * Update master key (for expiration and setting the store-assigned sequence within the key).
-   * @param keySeq sequence number of the key to update
-   * @param s re-encoded master key
-   * @throws TokenStoreException on internal store errors
- */
- void updateMasterKey(int keySeq, String s) throws TokenStoreException;
-
- /**
- * Remove key for given id.
-   * @param keySeq sequence number of the key to remove
- * @return false if key no longer present, true otherwise.
- */
- boolean removeMasterKey(int keySeq);
-
- /**
- * Return all master keys.
-   * @return all master keys currently in the store
-   * @throws TokenStoreException on internal store errors
- */
- String[] getMasterKeys() throws TokenStoreException;
-
- /**
- * Add token. If identifier is already present, token won't be added.
-   * @param tokenIdentifier identifier of the token to add
-   * @param token token information to store
- * @return true if token was added, false for existing identifier
- */
- boolean addToken(DelegationTokenIdentifier tokenIdentifier,
- DelegationTokenInformation token) throws TokenStoreException;
-
- /**
- * Get token. Returns null if the token does not exist.
-   * @param tokenIdentifier identifier of the token to look up
-   * @return token information, or null if the token does not exist
- */
- DelegationTokenInformation getToken(DelegationTokenIdentifier tokenIdentifier)
- throws TokenStoreException;
-
- /**
- * Remove token. Return value can be used by caller to detect concurrency.
-   * @param tokenIdentifier identifier of the token to remove
- * @return true if token was removed, false if it was already removed.
- * @throws TokenStoreException
- */
- boolean removeToken(DelegationTokenIdentifier tokenIdentifier) throws TokenStoreException;
-
- /**
-   * List all token identifiers in the store. This is used to remove expired tokens;
-   * a potential scalability improvement would be to partition by master key id.
-   * @return list of all token identifiers in the store
- */
- List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers() throws TokenStoreException;
-
- /**
- * @param hmsHandler ObjectStore used by DBTokenStore
-   * @param smode indicates whether this is a metastore or HiveServer2 token store
- */
- void init(Object hmsHandler, ServerMode smode);
-
-}
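The interplay of these methods is easiest to see from the caller's side. A hedged sketch of the contract as the secret-manager wrapper (further below) exercises it; every argument here is a placeholder:

    import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;

    public class StoreContractDemo {
      public static void exercise(DelegationTokenStore store,
          DelegationTokenIdentifier id, DelegationTokenInformation info,
          String encodedKey) {
        int seq = store.addMasterKey(encodedKey);  // store assigns the sequence number
        store.updateMasterKey(seq, encodedKey);    // caller re-saves the key with the sequence embedded
        boolean added = store.addToken(id, info);  // false if the identifier already exists
        DelegationTokenInformation fetched = store.getToken(id); // null if absent
        boolean removed = store.removeToken(id);   // false if already removed
      }
    }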
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
deleted file mode 100644
index 2d39bea..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ /dev/null
@@ -1,689 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.thrift;
-
-import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.Socket;
-import java.net.UnknownHostException;
-import java.security.PrivilegedAction;
-import java.security.PrivilegedExceptionAction;
-import java.util.Locale;
-import java.util.Map;
-
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.callback.NameCallback;
-import javax.security.auth.callback.PasswordCallback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.RealmCallback;
-import javax.security.sasl.RealmChoiceCallback;
-import javax.security.sasl.SaslException;
-import javax.security.sasl.SaslServer;
-
-import org.apache.commons.codec.binary.Base64;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport;
-import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
-import org.apache.hadoop.hive.thrift.DelegationTokenSecretManager;
-import org.apache.hadoop.security.SaslRpcServer;
-import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.token.SecretManager.InvalidToken;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.thrift.TException;
-import org.apache.thrift.TProcessor;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.transport.TSaslClientTransport;
-import org.apache.thrift.transport.TSaslServerTransport;
-import org.apache.thrift.transport.TSocket;
-import org.apache.thrift.transport.TTransport;
-import org.apache.thrift.transport.TTransportException;
-import org.apache.thrift.transport.TTransportFactory;
-
-/**
- * Functions that bridge Thrift's SASL transports to Hadoop's
- * SASL callback handlers and authentication classes.
- * HIVE-11378: this class is not directly used anymore. It now exists only as a shell to be
- * extended by HadoopThriftAuthBridge23 in the 0.23 shims; it is abstract
- * to avoid maintenance errors.
- */
-public abstract class HadoopThriftAuthBridge {
- private static final Logger LOG = LoggerFactory.getLogger(HadoopThriftAuthBridge.class);
-
- public Client createClient() {
- return new Client();
- }
-
- public Client createClientWithConf(String authMethod) {
- UserGroupInformation ugi;
- try {
- ugi = UserGroupInformation.getLoginUser();
- } catch(IOException e) {
- throw new IllegalStateException("Unable to get current login user: " + e, e);
- }
- if (loginUserHasCurrentAuthMethod(ugi, authMethod)) {
- LOG.debug("Not setting UGI conf as passed-in authMethod of " + authMethod + " = current.");
- return new Client();
- } else {
- LOG.debug("Setting UGI conf as passed-in authMethod of " + authMethod + " != current.");
- Configuration conf = new Configuration();
- conf.set(HADOOP_SECURITY_AUTHENTICATION, authMethod);
- UserGroupInformation.setConfiguration(conf);
- return new Client();
- }
- }
-
- public Server createServer(String keytabFile, String principalConf, String clientConf) throws TTransportException {
- return new Server(keytabFile, principalConf, clientConf);
- }
-
- public String getServerPrincipal(String principalConfig, String host)
- throws IOException {
- String serverPrincipal = SecurityUtil.getServerPrincipal(principalConfig, host);
- String names[] = SaslRpcServer.splitKerberosName(serverPrincipal);
- if (names.length != 3) {
- throw new IOException(
- "Kerberos principal name does NOT have the expected hostname part: "
- + serverPrincipal);
- }
- return serverPrincipal;
- }
-
- /**
-   * Method to get the canonicalized hostname, given a hostname (possibly a CNAME).
-   * This should allow service principals to use simplified CNAMEs.
-   * @param hostName The hostname to be canonicalized.
-   * @return Given a CNAME, the canonicalized hostname is returned. If not found, the original hostname is returned.
- */
- public String getCanonicalHostName(String hostName) {
- try {
- return InetAddress.getByName(hostName).getCanonicalHostName();
- }
- catch(UnknownHostException exception) {
- LOG.warn("Could not retrieve canonical hostname for " + hostName, exception);
- return hostName;
- }
- }
-
- public UserGroupInformation getCurrentUGIWithConf(String authMethod)
- throws IOException {
- UserGroupInformation ugi;
- try {
- ugi = UserGroupInformation.getCurrentUser();
- } catch(IOException e) {
- throw new IllegalStateException("Unable to get current user: " + e, e);
- }
- if (loginUserHasCurrentAuthMethod(ugi, authMethod)) {
- LOG.debug("Not setting UGI conf as passed-in authMethod of " + authMethod + " = current.");
- return ugi;
- } else {
- LOG.debug("Setting UGI conf as passed-in authMethod of " + authMethod + " != current.");
- Configuration conf = new Configuration();
- conf.set(HADOOP_SECURITY_AUTHENTICATION, authMethod);
- UserGroupInformation.setConfiguration(conf);
- return UserGroupInformation.getCurrentUser();
- }
- }
-
- /**
- * Return true if the current login user is already using the given authMethod.
- *
- * Used above to ensure we do not create a new Configuration object and as such
- * lose other settings such as the cluster to which the JVM is connected. Required
- * for oozie since it does not have a core-site.xml see HIVE-7682
- */
- private boolean loginUserHasCurrentAuthMethod(UserGroupInformation ugi, String sAuthMethod) {
- AuthenticationMethod authMethod;
- try {
- // based on SecurityUtil.getAuthenticationMethod()
- authMethod = Enum.valueOf(AuthenticationMethod.class, sAuthMethod.toUpperCase(Locale.ENGLISH));
- } catch (IllegalArgumentException iae) {
- throw new IllegalArgumentException("Invalid attribute value for " +
- HADOOP_SECURITY_AUTHENTICATION + " of " + sAuthMethod, iae);
- }
- LOG.debug("Current authMethod = " + ugi.getAuthenticationMethod());
- return ugi.getAuthenticationMethod().equals(authMethod);
- }
-
-
- /**
-   * Read and return the Hadoop SASL configuration, which can be configured using
-   * "hadoop.rpc.protection".
-   * @param conf the Hadoop configuration to read from
-   * @return Hadoop SASL configuration
- */
-
- public abstract Map<String, String> getHadoopSaslProperties(Configuration conf);
-
- public static class Client {
- /**
- * Create a client-side SASL transport that wraps an underlying transport.
- *
-     * @param principalConfig The Kerberos principal of the target server.
-     * @param host The server host, substituted into the principal where needed.
-     * @param methodStr The authentication method to use. Currently KERBEROS and
-     * DIGEST (delegation token) are supported.
-     * @param tokenStrForm The delegation token in URL-safe string form, used for DIGEST.
-     * @param underlyingTransport The underlying transport mechanism, usually a TSocket.
-     * @param saslProps the SASL properties to create the client with
- */
-
-
- public TTransport createClientTransport(
- String principalConfig, String host,
- String methodStr, String tokenStrForm, final TTransport underlyingTransport,
- final Map<String, String> saslProps) throws IOException {
- final AuthMethod method = AuthMethod.valueOf(AuthMethod.class, methodStr);
-
- TTransport saslTransport = null;
- switch (method) {
- case DIGEST:
- Token<DelegationTokenIdentifier> t= new Token<DelegationTokenIdentifier>();
- t.decodeFromUrlString(tokenStrForm);
- saslTransport = new TSaslClientTransport(
- method.getMechanismName(),
- null,
- null, SaslRpcServer.SASL_DEFAULT_REALM,
- saslProps, new SaslClientCallbackHandler(t),
- underlyingTransport);
- return new TUGIAssumingTransport(saslTransport, UserGroupInformation.getCurrentUser());
-
- case KERBEROS:
- String serverPrincipal = SecurityUtil.getServerPrincipal(principalConfig, host);
- final String names[] = SaslRpcServer.splitKerberosName(serverPrincipal);
- if (names.length != 3) {
- throw new IOException(
- "Kerberos principal name does NOT have the expected hostname part: "
- + serverPrincipal);
- }
- try {
- return UserGroupInformation.getCurrentUser().doAs(
- new PrivilegedExceptionAction<TUGIAssumingTransport>() {
- @Override
- public TUGIAssumingTransport run() throws IOException {
- TTransport saslTransport = new TSaslClientTransport(
- method.getMechanismName(),
- null,
- names[0], names[1],
- saslProps, null,
- underlyingTransport);
- return new TUGIAssumingTransport(saslTransport, UserGroupInformation.getCurrentUser());
- }
- });
- } catch (InterruptedException | SaslException se) {
- throw new IOException("Could not instantiate SASL transport", se);
- }
-
- default:
- throw new IOException("Unsupported authentication method: " + method);
- }
- }
- private static class SaslClientCallbackHandler implements CallbackHandler {
- private final String userName;
- private final char[] userPassword;
-
- public SaslClientCallbackHandler(Token<? extends TokenIdentifier> token) {
- this.userName = encodeIdentifier(token.getIdentifier());
- this.userPassword = encodePassword(token.getPassword());
- }
-
-
- @Override
- public void handle(Callback[] callbacks)
- throws UnsupportedCallbackException {
- NameCallback nc = null;
- PasswordCallback pc = null;
- RealmCallback rc = null;
- for (Callback callback : callbacks) {
- if (callback instanceof RealmChoiceCallback) {
- continue;
- } else if (callback instanceof NameCallback) {
- nc = (NameCallback) callback;
- } else if (callback instanceof PasswordCallback) {
- pc = (PasswordCallback) callback;
- } else if (callback instanceof RealmCallback) {
- rc = (RealmCallback) callback;
- } else {
- throw new UnsupportedCallbackException(callback,
- "Unrecognized SASL client callback");
- }
- }
- if (nc != null) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("SASL client callback: setting username: " + userName);
- }
- nc.setName(userName);
- }
- if (pc != null) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("SASL client callback: setting userPassword");
- }
- pc.setPassword(userPassword);
- }
- if (rc != null) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("SASL client callback: setting realm: "
- + rc.getDefaultText());
- }
- rc.setText(rc.getDefaultText());
- }
- }
-
- static String encodeIdentifier(byte[] identifier) {
- return new String(Base64.encodeBase64(identifier));
- }
-
- static char[] encodePassword(byte[] password) {
- return new String(Base64.encodeBase64(password)).toCharArray();
- }
- }
- }
-
- public static class Server {
- public enum ServerMode {
- HIVESERVER2, METASTORE
- };
-
- protected final UserGroupInformation realUgi;
- protected final UserGroupInformation clientValidationUGI;
- protected DelegationTokenSecretManager secretManager;
-
- public Server() throws TTransportException {
- try {
- realUgi = UserGroupInformation.getCurrentUser();
- clientValidationUGI = UserGroupInformation.getCurrentUser();
- } catch (IOException ioe) {
- throw new TTransportException(ioe);
- }
- }
- /**
- * Create a server with a kerberos keytab/principal.
- */
- protected Server(String keytabFile, String principalConf, String clientConf)
- throws TTransportException {
- if (keytabFile == null || keytabFile.isEmpty()) {
- throw new TTransportException("No keytab specified");
- }
- if (principalConf == null || principalConf.isEmpty()) {
- throw new TTransportException("No principal specified");
- }
- if (clientConf == null || clientConf.isEmpty()) {
- // Don't bust existing setups.
- LOG.warn("Client-facing principal not set. Using server-side setting: " + principalConf);
- clientConf = principalConf;
- }
-
- // Login from the keytab
- String kerberosName;
- try {
- LOG.info("Logging in via CLIENT based principal ");
- kerberosName =
- SecurityUtil.getServerPrincipal(clientConf, "0.0.0.0");
- UserGroupInformation.loginUserFromKeytab(
- kerberosName, keytabFile);
- clientValidationUGI = UserGroupInformation.getLoginUser();
- assert clientValidationUGI.isFromKeytab();
-
- LOG.info("Logging in via SERVER based principal ");
- kerberosName =
- SecurityUtil.getServerPrincipal(principalConf, "0.0.0.0");
- UserGroupInformation.loginUserFromKeytab(
- kerberosName, keytabFile);
- realUgi = UserGroupInformation.getLoginUser();
- assert realUgi.isFromKeytab();
- } catch (IOException ioe) {
- throw new TTransportException(ioe);
- }
- }
-
- public void setSecretManager(DelegationTokenSecretManager secretManager) {
- this.secretManager = secretManager;
- }
-
- /**
- * Create a TTransportFactory that, upon connection of a client socket,
- * negotiates a Kerberized SASL transport. The resulting TTransportFactory
- * can be passed as both the input and output transport factory when
- * instantiating a TThreadPoolServer, for example.
- *
- * @param saslProps Map of SASL properties
- */
-
- public TTransportFactory createTransportFactory(Map<String, String> saslProps)
- throws TTransportException {
-
- TSaslServerTransport.Factory transFactory = createSaslServerTransportFactory(saslProps);
-
- return new TUGIAssumingTransportFactory(transFactory, clientValidationUGI);
- }
-
- /**
- * Create a TSaslServerTransport.Factory that, upon connection of a client
- * socket, negotiates a Kerberized SASL transport.
- *
- * @param saslProps Map of SASL properties
- */
- public TSaslServerTransport.Factory createSaslServerTransportFactory(
- Map<String, String> saslProps) throws TTransportException {
- // Parse out the kerberos principal, host, realm.
- String kerberosName = clientValidationUGI.getUserName();
- final String names[] = SaslRpcServer.splitKerberosName(kerberosName);
- if (names.length != 3) {
- throw new TTransportException("Kerberos principal should have 3 parts: " + kerberosName);
- }
-
- TSaslServerTransport.Factory transFactory = new TSaslServerTransport.Factory();
- transFactory.addServerDefinition(
- AuthMethod.KERBEROS.getMechanismName(),
- names[0], names[1], // two parts of kerberos principal
- saslProps,
- new SaslRpcServer.SaslGssCallbackHandler());
- transFactory.addServerDefinition(AuthMethod.DIGEST.getMechanismName(),
- null, SaslRpcServer.SASL_DEFAULT_REALM,
- saslProps, new SaslDigestCallbackHandler(secretManager));
-
- return transFactory;
- }
-
- /**
- * Wrap a TTransportFactory in such a way that, before processing any RPC, it
- * assumes the UserGroupInformation of the user authenticated by
- * the SASL transport.
- */
- public TTransportFactory wrapTransportFactory(TTransportFactory transFactory) {
- return new TUGIAssumingTransportFactory(transFactory, realUgi);
- }
-
- /**
- * Wrap a TProcessor in such a way that, before processing any RPC, it
- * assumes the UserGroupInformation of the user authenticated by
- * the SASL transport.
- */
-
- public TProcessor wrapProcessor(TProcessor processor) {
- return new TUGIAssumingProcessor(processor, secretManager, true);
- }
-
- /**
- * Wrap a TProcessor to capture the client information like connecting userid, ip etc
- */
-
- public TProcessor wrapNonAssumingProcessor(TProcessor processor) {
- return new TUGIAssumingProcessor(processor, secretManager, false);
- }
-
- final static ThreadLocal<InetAddress> remoteAddress =
- new ThreadLocal<InetAddress>() {
-
- @Override
- protected InetAddress initialValue() {
- return null;
- }
- };
-
- public InetAddress getRemoteAddress() {
- return remoteAddress.get();
- }
-
- final static ThreadLocal<AuthenticationMethod> authenticationMethod =
- new ThreadLocal<AuthenticationMethod>() {
-
- @Override
- protected AuthenticationMethod initialValue() {
- return AuthenticationMethod.TOKEN;
- }
- };
-
- private static ThreadLocal<String> remoteUser = new ThreadLocal<String> () {
-
- @Override
- protected String initialValue() {
- return null;
- }
- };
-
-
- public String getRemoteUser() {
- return remoteUser.get();
- }
-
- private final static ThreadLocal<String> userAuthMechanism =
- new ThreadLocal<String>() {
-
- @Override
- protected String initialValue() {
- return AuthMethod.KERBEROS.getMechanismName();
- }
- };
-
- public String getUserAuthMechanism() {
- return userAuthMechanism.get();
- }
- /** CallbackHandler for SASL DIGEST-MD5 mechanism */
- // This code is pretty much completely based on Hadoop's
-  // SaslRpcServer.SaslDigestCallbackHandler - the only reason we could not
-  // use that Hadoop class as-is is that it needs a Server.Connection object,
-  // which is relevant in Hadoop RPC but not here in the metastore - so the
-  // code below does not deal with the Server.Connection object.
- static class SaslDigestCallbackHandler implements CallbackHandler {
- private final DelegationTokenSecretManager secretManager;
-
- public SaslDigestCallbackHandler(
- DelegationTokenSecretManager secretManager) {
- this.secretManager = secretManager;
- }
-
- private char[] getPassword(DelegationTokenIdentifier tokenid) throws InvalidToken {
- return encodePassword(secretManager.retrievePassword(tokenid));
- }
-
- private char[] encodePassword(byte[] password) {
- return new String(Base64.encodeBase64(password)).toCharArray();
- }
- /** {@inheritDoc} */
-
- @Override
- public void handle(Callback[] callbacks) throws InvalidToken,
- UnsupportedCallbackException {
- NameCallback nc = null;
- PasswordCallback pc = null;
- AuthorizeCallback ac = null;
- for (Callback callback : callbacks) {
- if (callback instanceof AuthorizeCallback) {
- ac = (AuthorizeCallback) callback;
- } else if (callback instanceof NameCallback) {
- nc = (NameCallback) callback;
- } else if (callback instanceof PasswordCallback) {
- pc = (PasswordCallback) callback;
- } else if (callback instanceof RealmCallback) {
- continue; // realm is ignored
- } else {
- throw new UnsupportedCallbackException(callback,
- "Unrecognized SASL DIGEST-MD5 Callback");
- }
- }
- if (pc != null) {
- DelegationTokenIdentifier tokenIdentifier = SaslRpcServer.
- getIdentifier(nc.getDefaultName(), secretManager);
- char[] password = getPassword(tokenIdentifier);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("SASL server DIGEST-MD5 callback: setting password "
- + "for client: " + tokenIdentifier.getUser());
- }
- pc.setPassword(password);
- }
- if (ac != null) {
- String authid = ac.getAuthenticationID();
- String authzid = ac.getAuthorizationID();
- if (authid.equals(authzid)) {
- ac.setAuthorized(true);
- } else {
- ac.setAuthorized(false);
- }
- if (ac.isAuthorized()) {
- if (LOG.isDebugEnabled()) {
- String username =
- SaslRpcServer.getIdentifier(authzid, secretManager).getUser().getUserName();
- LOG.debug("SASL server DIGEST-MD5 callback: setting "
- + "canonicalized client ID: " + username);
- }
- ac.setAuthorizedID(authzid);
- }
- }
- }
- }
-
- /**
- * Processor that pulls the SaslServer object out of the transport, and
- * assumes the remote user's UGI before calling through to the original
- * processor.
- *
- * This is used on the server side to set the UGI for each specific call.
- */
- protected class TUGIAssumingProcessor implements TProcessor {
- final TProcessor wrapped;
- DelegationTokenSecretManager secretManager;
- boolean useProxy;
- TUGIAssumingProcessor(TProcessor wrapped, DelegationTokenSecretManager secretManager,
- boolean useProxy) {
- this.wrapped = wrapped;
- this.secretManager = secretManager;
- this.useProxy = useProxy;
- }
-
-
- @Override
- public boolean process(final TProtocol inProt, final TProtocol outProt) throws TException {
- TTransport trans = inProt.getTransport();
- if (!(trans instanceof TSaslServerTransport)) {
- throw new TException("Unexpected non-SASL transport " + trans.getClass());
- }
- TSaslServerTransport saslTrans = (TSaslServerTransport)trans;
- SaslServer saslServer = saslTrans.getSaslServer();
- String authId = saslServer.getAuthorizationID();
- LOG.debug("AUTH ID ======>" + authId);
- String endUser = authId;
-
- Socket socket = ((TSocket)(saslTrans.getUnderlyingTransport())).getSocket();
- remoteAddress.set(socket.getInetAddress());
-
- String mechanismName = saslServer.getMechanismName();
- userAuthMechanism.set(mechanismName);
- if (AuthMethod.PLAIN.getMechanismName().equalsIgnoreCase(mechanismName)) {
- remoteUser.set(endUser);
- return wrapped.process(inProt, outProt);
- }
-
- authenticationMethod.set(AuthenticationMethod.KERBEROS);
- if(AuthMethod.TOKEN.getMechanismName().equalsIgnoreCase(mechanismName)) {
- try {
- TokenIdentifier tokenId = SaslRpcServer.getIdentifier(authId,
- secretManager);
- endUser = tokenId.getUser().getUserName();
- authenticationMethod.set(AuthenticationMethod.TOKEN);
- } catch (InvalidToken e) {
- throw new TException(e.getMessage());
- }
- }
-
- UserGroupInformation clientUgi = null;
- try {
- if (useProxy) {
- clientUgi = UserGroupInformation.createProxyUser(
- endUser, UserGroupInformation.getLoginUser());
- remoteUser.set(clientUgi.getShortUserName());
- LOG.debug("Set remoteUser :" + remoteUser.get());
- return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() {
-
- @Override
- public Boolean run() {
- try {
- return wrapped.process(inProt, outProt);
- } catch (TException te) {
- throw new RuntimeException(te);
- }
- }
- });
- } else {
- // use the short user name for the request
- UserGroupInformation endUserUgi = UserGroupInformation.createRemoteUser(endUser);
- remoteUser.set(endUserUgi.getShortUserName());
- LOG.debug("Set remoteUser :" + remoteUser.get() + ", from endUser :" + endUser);
- return wrapped.process(inProt, outProt);
- }
- } catch (RuntimeException rte) {
- if (rte.getCause() instanceof TException) {
- throw (TException)rte.getCause();
- }
- throw rte;
- } catch (InterruptedException ie) {
- throw new RuntimeException(ie); // unexpected!
- } catch (IOException ioe) {
- throw new RuntimeException(ioe); // unexpected!
- }
- finally {
- if (clientUgi != null) {
- try { FileSystem.closeAllForUGI(clientUgi); }
- catch(IOException exception) {
- LOG.error("Could not clean up file-system handles for UGI: " + clientUgi, exception);
- }
- }
- }
- }
- }
-
- /**
- * A TransportFactory that wraps another one, but assumes a specified UGI
- * before calling through.
- *
- * This is used on the server side to assume the server's Principal when accepting
- * clients.
- */
- static class TUGIAssumingTransportFactory extends TTransportFactory {
- private final UserGroupInformation ugi;
- private final TTransportFactory wrapped;
-
- public TUGIAssumingTransportFactory(TTransportFactory wrapped, UserGroupInformation ugi) {
- assert wrapped != null;
- assert ugi != null;
- this.wrapped = wrapped;
- this.ugi = ugi;
- }
-
-
- @Override
- public TTransport getTransport(final TTransport trans) {
- return ugi.doAs(new PrivilegedAction<TTransport>() {
- @Override
- public TTransport run() {
- return wrapped.getTransport(trans);
- }
- });
- }
- }
- }
-}
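Server-side wiring of the bridge, for orientation: a hedged sketch in which "bridge" is any concrete HadoopThriftAuthBridge (Hive supplies one through its shims), and the keytab path and principals are placeholders:

    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.thrift.TProcessor;
    import org.apache.thrift.transport.TTransportException;
    import org.apache.thrift.transport.TTransportFactory;

    public class SaslServerWiring {
      public static TProcessor wire(HadoopThriftAuthBridge bridge, Configuration conf,
          DelegationTokenSecretManager secretManager, TProcessor underlying)
          throws TTransportException {
        HadoopThriftAuthBridge.Server server = bridge.createServer(
            "/etc/security/keytabs/hive.keytab",  // placeholder keytab path
            "hive/_HOST@EXAMPLE.COM",             // placeholder server principal
            "hive/_HOST@EXAMPLE.COM");            // placeholder client-facing principal
        server.setSecretManager(secretManager);   // enables DIGEST (token) logins
        Map<String, String> saslProps = bridge.getHadoopSaslProperties(conf);
        TTransportFactory transFactory = server.createTransportFactory(saslProps);
        // transFactory would be installed on the Thrift server alongside the
        // processor returned here.
        return server.wrapProcessor(underlying);
      }
    }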
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HiveDelegationTokenManager.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HiveDelegationTokenManager.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HiveDelegationTokenManager.java
deleted file mode 100644
index b3e4a76..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HiveDelegationTokenManager.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.hadoop.hive.thrift;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.security.PrivilegedExceptionAction;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.shims.Utils;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.authorize.AuthorizationException;
-import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.hadoop.util.ReflectionUtils;
-
-public class HiveDelegationTokenManager {
-
- public static final String DELEGATION_TOKEN_GC_INTERVAL =
- "hive.cluster.delegation.token.gc-interval";
- private final static long DELEGATION_TOKEN_GC_INTERVAL_DEFAULT = 3600000; // 1 hour
- // Delegation token related keys
- public static final String DELEGATION_KEY_UPDATE_INTERVAL_KEY =
- "hive.cluster.delegation.key.update-interval";
- public static final long DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT =
- 24*60*60*1000; // 1 day
- public static final String DELEGATION_TOKEN_RENEW_INTERVAL_KEY =
- "hive.cluster.delegation.token.renew-interval";
- public static final long DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT =
- 24*60*60*1000; // 1 day
- public static final String DELEGATION_TOKEN_MAX_LIFETIME_KEY =
- "hive.cluster.delegation.token.max-lifetime";
- public static final long DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT =
- 7*24*60*60*1000; // 7 days
- public static final String DELEGATION_TOKEN_STORE_CLS =
- "hive.cluster.delegation.token.store.class";
- public static final String DELEGATION_TOKEN_STORE_ZK_CONNECT_STR =
- "hive.cluster.delegation.token.store.zookeeper.connectString";
- // Alternate connect string specification configuration
- public static final String DELEGATION_TOKEN_STORE_ZK_CONNECT_STR_ALTERNATE =
- "hive.zookeeper.quorum";
-
- public static final String DELEGATION_TOKEN_STORE_ZK_CONNECT_TIMEOUTMILLIS =
- "hive.cluster.delegation.token.store.zookeeper.connectTimeoutMillis";
- public static final String DELEGATION_TOKEN_STORE_ZK_ZNODE =
- "hive.cluster.delegation.token.store.zookeeper.znode";
- public static final String DELEGATION_TOKEN_STORE_ZK_ACL =
- "hive.cluster.delegation.token.store.zookeeper.acl";
- public static final String DELEGATION_TOKEN_STORE_ZK_ZNODE_DEFAULT =
- "/hivedelegation";
-
- protected DelegationTokenSecretManager secretManager;
-
- public HiveDelegationTokenManager() {
- }
-
- public DelegationTokenSecretManager getSecretManager() {
- return secretManager;
- }
-
- public void startDelegationTokenSecretManager(Configuration conf, Object hms, ServerMode smode)
- throws IOException {
- long secretKeyInterval =
- conf.getLong(DELEGATION_KEY_UPDATE_INTERVAL_KEY, DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT);
- long tokenMaxLifetime =
- conf.getLong(DELEGATION_TOKEN_MAX_LIFETIME_KEY, DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT);
- long tokenRenewInterval =
- conf.getLong(DELEGATION_TOKEN_RENEW_INTERVAL_KEY, DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT);
- long tokenGcInterval =
- conf.getLong(DELEGATION_TOKEN_GC_INTERVAL, DELEGATION_TOKEN_GC_INTERVAL_DEFAULT);
-
- DelegationTokenStore dts = getTokenStore(conf);
- dts.setConf(conf);
- dts.init(hms, smode);
- secretManager =
- new TokenStoreDelegationTokenSecretManager(secretKeyInterval, tokenMaxLifetime,
- tokenRenewInterval, tokenGcInterval, dts);
- secretManager.startThreads();
- }
-
- public String getDelegationToken(final String owner, final String renewer, String remoteAddr)
- throws IOException,
- InterruptedException {
- /**
-     * If the user asking for the token is the same as the 'owner', then don't do
- * any proxy authorization checks. For cases like oozie, where it gets
- * a delegation token for another user, we need to make sure oozie is
- * authorized to get a delegation token.
- */
- // Do all checks on short names
- UserGroupInformation currUser = UserGroupInformation.getCurrentUser();
- UserGroupInformation ownerUgi = UserGroupInformation.createRemoteUser(owner);
- if (!ownerUgi.getShortUserName().equals(currUser.getShortUserName())) {
- // in the case of proxy users, the getCurrentUser will return the
- // real user (for e.g. oozie) due to the doAs that happened just before the
- // server started executing the method getDelegationToken in the MetaStore
- ownerUgi = UserGroupInformation.createProxyUser(owner, UserGroupInformation.getCurrentUser());
- ProxyUsers.authorize(ownerUgi, remoteAddr, null);
- }
- return ownerUgi.doAs(new PrivilegedExceptionAction<String>() {
-
- @Override
- public String run() throws IOException {
- return secretManager.getDelegationToken(renewer);
- }
- });
- }
-
- public String getDelegationTokenWithService(String owner, String renewer, String service, String remoteAddr)
- throws IOException, InterruptedException {
- String token = getDelegationToken(owner, renewer, remoteAddr);
- return Utils.addServiceToToken(token, service);
- }
-
- public long renewDelegationToken(String tokenStrForm)
- throws IOException {
- return secretManager.renewDelegationToken(tokenStrForm);
- }
-
- public String getUserFromToken(String tokenStr) throws IOException {
- return secretManager.getUserFromToken(tokenStr);
- }
-
- public void cancelDelegationToken(String tokenStrForm) throws IOException {
- secretManager.cancelDelegationToken(tokenStrForm);
- }
-
- /**
-   * Verify a delegation token given in its URL-safe string form.
-   * @param tokenStrForm the encoded token string
-   * @return the short user name of the token owner
-   * @throws IOException if the token cannot be decoded or is invalid
- */
- public String verifyDelegationToken(String tokenStrForm) throws IOException {
- return secretManager.verifyDelegationToken(tokenStrForm);
- }
-
- private DelegationTokenStore getTokenStore(Configuration conf) throws IOException {
- String tokenStoreClassName = conf.get(DELEGATION_TOKEN_STORE_CLS, "");
- if (StringUtils.isBlank(tokenStoreClassName)) {
- return new MemoryTokenStore();
- }
- try {
- Class<? extends DelegationTokenStore> storeClass =
- Class.forName(tokenStoreClassName).asSubclass(DelegationTokenStore.class);
- return ReflectionUtils.newInstance(storeClass, conf);
- } catch (ClassNotFoundException e) {
- throw new IOException("Error initializing delegation token store: " + tokenStoreClassName, e);
- }
- }
-
-
-}
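Bootstrapping the manager, as a hedged sketch: the store class is selected through hive.cluster.delegation.token.store.class (MemoryTokenStore is also the default when unset), and the owner/renewer/address values are placeholders:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    public class TokenManagerDemo {
      public static String start(Configuration conf) throws IOException, InterruptedException {
        conf.set(HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_CLS,
            MemoryTokenStore.class.getName());
        HiveDelegationTokenManager mgr = new HiveDelegationTokenManager();
        // MemoryTokenStore.init() ignores its arguments, so null is safe here;
        // DBTokenStore would need the real HMS handler instead.
        mgr.startDelegationTokenSecretManager(conf, null,
            HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
        // Ask for a token owned by and renewable by ourselves, so no proxy
        // authorization check is triggered.
        String me = UserGroupInformation.getCurrentUser().getShortUserName();
        return mgr.getDelegationToken(me, me, "127.0.0.1");
      }
    }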
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
deleted file mode 100644
index 9d837b8..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.thrift;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Default in-memory token store implementation.
- */
-public class MemoryTokenStore implements DelegationTokenStore {
- private static final Logger LOG = LoggerFactory.getLogger(MemoryTokenStore.class);
-
- private final Map<Integer, String> masterKeys
- = new ConcurrentHashMap<Integer, String>();
-
- private final ConcurrentHashMap<DelegationTokenIdentifier, DelegationTokenInformation> tokens
- = new ConcurrentHashMap<DelegationTokenIdentifier, DelegationTokenInformation>();
-
- private final AtomicInteger masterKeySeq = new AtomicInteger();
- private Configuration conf;
-
- @Override
- public void setConf(Configuration conf) {
- this.conf = conf;
- }
-
- @Override
- public Configuration getConf() {
- return this.conf;
- }
-
- @Override
- public int addMasterKey(String s) {
- int keySeq = masterKeySeq.getAndIncrement();
- if (LOG.isTraceEnabled()) {
- LOG.trace("addMasterKey: s = " + s + ", keySeq = " + keySeq);
- }
- masterKeys.put(keySeq, s);
- return keySeq;
- }
-
- @Override
- public void updateMasterKey(int keySeq, String s) {
- if (LOG.isTraceEnabled()) {
- LOG.trace("updateMasterKey: s = " + s + ", keySeq = " + keySeq);
- }
- masterKeys.put(keySeq, s);
- }
-
- @Override
- public boolean removeMasterKey(int keySeq) {
- if (LOG.isTraceEnabled()) {
- LOG.trace("removeMasterKey: keySeq = " + keySeq);
- }
- return masterKeys.remove(keySeq) != null;
- }
-
- @Override
- public String[] getMasterKeys() {
- return masterKeys.values().toArray(new String[0]);
- }
-
- @Override
- public boolean addToken(DelegationTokenIdentifier tokenIdentifier,
- DelegationTokenInformation token) {
- DelegationTokenInformation tokenInfo = tokens.putIfAbsent(tokenIdentifier, token);
- if (LOG.isTraceEnabled()) {
- LOG.trace("addToken: tokenIdentifier = " + tokenIdentifier + ", added = " + (tokenInfo == null));
- }
- return (tokenInfo == null);
- }
-
- @Override
- public boolean removeToken(DelegationTokenIdentifier tokenIdentifier) {
- DelegationTokenInformation tokenInfo = tokens.remove(tokenIdentifier);
- if (LOG.isTraceEnabled()) {
- LOG.trace("removeToken: tokenIdentifier = " + tokenIdentifier + ", removed = " + (tokenInfo != null));
- }
- return tokenInfo != null;
- }
-
- @Override
- public DelegationTokenInformation getToken(DelegationTokenIdentifier tokenIdentifier) {
- DelegationTokenInformation result = tokens.get(tokenIdentifier);
- if (LOG.isTraceEnabled()) {
- LOG.trace("getToken: tokenIdentifier = " + tokenIdentifier + ", result = " + result);
- }
- return result;
- }
-
- @Override
- public List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers() {
- List<DelegationTokenIdentifier> result = new ArrayList<DelegationTokenIdentifier>(
- tokens.size());
- for (DelegationTokenIdentifier id : tokens.keySet()) {
- result.add(id);
- }
- return result;
- }
-
- @Override
- public void close() throws IOException {
- //no-op
- }
-
- @Override
- public void init(Object hmsHandler, ServerMode smode) throws TokenStoreException {
- // no-op
- }
-}
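A quick usage sketch for the in-memory store; the principal names and dates are placeholders. Note the putIfAbsent semantics promised by the interface:

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;

    public class MemoryStoreDemo {
      public static void demo() {
        MemoryTokenStore store = new MemoryTokenStore();
        DelegationTokenIdentifier id = new DelegationTokenIdentifier(
            new Text("alice"), new Text("alice"), null);  // placeholder principals
        DelegationTokenInformation info = new DelegationTokenInformation(
            System.currentTimeMillis() + 3600 * 1000L,    // renew date an hour out
            new byte[0]);                                 // empty password, for illustration
        boolean first = store.addToken(id, info);   // true: newly added
        boolean second = store.addToken(id, info);  // false: identifier already present
        store.removeToken(id);
      }
    }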
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
deleted file mode 100644
index 4719b85..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
+++ /dev/null
@@ -1,335 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.thrift;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.lang.reflect.Method;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
-import org.apache.hadoop.security.token.delegation.DelegationKey;
-import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport;
-import org.apache.hadoop.util.Daemon;
-import org.apache.hadoop.util.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Extension of {@link DelegationTokenSecretManager} to support an alternative to the default
- * in-memory token management, for fail-over and clustering through a pluggable token store
- * (ZooKeeper etc.).
- * Delegation tokens will be retrieved from the store on-demand and (unlike base class behavior) not
- * cached in memory. This avoids complexities related to token expiration. The security token is
- * needed only at the time the transport is opened (as opposed to per interface operation). The
- * assumption therefore is low cost of interprocess token retrieval (for random read efficient store
- * such as ZooKeeper) compared to overhead of synchronizing per-process in-memory token caches.
- * The wrapper incorporates the token store abstraction within the limitations of current
- * Hive/Hadoop dependency (.20S) with minimum code duplication.
- * Eventually this should be supported by Hadoop security directly.
- */
-public class TokenStoreDelegationTokenSecretManager extends DelegationTokenSecretManager {
-
- private static final Logger LOGGER =
- LoggerFactory.getLogger(TokenStoreDelegationTokenSecretManager.class.getName());
-
- final private long keyUpdateInterval;
- final private long tokenRemoverScanInterval;
- private Thread tokenRemoverThread;
-
- final private DelegationTokenStore tokenStore;
-
- public TokenStoreDelegationTokenSecretManager(long delegationKeyUpdateInterval,
- long delegationTokenMaxLifetime, long delegationTokenRenewInterval,
- long delegationTokenRemoverScanInterval,
- DelegationTokenStore sharedStore) {
- super(delegationKeyUpdateInterval, delegationTokenMaxLifetime, delegationTokenRenewInterval,
- delegationTokenRemoverScanInterval);
- this.keyUpdateInterval = delegationKeyUpdateInterval;
- this.tokenRemoverScanInterval = delegationTokenRemoverScanInterval;
-
- this.tokenStore = sharedStore;
- }
-
- protected Map<Integer, DelegationKey> reloadKeys() {
- // read keys from token store
- String[] allKeys = tokenStore.getMasterKeys();
- Map<Integer, DelegationKey> keys
- = new HashMap<Integer, DelegationKey>(allKeys.length);
- for (String keyStr : allKeys) {
- DelegationKey key = new DelegationKey();
- try {
- decodeWritable(key, keyStr);
- keys.put(key.getKeyId(), key);
- } catch (IOException ex) {
- LOGGER.error("Failed to load master key.", ex);
- }
- }
- synchronized (this) {
- super.allKeys.clear();
- super.allKeys.putAll(keys);
- }
- return keys;
- }
-
- @Override
- public byte[] retrievePassword(DelegationTokenIdentifier identifier) throws InvalidToken {
- DelegationTokenInformation info = this.tokenStore.getToken(identifier);
- if (info == null) {
- throw new InvalidToken("token expired or does not exist: " + identifier);
- }
- // must reuse super as info.getPassword is not accessible
- synchronized (this) {
- try {
- super.currentTokens.put(identifier, info);
- return super.retrievePassword(identifier);
- } finally {
- super.currentTokens.remove(identifier);
- }
- }
- }
-
- @Override
- public DelegationTokenIdentifier cancelToken(Token<DelegationTokenIdentifier> token,
- String canceller) throws IOException {
- DelegationTokenIdentifier id = getTokenIdentifier(token);
- LOGGER.info("Token cancelation requested for identifier: "+id);
- this.tokenStore.removeToken(id);
- return id;
- }
-
- /**
- * Create the password and add it to shared store.
- */
- @Override
- protected byte[] createPassword(DelegationTokenIdentifier id) {
- byte[] password;
- DelegationTokenInformation info;
- synchronized (this) {
- password = super.createPassword(id);
- // add new token to shared store
- // need to persist expiration along with password
- info = super.currentTokens.remove(id);
- if (info == null) {
- throw new IllegalStateException("Failed to retrieve token after creation");
- }
- }
- this.tokenStore.addToken(id, info);
- return password;
- }
-
- @Override
- public long renewToken(Token<DelegationTokenIdentifier> token,
- String renewer) throws InvalidToken, IOException {
- // since renewal is KERBEROS authenticated token may not be cached
- final DelegationTokenIdentifier id = getTokenIdentifier(token);
- DelegationTokenInformation tokenInfo = this.tokenStore.getToken(id);
- if (tokenInfo == null) {
- throw new InvalidToken("token does not exist: " + id); // no token found
- }
- // ensure associated master key is available
- if (!super.allKeys.containsKey(id.getMasterKeyId())) {
- LOGGER.info("Unknown master key (id={}), (re)loading keys from token store.",
- id.getMasterKeyId());
- reloadKeys();
- }
- // reuse super renewal logic
- synchronized (this) {
- super.currentTokens.put(id, tokenInfo);
- try {
- return super.renewToken(token, renewer);
- } finally {
- super.currentTokens.remove(id);
- }
- }
- }
-
- public static String encodeWritable(Writable key) throws IOException {
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- DataOutputStream dos = new DataOutputStream(bos);
- key.write(dos);
- dos.flush();
- return Base64.encodeBase64URLSafeString(bos.toByteArray());
- }
-
- public static void decodeWritable(Writable w, String idStr) throws IOException {
- DataInputStream in = new DataInputStream(new ByteArrayInputStream(Base64.decodeBase64(idStr)));
- w.readFields(in);
- }
-
- /**
- * Synchronize master key updates / sequence generation for multiple nodes.
- * NOTE: {@link AbstractDelegationTokenSecretManager} keeps currentKey private, so we need
- * to utilize this "hook" to manipulate the key through the object reference.
- * This .20S workaround should cease to exist when Hadoop supports token store.
- */
- @Override
- protected void logUpdateMasterKey(DelegationKey key) throws IOException {
- int keySeq = this.tokenStore.addMasterKey(encodeWritable(key));
- // update key with assigned identifier
- DelegationKey keyWithSeq = new DelegationKey(keySeq, key.getExpiryDate(), key.getKey());
- String keyStr = encodeWritable(keyWithSeq);
- this.tokenStore.updateMasterKey(keySeq, keyStr);
- decodeWritable(key, keyStr);
- LOGGER.info("New master key with key id={}", key.getKeyId());
- super.logUpdateMasterKey(key);
- }
-
- @Override
- public synchronized void startThreads() throws IOException {
- try {
- // updateCurrentKey needs to be called to initialize the master key
- // (there should be a null check added in the future in rollMasterKey)
- // updateCurrentKey();
- Method m = AbstractDelegationTokenSecretManager.class.getDeclaredMethod("updateCurrentKey");
- m.setAccessible(true);
- m.invoke(this);
- } catch (Exception e) {
- throw new IOException("Failed to initialize master key", e);
- }
- running = true;
- tokenRemoverThread = new Daemon(new ExpiredTokenRemover());
- tokenRemoverThread.start();
- }
-
- @Override
- public synchronized void stopThreads() {
- if (LOGGER.isDebugEnabled()) {
- LOGGER.debug("Stopping expired delegation token remover thread");
- }
- running = false;
- if (tokenRemoverThread != null) {
- tokenRemoverThread.interrupt();
- }
- }
-
- /**
- * Remove expired tokens. Replaces logic in {@link AbstractDelegationTokenSecretManager}
- * that cannot be reused due to private method access. Logic here can more efficiently
- * deal with external token store by only loading into memory the minimum data needed.
- */
- protected void removeExpiredTokens() {
- long now = System.currentTimeMillis();
- Iterator<DelegationTokenIdentifier> i = tokenStore.getAllDelegationTokenIdentifiers()
- .iterator();
- while (i.hasNext()) {
- DelegationTokenIdentifier id = i.next();
- if (now > id.getMaxDate()) {
- this.tokenStore.removeToken(id); // no need to look at token info
- } else {
- // get token info to check renew date
- DelegationTokenInformation tokenInfo = tokenStore.getToken(id);
- if (tokenInfo != null) {
- if (now > tokenInfo.getRenewDate()) {
- this.tokenStore.removeToken(id);
- }
- }
- }
- }
- }
-
- /**
- * Extension of rollMasterKey that also removes expired keys from the store.
- *
- * @throws IOException
- */
- protected void rollMasterKeyExt() throws IOException {
- Map<Integer, DelegationKey> keys = reloadKeys();
- int currentKeyId = super.currentId;
- HiveDelegationTokenSupport.rollMasterKey(TokenStoreDelegationTokenSecretManager.this);
- List<DelegationKey> keysAfterRoll = Arrays.asList(getAllKeys());
- for (DelegationKey key : keysAfterRoll) {
- keys.remove(key.getKeyId());
- if (key.getKeyId() == currentKeyId) {
- tokenStore.updateMasterKey(currentKeyId, encodeWritable(key));
- }
- }
- for (DelegationKey expiredKey : keys.values()) {
- LOGGER.info("Removing expired key id={}", expiredKey.getKeyId());
- try {
- tokenStore.removeMasterKey(expiredKey.getKeyId());
- } catch (Exception e) {
- LOGGER.error("Error removing expired key id={}", expiredKey.getKeyId(), e);
- }
- }
- }
-
- /**
- * Cloned from {@link AbstractDelegationTokenSecretManager} to deal with private access
- * restriction (there would not be a need to clone the remover thread if the removal logic were
- * protected/extensible).
- */
- protected class ExpiredTokenRemover extends Thread {
- private long lastMasterKeyUpdate;
- private long lastTokenCacheCleanup;
-
- @Override
- public void run() {
- LOGGER.info("Starting expired delegation token remover thread, "
- + "tokenRemoverScanInterval=" + tokenRemoverScanInterval
- / (60 * 1000) + " min(s)");
- while (running) {
- try {
- long now = System.currentTimeMillis();
- if (lastMasterKeyUpdate + keyUpdateInterval < now) {
- try {
- rollMasterKeyExt();
- lastMasterKeyUpdate = now;
- } catch (IOException e) {
- LOGGER.error("Master key updating failed. "
- + StringUtils.stringifyException(e));
- }
- }
- if (lastTokenCacheCleanup + tokenRemoverScanInterval < now) {
- removeExpiredTokens();
- lastTokenCacheCleanup = now;
- }
- try {
- Thread.sleep(5000); // 5 seconds
- } catch (InterruptedException ie) {
- LOGGER.error("InterruptedException received for ExpiredTokenRemover thread " + ie);
- }
- } catch (Throwable t) {
- LOGGER.error("ExpiredTokenRemover thread received unexpected exception. "
- + t, t);
- // Also sleep 5 seconds after an exception, so that we do not end up busy-waiting
- // while the underlying problem persists
- try {
- Thread.sleep(5000); // 5 seconds
- } catch (InterruptedException ie) {
- LOGGER.error("InterruptedException received for ExpiredTokenRemover thread during " +
- "wait in exception sleep " + ie);
- }
- }
- }
- }
- }
-
-}
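
A note on the pair of static helpers above: encodeWritable/decodeWritable are what
let a token identifier double as a store key (for example, as a ZooKeeper node name
in the store below), which is why the URL-safe Base64 alphabet is used. A minimal,
self-contained sketch of the same round trip, using org.apache.hadoop.io.Text as a
stand-in Writable (WritableCodecDemo is a hypothetical class, not part of this patch):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class WritableCodecDemo {
  // Same shape as TokenStoreDelegationTokenSecretManager.encodeWritable
  static String encodeWritable(Writable w) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bos);
    w.write(dos);
    dos.flush();
    // URL-safe alphabet: the output is legal in znode names and database keys
    return Base64.encodeBase64URLSafeString(bos.toByteArray());
  }

  // Same shape as TokenStoreDelegationTokenSecretManager.decodeWritable
  static void decodeWritable(Writable w, String idStr) throws IOException {
    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(Base64.decodeBase64(idStr)));
    w.readFields(in);
  }

  public static void main(String[] args) throws IOException {
    Text original = new Text("owner=hive, renewer=metastore");
    String encoded = encodeWritable(original);
    Text decoded = new Text();
    decodeWritable(decoded, encoded);
    System.out.println(original.equals(decoded)); // prints: true
  }
}
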
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/shims/common/src/main/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
deleted file mode 100644
index 885ec56..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
+++ /dev/null
@@ -1,476 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.thrift;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.framework.api.ACLProvider;
-import org.apache.curator.framework.imps.CuratorFrameworkState;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.hive.shims.Utils;
-import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
-import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.ZooDefs.Ids;
-import org.apache.zookeeper.ZooDefs.Perms;
-import org.apache.zookeeper.data.ACL;
-import org.apache.zookeeper.data.Id;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * ZooKeeper token store implementation.
- */
-public class ZooKeeperTokenStore implements DelegationTokenStore {
-
- private static final Logger LOGGER =
- LoggerFactory.getLogger(ZooKeeperTokenStore.class.getName());
-
- protected static final String ZK_SEQ_FORMAT = "%010d";
- private static final String NODE_KEYS = "/keys";
- private static final String NODE_TOKENS = "/tokens";
-
- private String rootNode = "";
- private volatile CuratorFramework zkSession;
- private String zkConnectString;
- private int connectTimeoutMillis;
- private List<ACL> newNodeAcl = Arrays.asList(new ACL(Perms.ALL, Ids.AUTH_IDS));
-
- /**
- * ACLProvider permissions are used when parent directories need to be created.
- */
- private final ACLProvider aclDefaultProvider = new ACLProvider() {
-
- @Override
- public List<ACL> getDefaultAcl() {
- return newNodeAcl;
- }
-
- @Override
- public List<ACL> getAclForPath(String path) {
- return getDefaultAcl();
- }
- };
-
-
- private ServerMode serverMode;
-
- private final String WHEN_ZK_DSTORE_MSG = "when zookeeper based delegation token storage is enabled "
- + "(hive.cluster.delegation.token.store.class=" + ZooKeeperTokenStore.class.getName() + ")";
-
- private Configuration conf;
-
- /**
- * Default constructor for dynamic instantiation w/ Configurable
- * (ReflectionUtils does not support Configuration constructor injection).
- */
- protected ZooKeeperTokenStore() {
- }
-
- private CuratorFramework getSession() {
- if (zkSession == null || zkSession.getState() == CuratorFrameworkState.STOPPED) {
- synchronized (this) {
- if (zkSession == null || zkSession.getState() == CuratorFrameworkState.STOPPED) {
- zkSession =
- CuratorFrameworkFactory.builder().connectString(zkConnectString)
- .connectionTimeoutMs(connectTimeoutMillis).aclProvider(aclDefaultProvider)
- .retryPolicy(new ExponentialBackoffRetry(1000, 3)).build();
- zkSession.start();
- }
- }
- }
- return zkSession;
- }
-
- private void setupJAASConfig(Configuration conf) throws IOException {
- if (!UserGroupInformation.getLoginUser().isFromKeytab()) {
- // The process has not logged in using keytab
- // this should be a test mode, can't use keytab to authenticate
- // with zookeeper.
- LOGGER.warn("Login is not from keytab");
- return;
- }
-
- String principal;
- String keytab;
- switch (serverMode) {
- case METASTORE:
- principal = getNonEmptyConfVar(conf, "hive.metastore.kerberos.principal");
- keytab = getNonEmptyConfVar(conf, "hive.metastore.kerberos.keytab.file");
- break;
- case HIVESERVER2:
- principal = getNonEmptyConfVar(conf, "hive.server2.authentication.kerberos.principal");
- keytab = getNonEmptyConfVar(conf, "hive.server2.authentication.kerberos.keytab");
- break;
- default:
- throw new AssertionError("Unexpected server mode " + serverMode);
- }
- Utils.setZookeeperClientKerberosJaasConfig(principal, keytab);
- }
-
- private String getNonEmptyConfVar(Configuration conf, String param) throws IOException {
- String val = conf.get(param);
- if (val == null || val.trim().isEmpty()) {
- throw new IOException("Configuration parameter " + param + " should be set, "
- + WHEN_ZK_DSTORE_MSG);
- }
- return val;
- }
-
- /**
- * Create a path if it does not already exist ("mkdir -p")
- * @param path string with '/' separator
- * @param acl list of ACL entries
- * @throws TokenStoreException
- */
- public void ensurePath(String path, List<ACL> acl)
- throws TokenStoreException {
- try {
- CuratorFramework zk = getSession();
- String node = zk.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT)
- .withACL(acl).forPath(path);
- LOGGER.info("Created path: {} ", node);
- } catch (KeeperException.NodeExistsException e) {
- // node already exists
- } catch (Exception e) {
- throw new TokenStoreException("Error creating path " + path, e);
- }
- }
-
- /**
- * Parse an ACL permission string, following ZooKeeperMain's private method.
- * @param permString permission characters, e.g. "cdrwa"
- * @return bitmask of the corresponding {@link ZooDefs.Perms} flags
- */
- public static int getPermFromString(String permString) {
- int perm = 0;
- for (int i = 0; i < permString.length(); i++) {
- switch (permString.charAt(i)) {
- case 'r':
- perm |= ZooDefs.Perms.READ;
- break;
- case 'w':
- perm |= ZooDefs.Perms.WRITE;
- break;
- case 'c':
- perm |= ZooDefs.Perms.CREATE;
- break;
- case 'd':
- perm |= ZooDefs.Perms.DELETE;
- break;
- case 'a':
- perm |= ZooDefs.Perms.ADMIN;
- break;
- default:
- LOGGER.error("Unknown perm type: " + permString.charAt(i));
- }
- }
- return perm;
- }
-
- /**
- * Parse comma separated list of ACL entries to secure generated nodes, e.g.
- * <code>sasl:hive/host1@MY.DOMAIN:cdrwa,sasl:hive/host2@MY.DOMAIN:cdrwa</code>
- * @param aclString comma separated ACL specification, as above
- * @return ACL list
- */
- public static List<ACL> parseACLs(String aclString) {
- String[] aclComps = StringUtils.splitByWholeSeparator(aclString, ",");
- List<ACL> acl = new ArrayList<ACL>(aclComps.length);
- for (String a : aclComps) {
- if (StringUtils.isBlank(a)) {
- continue;
- }
- a = a.trim();
- // from ZooKeeperMain private method
- int firstColon = a.indexOf(':');
- int lastColon = a.lastIndexOf(':');
- if (firstColon == -1 || lastColon == -1 || firstColon == lastColon) {
- LOGGER.error(a + " does not have the form scheme:id:perm");
- continue;
- }
- ACL newAcl = new ACL();
- newAcl.setId(new Id(a.substring(0, firstColon), a.substring(
- firstColon + 1, lastColon)));
- newAcl.setPerms(getPermFromString(a.substring(lastColon + 1)));
- acl.add(newAcl);
- }
- return acl;
- }
-
- private void initClientAndPaths() {
- if (this.zkSession != null) {
- this.zkSession.close();
- }
- // ensurePath already wraps failures in TokenStoreException; no need to catch and rethrow
- ensurePath(rootNode + NODE_KEYS, newNodeAcl);
- ensurePath(rootNode + NODE_TOKENS, newNodeAcl);
- }
-
- @Override
- public void setConf(Configuration conf) {
- if (conf == null) {
- throw new IllegalArgumentException("conf is null");
- }
- this.conf = conf;
- }
-
- @Override
- public Configuration getConf() {
- return null; // not required
- }
-
- private Map<Integer, byte[]> getAllKeys() throws KeeperException, InterruptedException {
-
- String masterKeyNode = rootNode + NODE_KEYS;
-
- // get children of key node
- List<String> nodes = zkGetChildren(masterKeyNode);
-
- // read each child node, add to results
- Map<Integer, byte[]> result = new HashMap<Integer, byte[]>();
- for (String node : nodes) {
- String nodePath = masterKeyNode + "/" + node;
- byte[] data = zkGetData(nodePath);
- if (data != null) {
- result.put(getSeq(node), data);
- }
- }
- return result;
- }
-
- private List<String> zkGetChildren(String path) {
- CuratorFramework zk = getSession();
- try {
- return zk.getChildren().forPath(path);
- } catch (Exception e) {
- throw new TokenStoreException("Error getting children for " + path, e);
- }
- }
-
- private byte[] zkGetData(String nodePath) {
- CuratorFramework zk = getSession();
- try {
- return zk.getData().forPath(nodePath);
- } catch (KeeperException.NoNodeException ex) {
- return null;
- } catch (Exception e) {
- throw new TokenStoreException("Error reading " + nodePath, e);
- }
- }
-
- private int getSeq(String path) {
- String[] pathComps = path.split("/");
- return Integer.parseInt(pathComps[pathComps.length-1]);
- }
-
- @Override
- public int addMasterKey(String s) {
- String keysPath = rootNode + NODE_KEYS + "/";
- CuratorFramework zk = getSession();
- String newNode;
- try {
- newNode = zk.create().withMode(CreateMode.PERSISTENT_SEQUENTIAL).withACL(newNodeAcl)
- .forPath(keysPath, s.getBytes());
- } catch (Exception e) {
- throw new TokenStoreException("Error creating new node with path " + keysPath, e);
- }
- LOGGER.info("Added key {}", newNode);
- return getSeq(newNode);
- }
-
- @Override
- public void updateMasterKey(int keySeq, String s) {
- CuratorFramework zk = getSession();
- String keyPath = rootNode + NODE_KEYS + "/" + String.format(ZK_SEQ_FORMAT, keySeq);
- try {
- zk.setData().forPath(keyPath, s.getBytes());
- } catch (Exception e) {
- throw new TokenStoreException("Error setting data in " + keyPath, e);
- }
- }
-
- @Override
- public boolean removeMasterKey(int keySeq) {
- String keyPath = rootNode + NODE_KEYS + "/" + String.format(ZK_SEQ_FORMAT, keySeq);
- zkDelete(keyPath);
- return true;
- }
-
- private void zkDelete(String path) {
- CuratorFramework zk = getSession();
- try {
- zk.delete().forPath(path);
- } catch (KeeperException.NoNodeException ex) {
- // already deleted
- } catch (Exception e) {
- throw new TokenStoreException("Error deleting " + path, e);
- }
- }
-
- @Override
- public String[] getMasterKeys() {
- try {
- Map<Integer, byte[]> allKeys = getAllKeys();
- String[] result = new String[allKeys.size()];
- int resultIdx = 0;
- for (byte[] keyBytes : allKeys.values()) {
- result[resultIdx++] = new String(keyBytes);
- }
- return result;
- } catch (KeeperException ex) {
- throw new TokenStoreException(ex);
- } catch (InterruptedException ex) {
- throw new TokenStoreException(ex);
- }
- }
-
-
- private String getTokenPath(DelegationTokenIdentifier tokenIdentifier) {
- try {
- return rootNode + NODE_TOKENS + "/"
- + TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier);
- } catch (IOException ex) {
- throw new TokenStoreException("Failed to encode token identifier", ex);
- }
- }
-
- @Override
- public boolean addToken(DelegationTokenIdentifier tokenIdentifier,
- DelegationTokenInformation token) {
- byte[] tokenBytes = HiveDelegationTokenSupport.encodeDelegationTokenInformation(token);
- String tokenPath = getTokenPath(tokenIdentifier);
- CuratorFramework zk = getSession();
- String newNode;
- try {
- newNode = zk.create().withMode(CreateMode.PERSISTENT).withACL(newNodeAcl)
- .forPath(tokenPath, tokenBytes);
- } catch (Exception e) {
- throw new TokenStoreException("Error creating new node with path " + tokenPath, e);
- }
-
- LOGGER.info("Added token: {}", newNode);
- return true;
- }
-
- @Override
- public boolean removeToken(DelegationTokenIdentifier tokenIdentifier) {
- String tokenPath = getTokenPath(tokenIdentifier);
- zkDelete(tokenPath);
- return true;
- }
-
- @Override
- public DelegationTokenInformation getToken(DelegationTokenIdentifier tokenIdentifier) {
- byte[] tokenBytes = zkGetData(getTokenPath(tokenIdentifier));
- if (tokenBytes == null) {
- // The token is already removed.
- return null;
- }
- try {
- return HiveDelegationTokenSupport.decodeDelegationTokenInformation(tokenBytes);
- } catch (Exception ex) {
- throw new TokenStoreException("Failed to decode token", ex);
- }
- }
-
- @Override
- public List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers() {
- String containerNode = rootNode + NODE_TOKENS;
- final List<String> nodes = zkGetChildren(containerNode);
- List<DelegationTokenIdentifier> result = new ArrayList<DelegationTokenIdentifier>(nodes.size());
- for (String node : nodes) {
- DelegationTokenIdentifier id = new DelegationTokenIdentifier();
- try {
- TokenStoreDelegationTokenSecretManager.decodeWritable(id, node);
- result.add(id);
- } catch (Exception e) {
- LOGGER.warn("Failed to decode token '{}'", node);
- }
- }
- return result;
- }
-
- @Override
- public void close() throws IOException {
- if (this.zkSession != null) {
- this.zkSession.close();
- }
- }
-
- @Override
- public void init(Object hmsHandler, ServerMode smode) {
- this.serverMode = smode;
- zkConnectString =
- conf.get(HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR, null);
- if (zkConnectString == null || zkConnectString.trim().isEmpty()) {
- // try alternate config param
- zkConnectString =
- conf.get(
- HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR_ALTERNATE,
- null);
- if (zkConnectString == null || zkConnectString.trim().isEmpty()) {
- throw new IllegalArgumentException("Zookeeper connect string has to be specifed through "
- + "either " + HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR
- + " or "
- + HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR_ALTERNATE
- + WHEN_ZK_DSTORE_MSG);
- }
- }
- connectTimeoutMillis =
- conf.getInt(
- HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_TIMEOUTMILLIS,
- CuratorFrameworkFactory.builder().getConnectionTimeoutMs());
- String aclStr = conf.get(HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL, null);
- if (StringUtils.isNotBlank(aclStr)) {
- this.newNodeAcl = parseACLs(aclStr);
- }
- rootNode =
- conf.get(HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE,
- HiveDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE_DEFAULT) + serverMode;
-
- try {
- // Install the JAAS Configuration for the runtime
- setupJAASConfig(conf);
- } catch (IOException e) {
- throw new TokenStoreException("Error setting up JAAS configuration for zookeeper client "
- + e.getMessage(), e);
- }
- initClientAndPaths();
- }
-
-}
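
For reference, the ACL strings consumed by parseACLs/getPermFromString above follow
ZooKeeper's scheme:id:perm form, comma separated. A short sketch of what they yield,
calling the helpers as they stood in the deleted class (AclDemo is hypothetical and
assumes an import of ZooKeeperTokenStore from org.apache.hadoop.hive.thrift):

import java.util.List;

import org.apache.zookeeper.ZooDefs.Perms;
import org.apache.zookeeper.data.ACL;

public class AclDemo {
  public static void main(String[] args) {
    List<ACL> acls = ZooKeeperTokenStore.parseACLs(
        "sasl:hive/host1@MY.DOMAIN:cdrwa,sasl:hive/host2@MY.DOMAIN:cdrwa");
    for (ACL acl : acls) {
      System.out.println(acl.getId().getScheme()); // sasl
      System.out.println(acl.getId().getId());     // hive/host1@MY.DOMAIN, then host2
      // 'cdrwa' sets CREATE|DELETE|READ|WRITE|ADMIN, which equals Perms.ALL
      System.out.println(acl.getPerms() == Perms.ALL); // true
    }
  }
}
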
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DBTokenStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DBTokenStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DBTokenStore.java
new file mode 100644
index 0000000..2f26010
--- /dev/null
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DBTokenStore.java
@@ -0,0 +1,192 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.security;
+
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge.Server.ServerMode;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
+import org.apache.hadoop.security.token.delegation.MetastoreDelegationTokenSupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DBTokenStore implements DelegationTokenStore {
+ private static final Logger LOG = LoggerFactory.getLogger(DBTokenStore.class);
+ private Configuration conf;
+
+ @Override
+ public int addMasterKey(String s) throws TokenStoreException {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("addMasterKey: s = " + s);
+ }
+ return (Integer)invokeOnTokenStore("addMasterKey", new Object[]{s},String.class);
+ }
+
+ @Override
+ public void updateMasterKey(int keySeq, String s) throws TokenStoreException {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("updateMasterKey: s = " + s + ", keySeq = " + keySeq);
+ }
+ invokeOnTokenStore("updateMasterKey", new Object[] {Integer.valueOf(keySeq), s},
+ Integer.class, String.class);
+ }
+
+ @Override
+ public boolean removeMasterKey(int keySeq) {
+ return (Boolean)invokeOnTokenStore("removeMasterKey", new Object[] {Integer.valueOf(keySeq)},
+ Integer.class);
+ }
+
+ @Override
+ public String[] getMasterKeys() throws TokenStoreException {
+ return (String[])invokeOnTokenStore("getMasterKeys", new Object[0]);
+ }
+
+ @Override
+ public boolean addToken(DelegationTokenIdentifier tokenIdentifier,
+ DelegationTokenInformation token) throws TokenStoreException {
+
+ try {
+ String identifier = TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier);
+ String tokenStr = Base64.encodeBase64URLSafeString(
+ MetastoreDelegationTokenSupport.encodeDelegationTokenInformation(token));
+ boolean result = (Boolean)invokeOnTokenStore("addToken", new Object[] {identifier, tokenStr},
+ String.class, String.class);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("addToken: tokenIdentifier = " + tokenIdentifier + ", added = " + result);
+ }
+ return result;
+ } catch (IOException e) {
+ throw new TokenStoreException(e);
+ }
+ }
+
+ @Override
+ public DelegationTokenInformation getToken(DelegationTokenIdentifier tokenIdentifier)
+ throws TokenStoreException {
+ try {
+ String tokenStr = (String)invokeOnTokenStore("getToken", new Object[] {
+ TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier)}, String.class);
+ DelegationTokenInformation result = null;
+ if (StringUtils.isNotEmpty(tokenStr)) {
+ result = MetastoreDelegationTokenSupport.decodeDelegationTokenInformation(Base64.decodeBase64(tokenStr));
+ }
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("getToken: tokenIdentifier = " + tokenIdentifier + ", result = " + result);
+ }
+ return result;
+ } catch (IOException e) {
+ throw new TokenStoreException(e);
+ }
+ }
+
+ @Override
+ public boolean removeToken(DelegationTokenIdentifier tokenIdentifier) throws TokenStoreException {
+ try {
+ boolean result = (Boolean)invokeOnTokenStore("removeToken", new Object[] {
+ TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier)}, String.class);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("removeToken: tokenIdentifier = " + tokenIdentifier + ", removed = " + result);
+ }
+ return result;
+ } catch (IOException e) {
+ throw new TokenStoreException(e);
+ }
+ }
+
+ @Override
+ public List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers() throws TokenStoreException {
+
+ List<String> tokenIdents = (List<String>)invokeOnTokenStore("getAllTokenIdentifiers", new Object[0]);
+ List<DelegationTokenIdentifier> delTokenIdents = new ArrayList<DelegationTokenIdentifier>(tokenIdents.size());
+
+ for (String tokenIdent : tokenIdents) {
+ DelegationTokenIdentifier delToken = new DelegationTokenIdentifier();
+ try {
+ TokenStoreDelegationTokenSecretManager.decodeWritable(delToken, tokenIdent);
+ } catch (IOException e) {
+ throw new TokenStoreException(e);
+ }
+ delTokenIdents.add(delToken);
+ }
+ return delTokenIdents;
+ }
+
+ private Object handler;
+ private ServerMode serverMode;
+
+ @Override
+ public void init(Object handler, HadoopThriftAuthBridge.Server.ServerMode serverMode) throws TokenStoreException {
+ this.handler = handler;
+ this.serverMode = serverMode;
+ }
+
+ private Object invokeOnTokenStore(String methName, Object[] params, Class<?>... paramTypes)
+ throws TokenStoreException {
+ Object tokenStore;
+ try {
+ switch (serverMode) {
+ case METASTORE:
+ tokenStore = handler.getClass().getMethod("getMS").invoke(handler);
+ break;
+ case HIVESERVER2:
+ Object hiveObject = ((Class<?>) handler)
+ .getMethod("get", org.apache.hadoop.conf.Configuration.class, java.lang.Class.class)
+ .invoke(handler, conf, DBTokenStore.class);
+ tokenStore = ((Class<?>) handler).getMethod("getMSC").invoke(hiveObject);
+ break;
+ default:
+ throw new IllegalArgumentException("Unexpected value of Server mode " + serverMode);
+ }
+ return tokenStore.getClass().getMethod(methName, paramTypes).invoke(tokenStore, params);
+ } catch (IllegalArgumentException e) {
+ throw new TokenStoreException(e);
+ } catch (SecurityException e) {
+ throw new TokenStoreException(e);
+ } catch (IllegalAccessException e) {
+ throw new TokenStoreException(e);
+ } catch (InvocationTargetException e) {
+ throw new TokenStoreException(e.getCause());
+ } catch (NoSuchMethodException e) {
+ throw new TokenStoreException(e);
+ }
+ }
+
+ @Override
+ public void setConf(Configuration conf) {
+ this.conf = conf;
+ }
+
+ @Override
+ public Configuration getConf() {
+ return conf;
+ }
+
+ @Override
+ public void close() throws IOException {
+ // No-op.
+ }
+
+}
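
Why the reflection in invokeOnTokenStore: DBTokenStore cannot compile against the
handler types directly, since in METASTORE mode the handler exposes getMS(), while in
HIVESERVER2 mode a Class object is passed in and a static get(...) plus getMSC() are
invoked through it, and neither type is visible from this module. A stripped-down
sketch of the name-based dispatch pattern (ReflectiveCall is hypothetical, not Hive API):

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

public class ReflectiveCall {
  static Object call(Object target, String methName, Object[] params,
      Class<?>... paramTypes) {
    try {
      // Look the method up by name and parameter types, then invoke it
      Method m = target.getClass().getMethod(methName, paramTypes);
      return m.invoke(target, params);
    } catch (InvocationTargetException e) {
      // Unwrap the real failure thrown by the target method
      throw new RuntimeException(e.getCause());
    } catch (ReflectiveOperationException e) {
      throw new RuntimeException(e);
    }
  }

  public static void main(String[] args) {
    // Dispatch String.length() by name, analogous to "addMasterKey" etc. above
    System.out.println(call("hello", "length", new Object[0])); // prints: 5
  }
}
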
http://git-wip-us.apache.org/repos/asf/hive/blob/8fea1176/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSelector.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSelector.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSelector.java
new file mode 100644
index 0000000..d384a37
--- /dev/null
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSelector.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.security;
+
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
+
+/**
+ * A delegation token selector specialized for the Hive delegation token kind.
+ */
+public class DelegationTokenSelector
+ extends AbstractDelegationTokenSelector<DelegationTokenIdentifier> {
+
+ public DelegationTokenSelector() {
+ super(DelegationTokenIdentifier.HIVE_DELEGATION_KIND);
+ }
+}
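
On the client side, a selector like this is what picks the Hive delegation token out
of a user's credentials by service name. A usage sketch (SelectorDemo is hypothetical,
and the service string is illustrative; real callers pass the service the token was
issued under):

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;

public class SelectorDemo {
  public static void main(String[] args) throws Exception {
    // Assumes DelegationTokenSelector is imported from the package above
    DelegationTokenSelector selector = new DelegationTokenSelector();
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    Token<?> token =
        selector.selectToken(new Text("exampleHiveService"), ugi.getTokens());
    System.out.println(token == null ? "no token found" : token.getKind());
  }
}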