Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2014/11/06 09:04:45 UTC

[34/43] git commit: HADOOP-11272. Allow ZKSignerSecretProvider and ZKDelegationTokenSecretManager to use the same curator client. Contributed by Arun Suresh.

HADOOP-11272. Allow ZKSignerSecretProvider and ZKDelegationTokenSecretManager to use the same curator client. Contributed by Arun Suresh.


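For context, the sharing pattern this change enables can be sketched roughly as follows. This is not part of the patch: the helper class and method names below are hypothetical, while the attribute constant and the setCurator() thread-local setter are the ones the diff below relies on. The ZKSignerSecretProvider publishes its CuratorFramework as a ServletContext attribute; the filter hands that client to ZKDelegationTokenSecretManager for the duration of auth-handler initialization, so the delegation token secret manager reuses the existing ZooKeeper connection instead of opening a second one.

    import javax.servlet.FilterConfig;

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.hadoop.security.authentication.util.ZKSignerSecretProvider;
    import org.apache.hadoop.security.token.delegation.ZKDelegationTokenSecretManager;

    public class SharedCuratorInitSketch {
      /**
       * Publishes the CuratorFramework created by ZKSignerSecretProvider (stored
       * as a ServletContext attribute) to ZKDelegationTokenSecretManager for the
       * duration of the supplied initialization step, then clears it again.
       */
      public static void runWithSharedCurator(FilterConfig filterConfig,
          Runnable initStep) {
        CuratorFramework shared = (CuratorFramework) filterConfig
            .getServletContext().getAttribute(ZKSignerSecretProvider
                .ZOOKEEPER_SIGNER_SECRET_PROVIDER_CURATOR_CLIENT_ATTRIBUTE);
        ZKDelegationTokenSecretManager.setCurator(shared); // share one ZK client
        try {
          initStep.run(); // e.g. construct the auth handler / secret manager
        } finally {
          ZKDelegationTokenSecretManager.setCurator(null); // always clear the thread-local
        }
      }
    }

The actual patch implements this by splitting AuthenticationFilter.init() into overridable steps and overriding initializeAuthHandler() in DelegationTokenAuthenticationFilter, as shown in the hunks below.
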
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8a261e68
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8a261e68
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8a261e68

Branch: refs/heads/HDFS-EC
Commit: 8a261e68e4177b47be01ceae7310ea56aeb7ca38
Parents: 6ba52d8
Author: Aaron T. Myers <at...@apache.org>
Authored: Wed Nov 5 17:47:22 2014 -0800
Committer: Aaron T. Myers <at...@apache.org>
Committed: Wed Nov 5 17:47:22 2014 -0800

----------------------------------------------------------------------
 .../server/AuthenticationFilter.java            |  21 +++-
 .../server/TestAuthenticationFilter.java        |   2 +
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../ZKDelegationTokenSecretManager.java         |   6 +-
 .../DelegationTokenAuthenticationFilter.java    |  22 ++--
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 101 +++++++++++++++++++
 6 files changed, 142 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8a261e68/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index a070345..58d97ca 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -219,6 +219,19 @@ public class AuthenticationFilter implements Filter {
       authHandlerClassName = authHandlerName;
     }
 
+    validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000"))
+        * 1000; //10 hours
+    initializeSecretProvider(filterConfig);
+
+    initializeAuthHandler(authHandlerClassName, filterConfig);
+
+
+    cookieDomain = config.getProperty(COOKIE_DOMAIN, null);
+    cookiePath = config.getProperty(COOKIE_PATH, null);
+  }
+
+  protected void initializeAuthHandler(String authHandlerClassName, FilterConfig filterConfig)
+      throws ServletException {
     try {
       Class<?> klass = Thread.currentThread().getContextClassLoader().loadClass(authHandlerClassName);
       authHandler = (AuthenticationHandler) klass.newInstance();
@@ -230,9 +243,10 @@ public class AuthenticationFilter implements Filter {
     } catch (IllegalAccessException ex) {
       throw new ServletException(ex);
     }
+  }
 
-    validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000"))
-        * 1000; //10 hours
+  protected void initializeSecretProvider(FilterConfig filterConfig)
+      throws ServletException {
     secretProvider = (SignerSecretProvider) filterConfig.getServletContext().
         getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE);
     if (secretProvider == null) {
@@ -254,9 +268,6 @@ public class AuthenticationFilter implements Filter {
       customSecretProvider = true;
     }
     signer = new Signer(secretProvider);
-
-    cookieDomain = config.getProperty(COOKIE_DOMAIN, null);
-    cookiePath = config.getProperty(COOKIE_PATH, null);
   }
 
   @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8a261e68/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
index 5d93fcf..3b6b958 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
@@ -283,6 +283,8 @@ public class TestAuthenticationFilter {
     filter = new AuthenticationFilter();
     try {
       FilterConfig config = Mockito.mock(FilterConfig.class);
+      ServletContext sc = Mockito.mock(ServletContext.class);
+      Mockito.when(config.getServletContext()).thenReturn(sc);
       Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn("kerberos");
       Mockito.when(config.getInitParameterNames()).thenReturn(
         new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8a261e68/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 8567e1e..55ef9d3 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -409,6 +409,9 @@ Release 2.7.0 - UNRELEASED
     HADOOP-10714. AmazonS3Client.deleteObjects() need to be limited to 1000
     entries per call. (Juan Yu via atm)
 
+    HADOOP-11272. Allow ZKSignerSecretProvider and
+    ZKDelegationTokenSecretManager to use the same curator client. (Arun Suresh via atm)
+
 Release 2.6.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8a261e68/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java
index 82dd2da..ebc45a5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java
@@ -136,7 +136,11 @@ public abstract class ZKDelegationTokenSecretManager<TokenIdent extends Abstract
         conf.getLong(DelegationTokenManager.REMOVAL_SCAN_INTERVAL,
             DelegationTokenManager.REMOVAL_SCAN_INTERVAL_DEFAULT) * 1000);
     if (CURATOR_TL.get() != null) {
-      zkClient = CURATOR_TL.get();
+      zkClient =
+          CURATOR_TL.get().usingNamespace(
+              conf.get(ZK_DTSM_ZNODE_WORKING_PATH,
+                  ZK_DTSM_ZNODE_WORKING_PATH_DEAFULT)
+                  + "/" + ZK_DTSM_NAMESPACE);
       isExternalClient = true;
     } else {
       String connString = conf.get(ZK_DTSM_ZK_CONNECTION_STRING);

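The hunk above scopes an externally supplied client with Curator's usingNamespace() facade, so everything the secret manager writes lands under its configured working path rather than at the root the shared client points to. A minimal sketch of that behaviour, assuming a locally built client and an illustrative namespace string (the real value is assembled from ZK_DTSM_ZNODE_WORKING_PATH and ZK_DTSM_NAMESPACE as in the code above):

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.retry.RetryOneTime;

    public class CuratorNamespaceSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder connection string; in the patch the client is the one
        // already created by ZKSignerSecretProvider, not built here.
        CuratorFramework zk = CuratorFrameworkFactory.builder()
            .connectString("localhost:2181")
            .retryPolicy(new RetryOneTime(1000))
            .build();
        zk.start();
        try {
          // usingNamespace() returns a facade over the same connection; paths
          // created through it are silently prefixed with the namespace.
          CuratorFramework scoped = zk.usingNamespace("zkdtsm/ZKDTSM");
          scoped.create().creatingParentsIfNeeded().forPath("/demo");
          // The znode created on the ensemble is /zkdtsm/ZKDTSM/demo.
        } finally {
          zk.close();
        }
      }
    }
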
http://git-wip-us.apache.org/repos/asf/hadoop/blob/8a261e68/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
index aa9ec99..fbd1129 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.security.token.delegation.web;
 
 import com.google.common.annotations.VisibleForTesting;
+
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -46,6 +47,7 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletRequestWrapper;
 import javax.servlet.http.HttpServletResponse;
+
 import java.io.IOException;
 import java.io.Writer;
 import java.nio.charset.Charset;
@@ -156,14 +158,7 @@ public class DelegationTokenAuthenticationFilter
 
   @Override
   public void init(FilterConfig filterConfig) throws ServletException {
-    // A single CuratorFramework should be used for a ZK cluster.
-    // If the ZKSignerSecretProvider has already created it, it has to
-    // be set here... to be used by the ZKDelegationTokenSecretManager
-    ZKDelegationTokenSecretManager.setCurator((CuratorFramework)
-        filterConfig.getServletContext().getAttribute(ZKSignerSecretProvider.
-            ZOOKEEPER_SIGNER_SECRET_PROVIDER_CURATOR_CLIENT_ATTRIBUTE));
     super.init(filterConfig);
-    ZKDelegationTokenSecretManager.setCurator(null);
     AuthenticationHandler handler = getAuthenticationHandler();
     AbstractDelegationTokenSecretManager dtSecretManager =
         (AbstractDelegationTokenSecretManager) filterConfig.getServletContext().
@@ -188,6 +183,19 @@ public class DelegationTokenAuthenticationFilter
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf, PROXYUSER_PREFIX);
   }
 
+  @Override
+  protected void initializeAuthHandler(String authHandlerClassName,
+      FilterConfig filterConfig) throws ServletException {
+    // A single CuratorFramework should be used for a ZK cluster.
+    // If the ZKSignerSecretProvider has already created it, it has to
+    // be set here... to be used by the ZKDelegationTokenSecretManager
+    ZKDelegationTokenSecretManager.setCurator((CuratorFramework)
+        filterConfig.getServletContext().getAttribute(ZKSignerSecretProvider.
+            ZOOKEEPER_SIGNER_SECRET_PROVIDER_CURATOR_CLIENT_ATTRIBUTE));
+    super.initializeAuthHandler(authHandlerClassName, filterConfig);
+    ZKDelegationTokenSecretManager.setCurator(null);
+  }
+
   protected void setHandlerAuthMethod(SaslRpcServer.AuthMethod authMethod) {
     this.handlerAuthMethod = authMethod;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8a261e68/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
index 4628e36..9e76178 100644
--- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
+++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
 
+import org.apache.curator.test.TestingServer;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.kms.server.KeyAuthorizationKeyProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
@@ -1585,6 +1586,106 @@ public class TestKMS {
   }
 
   @Test
+  public void testKMSWithZKSigner() throws Exception {
+    doKMSWithZK(true, false);
+  }
+
+  @Test
+  public void testKMSWithZKDTSM() throws Exception {
+    doKMSWithZK(false, true);
+  }
+
+  @Test
+  public void testKMSWithZKSignerAndDTSM() throws Exception {
+    doKMSWithZK(true, true);
+  }
+
+  public void doKMSWithZK(boolean zkDTSM, boolean zkSigner) throws Exception {
+    TestingServer zkServer = null;
+    try {
+      zkServer = new TestingServer();
+      zkServer.start();
+
+      Configuration conf = new Configuration();
+      conf.set("hadoop.security.authentication", "kerberos");
+      UserGroupInformation.setConfiguration(conf);
+      final File testDir = getTestDir();
+      conf = createBaseKMSConf(testDir);
+      conf.set("hadoop.kms.authentication.type", "kerberos");
+      conf.set("hadoop.kms.authentication.kerberos.keytab", keytab.getAbsolutePath());
+      conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
+      conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
+
+      if (zkSigner) {
+        conf.set("hadoop.kms.authentication.signer.secret.provider", "zookeeper");
+        conf.set("hadoop.kms.authentication.signer.secret.provider.zookeeper.path","/testKMSWithZKDTSM");
+        conf.set("hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string",zkServer.getConnectString());
+      }
+
+      if (zkDTSM) {
+        conf.set("hadoop.kms.authentication.zk-dt-secret-manager.enable", "true");
+      }
+      if (zkDTSM && !zkSigner) {
+        conf.set("hadoop.kms.authentication.zk-dt-secret-manager.zkConnectionString", zkServer.getConnectString());
+        conf.set("hadoop.kms.authentication.zk-dt-secret-manager.znodeWorkingPath", "testZKPath");
+        conf.set("hadoop.kms.authentication.zk-dt-secret-manager.zkAuthType", "none");
+      }
+
+      for (KMSACLs.Type type : KMSACLs.Type.values()) {
+        conf.set(type.getAclConfigKey(), type.toString());
+      }
+      conf.set(KMSACLs.Type.CREATE.getAclConfigKey(),
+          KMSACLs.Type.CREATE.toString() + ",SET_KEY_MATERIAL");
+
+      conf.set(KMSACLs.Type.ROLLOVER.getAclConfigKey(),
+          KMSACLs.Type.ROLLOVER.toString() + ",SET_KEY_MATERIAL");
+
+      conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k0.ALL", "*");
+      conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k1.ALL", "*");
+      conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k2.ALL", "*");
+      conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k3.ALL", "*");
+
+      writeConf(testDir, conf);
+
+      KMSCallable<KeyProvider> c =
+          new KMSCallable<KeyProvider>() {
+        @Override
+        public KeyProvider call() throws Exception {
+          final Configuration conf = new Configuration();
+          conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
+          final URI uri = createKMSUri(getKMSUrl());
+
+          final KeyProvider kp =
+              doAs("SET_KEY_MATERIAL",
+                  new PrivilegedExceptionAction<KeyProvider>() {
+                    @Override
+                    public KeyProvider run() throws Exception {
+                      KMSClientProvider kp = new KMSClientProvider(uri, conf);
+                          kp.createKey("k1", new byte[16],
+                              new KeyProvider.Options(conf));
+                          kp.createKey("k2", new byte[16],
+                              new KeyProvider.Options(conf));
+                          kp.createKey("k3", new byte[16],
+                              new KeyProvider.Options(conf));
+                      return kp;
+                    }
+                  });
+          return kp;
+        }
+      };
+
+      runServer(null, null, testDir, c);
+    } finally {
+      if (zkServer != null) {
+        zkServer.stop();
+        zkServer.close();
+      }
+    }
+
+  }
+
+
+  @Test
   public void testProxyUserKerb() throws Exception {
     doProxyUserTest(true);
   }