Posted to common-commits@hadoop.apache.org by sh...@apache.org on 2021/09/20 12:30:21 UTC

[hadoop] branch trunk updated: HDFS-16129. Fixing the signature secret file misusage in HttpFS. Contributed by Tamas Domok

This is an automated email from the ASF dual-hosted git repository.

shuzirra pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new f93e8fb  HDFS-16129. Fixing the signature secret file misusage in HttpFS. Contributed by Tamas Domok
f93e8fb is described below

commit f93e8fbf2d72ae2a4786a5cfee395013b68b145c
Author: Tamas Domok <do...@gmail.com>
AuthorDate: Mon Sep 20 14:29:50 2021 +0200

    HDFS-16129. Fixing the signature secret file misusage in HttpFS. Contributed by Tamas Domok
    
    * HDFS-16129. Fixing the signature secret file misusage in HttpFS.
    
    The signature secret file was not used in HttpFS.
     - if the configuration did not contain the deprecated
    httpfs.authentication.signature.secret.file option, then the
    random secret provider was used
     - if both options (httpfs. and hadoop.http.) were set, then
    the HttpFSAuthenticationFilter could not read the file,
    because the file path was not substituted properly
    
    !NOTE! behavioral change: the deprecated httpfs. configuration
    values are overwritten with the hadoop.http. values.
    
    The commit also contains a follow-up change to YARN-10814:
    empty secret files will result in a random secret provider.
    
    Co-authored-by: Tamas Domok <td...@cloudera.com>
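
The behavioral change relies on Hadoop's configuration key deprecation
mechanism. Below is a minimal, self-contained sketch of the resulting key
resolution; the demo class name and file paths are hypothetical, while
Configuration.addDeprecations and DeprecationDelta are the same APIs used by
addDeprecatedKeys() in HttpFSServerWebServer further down in this diff:

    import org.apache.hadoop.conf.Configuration;

    // Hypothetical demo class, not part of the commit.
    public class SecretFileDeprecationDemo {
      public static void main(String[] args) {
        // Register the mapping from the deprecated key to its replacement,
        // mirroring addDeprecatedKeys() in HttpFSServerWebServer.
        Configuration.addDeprecations(new Configuration.DeprecationDelta[]{
            new Configuration.DeprecationDelta(
                "httpfs.authentication.signature.secret.file",
                "hadoop.http.authentication.signature.secret.file")
        });

        Configuration conf = new Configuration(false);

        // Setting only the deprecated key also populates the new key.
        conf.set("httpfs.authentication.signature.secret.file",
            "/tmp/httpfs-signature.secret");
        // Prints /tmp/httpfs-signature.secret
        System.out.println(
            conf.get("hadoop.http.authentication.signature.secret.file"));

        // If both keys are set, the hadoop.http. value wins: a get() on a
        // deprecated key is answered from its replacement key.
        conf.set("hadoop.http.authentication.signature.secret.file",
            "/tmp/other.secret");
        // Prints /tmp/other.secret
        System.out.println(
            conf.get("httpfs.authentication.signature.secret.file"));
      }
    }

The last lookup illustrates the !NOTE! above: once both prefixes are set,
reads of the deprecated httpfs. key return the hadoop.http. value.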
---
 .../java/org/apache/hadoop/http/HttpServer2.java   |  44 +++--
 .../hadoop/crypto/key/kms/server/KMSWebServer.java |   2 +-
 .../fs/http/server/HttpFSAuthenticationFilter.java |  41 ++---
 .../fs/http/server/HttpFSServerWebServer.java      |  30 +++-
 .../src/main/resources/httpfs-default.xml          |   4 +-
 .../hadoop/fs/http/client/BaseTestHttpFSWith.java  |   5 +-
 .../fs/http/server/TestHttpFSAccessControlled.java |   6 +-
 .../hadoop/fs/http/server/TestHttpFSServer.java    |  10 +-
 .../fs/http/server/TestHttpFSServerNoACLs.java     |   6 +-
 .../fs/http/server/TestHttpFSServerNoXAttrs.java   |   6 +-
 .../fs/http/server/TestHttpFSServerWebServer.java  | 181 ++++++++++++++++++---
 .../TestHttpFSServerWebServerWithRandomSecret.java |  58 -------
 12 files changed, 258 insertions(+), 135 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index 9f81eed..fe201e8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -243,7 +243,9 @@ public final class HttpServer2 implements FilterContainer {
 
     private String hostName;
     private boolean disallowFallbackToRandomSignerSecretProvider;
-    private String authFilterConfigurationPrefix = "hadoop.http.authentication.";
+    private final List<String> authFilterConfigurationPrefixes =
+        new ArrayList<>(Collections.singletonList(
+            "hadoop.http.authentication."));
     private String excludeCiphers;
 
     private boolean xFrameEnabled;
@@ -365,8 +367,15 @@ public final class HttpServer2 implements FilterContainer {
       return this;
     }
 
-    public Builder authFilterConfigurationPrefix(String value) {
-      this.authFilterConfigurationPrefix = value;
+    public Builder setAuthFilterConfigurationPrefix(String value) {
+      this.authFilterConfigurationPrefixes.clear();
+      this.authFilterConfigurationPrefixes.add(value);
+      return this;
+    }
+
+    public Builder setAuthFilterConfigurationPrefixes(String[] prefixes) {
+      this.authFilterConfigurationPrefixes.clear();
+      Collections.addAll(this.authFilterConfigurationPrefixes, prefixes);
       return this;
     }
 
@@ -473,8 +482,10 @@ public final class HttpServer2 implements FilterContainer {
       HttpServer2 server = new HttpServer2(this);
 
       if (this.securityEnabled &&
-          !this.conf.get(authFilterConfigurationPrefix + "type").
-          equals(PseudoAuthenticationHandler.TYPE)) {
+          authFilterConfigurationPrefixes.stream().noneMatch(
+              prefix -> this.conf.get(prefix + "type")
+                  .equals(PseudoAuthenticationHandler.TYPE))
+      ) {
         server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
       }
 
@@ -811,18 +822,25 @@ public final class HttpServer2 implements FilterContainer {
       throws Exception {
     final Configuration conf = b.conf;
     Properties config = getFilterProperties(conf,
-                                            b.authFilterConfigurationPrefix);
+        b.authFilterConfigurationPrefixes);
     return AuthenticationFilter.constructSecretProvider(
         ctx, config, b.disallowFallbackToRandomSignerSecretProvider);
   }
 
-  private static Properties getFilterProperties(Configuration conf, String
-      prefix) {
-    Properties prop = new Properties();
-    Map<String, String> filterConfig = AuthenticationFilterInitializer
-        .getFilterConfigMap(conf, prefix);
-    prop.putAll(filterConfig);
-    return prop;
+  public static Properties getFilterProperties(Configuration conf, List<String> prefixes) {
+    Properties props = new Properties();
+    for (String prefix : prefixes) {
+      Map<String, String> filterConfigMap =
+          AuthenticationFilterInitializer.getFilterConfigMap(conf, prefix);
+      for (Map.Entry<String, String> entry : filterConfigMap.entrySet()) {
+        Object previous = props.setProperty(entry.getKey(), entry.getValue());
+        if (previous != null && !previous.equals(entry.getValue())) {
+          LOG.warn("Overwriting configuration for key='{}' with value='{}' " +
+              "previous value='{}'", entry.getKey(), entry.getValue(), previous);
+        }
+      }
+    }
+    return props;
   }
 
   private static void addNoCacheFilter(ServletContextHandler ctxt) {
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java
index 639d855..a6cab81 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java
@@ -118,7 +118,7 @@ public class KMSWebServer {
         .setName(NAME)
         .setConf(conf)
         .setSSLConf(sslConf)
-        .authFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX)
+        .setAuthFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX)
         .setACL(new AccessControlList(conf.get(
             KMSConfiguration.HTTP_ADMINS_KEY, " ")))
         .addEndpoint(endpoint)
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
index 56d0862..734f144 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.http.server;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.web.WebHdfsConstants;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
 import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
@@ -35,6 +36,8 @@ import java.io.Reader;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Map;
 import java.util.Properties;
 
@@ -46,9 +49,9 @@ import java.util.Properties;
 public class HttpFSAuthenticationFilter
     extends DelegationTokenAuthenticationFilter {
 
-  static final String CONF_PREFIX = "httpfs.authentication.";
-
-  static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication.";
+  public static final String CONF_PREFIX = "httpfs.authentication.";
+  public static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication.";
+  static final String[] CONF_PREFIXES = {CONF_PREFIX, HADOOP_HTTP_CONF_PREFIX};
 
   private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET
       + ".file";
@@ -69,27 +72,9 @@ public class HttpFSAuthenticationFilter
   @Override
   protected Properties getConfiguration(String configPrefix,
       FilterConfig filterConfig) throws ServletException{
-    Properties props = new Properties();
     Configuration conf = HttpFSServerWebApp.get().getConfig();
-
-    props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
-    for (Map.Entry<String, String> entry : conf) {
-      String name = entry.getKey();
-      if (name.startsWith(HADOOP_HTTP_CONF_PREFIX)) {
-        name = name.substring(HADOOP_HTTP_CONF_PREFIX.length());
-        props.setProperty(name, entry.getValue());
-      }
-    }
-
-    // Replace Hadoop Http Authentication Configs with HttpFS specific Configs
-    for (Map.Entry<String, String> entry : conf) {
-      String name = entry.getKey();
-      if (name.startsWith(CONF_PREFIX)) {
-        String value = conf.get(name);
-        name = name.substring(CONF_PREFIX.length());
-        props.setProperty(name, value);
-      }
-    }
+    Properties props = HttpServer2.getFilterProperties(conf,
+        new ArrayList<>(Arrays.asList(CONF_PREFIXES)));
 
     String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
     if (signatureSecretFile == null) {
@@ -106,8 +91,16 @@ public class HttpFSAuthenticationFilter
           secret.append((char) c);
           c = reader.read();
         }
+
+        String secretString = secret.toString();
+        if (secretString.isEmpty()) {
+          throw new RuntimeException(
+              "No secret in HttpFs signature secret file: "
+                  + signatureSecretFile);
+        }
+
         props.setProperty(AuthenticationFilter.SIGNATURE_SECRET,
-            secret.toString());
+            secretString);
       } catch (IOException ex) {
         throw new RuntimeException("Could not read HttpFS signature "
             + "secret file: " + signatureSecretFile);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebServer.java
index a59d899..ee5c8f3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebServer.java
@@ -17,6 +17,14 @@
  */
 package org.apache.hadoop.fs.http.server;
 
+import com.google.common.annotations.VisibleForTesting;
+
+import static org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter.CONF_PREFIX;
+import static org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX;
+import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.AUTH_TYPE;
+import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.SIGNATURE_SECRET_FILE;
+import static org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.KEYTAB;
+import static org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.PRINCIPAL;
 import static org.apache.hadoop.util.StringUtils.startupShutdownMessage;
 
 import java.io.IOException;
@@ -65,6 +73,7 @@ public class HttpFSServerWebServer {
   private static final String SERVLET_PATH = "/webhdfs";
 
   static {
+    addDeprecatedKeys();
     Configuration.addDefaultResource(HTTPFS_DEFAULT_XML);
     Configuration.addDefaultResource(HTTPFS_SITE_XML);
   }
@@ -124,7 +133,8 @@ public class HttpFSServerWebServer {
         .setName(NAME)
         .setConf(conf)
         .setSSLConf(sslConf)
-        .authFilterConfigurationPrefix(HttpFSAuthenticationFilter.CONF_PREFIX)
+        .setAuthFilterConfigurationPrefixes(
+            HttpFSAuthenticationFilter.CONF_PREFIXES)
         .setACL(new AccessControlList(conf.get(HTTP_ADMINS_KEY, " ")))
         .addEndpoint(endpoint)
         .build();
@@ -178,6 +188,11 @@ public class HttpFSServerWebServer {
     }
   }
 
+  @VisibleForTesting
+  HttpServer2 getHttpServer() {
+    return httpServer;
+  }
+
   public static void main(String[] args) throws Exception {
     startupShutdownMessage(HttpFSServerWebServer.class, args, LOG);
     Configuration conf = new Configuration(true);
@@ -187,4 +202,17 @@ public class HttpFSServerWebServer {
     webServer.start();
     webServer.join();
   }
+
+  public static void addDeprecatedKeys() {
+    Configuration.addDeprecations(new Configuration.DeprecationDelta[]{
+        new Configuration.DeprecationDelta(CONF_PREFIX + KEYTAB,
+            HADOOP_HTTP_CONF_PREFIX + KEYTAB),
+        new Configuration.DeprecationDelta(CONF_PREFIX + PRINCIPAL,
+            HADOOP_HTTP_CONF_PREFIX + PRINCIPAL),
+        new Configuration.DeprecationDelta(CONF_PREFIX + SIGNATURE_SECRET_FILE,
+            HADOOP_HTTP_CONF_PREFIX + SIGNATURE_SECRET_FILE),
+        new Configuration.DeprecationDelta(CONF_PREFIX + AUTH_TYPE,
+            HADOOP_HTTP_CONF_PREFIX + AUTH_TYPE)
+    });
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
index 869e4e5..bc2ef5f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
@@ -158,8 +158,8 @@
       If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
       they should share the secret file.
 
-      If the secret file specified here does not exist, random secret is
-      generated at startup time.
+      If the secret file specified here does not exist or it is empty, a random
+      secret is generated at startup time.
 
       httpfs.authentication.signature.secret.file is deprecated. Instead use
       hadoop.http.authentication.signature.secret.file.
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
index 84cd055..d8dd7c7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.QuotaUsage;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter;
 import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
@@ -58,6 +59,7 @@ import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.test.HFSTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.LambdaTestUtils;
@@ -148,7 +150,8 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
              HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
     conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
              HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
-    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+        AuthenticationFilter.SIGNATURE_SECRET_FILE, secretFile.getAbsolutePath());
     File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     os = new FileOutputStream(httpfsSite);
     conf.writeXml(os);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSAccessControlled.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSAccessControlled.java
index 47d9935..7a010e0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSAccessControlled.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSAccessControlled.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.test.HTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.TestDir;
@@ -128,8 +129,9 @@ public class TestHttpFSAccessControlled extends HTestCase {
     conf.set("httpfs.proxyuser." +
                     HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
             HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
-    conf.set("httpfs.authentication.signature.secret.file",
-            secretFile.getAbsolutePath());
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+            AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        secretFile.getAbsolutePath());
 
     File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     os = new FileOutputStream(httpfsSite);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
index 6aa8aa3..cdb4a53 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
@@ -231,8 +231,9 @@ public class TestHttpFSServer extends HFSTestCase {
     // HTTPFS configuration
     conf = new Configuration(false);
     if (addDelegationTokenAuthHandler) {
-      conf.set("httpfs.authentication.type",
-               HttpFSKerberosAuthenticationHandlerForTesting.class.getName());
+      conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+              AuthenticationFilter.AUTH_TYPE,
+          HttpFSKerberosAuthenticationHandlerForTesting.class.getName());
     }
     conf.set("httpfs.services.ext", MockGroups.class.getName());
     conf.set("httpfs.admin.group", HadoopUsersConfTestHelper.
@@ -243,8 +244,9 @@ public class TestHttpFSServer extends HFSTestCase {
     conf.set("httpfs.proxyuser." +
              HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
              HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
-    conf.set("httpfs.authentication.signature.secret.file",
-             secretFile.getAbsolutePath());
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+            AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        secretFile.getAbsolutePath());
     conf.set("httpfs.hadoop.config.dir", hadoopConfDir.toString());
     if (sslEnabled) {
       conf.set("httpfs.ssl.enabled", "true");
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoACLs.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoACLs.java
index c679dba..6b3dfb4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoACLs.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoACLs.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.test.HTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.TestDir;
@@ -136,8 +137,9 @@ public class TestHttpFSServerNoACLs extends HTestCase {
     conf.set("httpfs.proxyuser." +
                     HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
             HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
-    conf.set("httpfs.authentication.signature.secret.file",
-            secretFile.getAbsolutePath());
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+            AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        secretFile.getAbsolutePath());
 
     File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     os = new FileOutputStream(httpfsSite);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoXAttrs.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoXAttrs.java
index 270989b..ac70c07 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoXAttrs.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoXAttrs.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.test.HTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.TestDir;
@@ -137,8 +138,9 @@ public class TestHttpFSServerNoXAttrs extends HTestCase {
     conf.set("httpfs.proxyuser." +
                     HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
             HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
-    conf.set("httpfs.authentication.signature.secret.file",
-            secretFile.getAbsolutePath());
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+            AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        secretFile.getAbsolutePath());
 
     File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     os = new FileOutputStream(httpfsSite);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServer.java
index 97d41d3..e0fdef5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServer.java
@@ -17,9 +17,11 @@
  */
 package org.apache.hadoop.fs.http.server;
 
-import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileOutputStream;
+import java.io.BufferedReader;
 import java.io.InputStreamReader;
+import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
@@ -27,17 +29,23 @@ import java.text.MessageFormat;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.util.FileSignerSecretProvider;
+import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
+import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.BeforeClass;
+import org.junit.After;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;
 
+import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.SIGNER_SECRET_PROVIDER_ATTRIBUTE;
+
 /**
  * Test {@link HttpFSServerWebServer}.
  */
@@ -45,11 +53,13 @@ public class TestHttpFSServerWebServer {
 
   @Rule
   public Timeout timeout = new Timeout(30000);
+
+  private File secretFile;
   private HttpFSServerWebServer webServer;
 
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    File homeDir = GenericTestUtils.getTestDir();
+  @Before
+  public void init() throws Exception {
+    File homeDir = GenericTestUtils.setupTestRootDir(TestHttpFSServerWebServer.class);
     File confDir = new File(homeDir, "etc/hadoop");
     File logsDir = new File(homeDir, "logs");
     File tempDir = new File(homeDir, "temp");
@@ -71,43 +81,33 @@ public class TestHttpFSServerWebServer {
     System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath());
     System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath());
     System.setProperty("httpfs.config.dir", confDir.getAbsolutePath());
-    FileUtils.writeStringToFile(new File(confDir, "httpfs-signature.secret"),
-        "foo", StandardCharsets.UTF_8);
+    secretFile = new File(System.getProperty("httpfs.config.dir"),
+        "httpfs-signature-custom.secret");
   }
 
-  @Before
-  public void setUp() throws Exception {
-    Configuration conf = new Configuration();
-    conf.set(HttpFSServerWebServer.HTTP_HOSTNAME_KEY, "localhost");
-    conf.setInt(HttpFSServerWebServer.HTTP_PORT_KEY, 0);
-    conf.set(AuthenticationFilter.SIGNATURE_SECRET_FILE,
-        "httpfs-signature.secret");
-    Configuration sslConf = new Configuration();
-    webServer = new HttpFSServerWebServer(conf, sslConf);
+  @After
+  public void teardown() throws Exception {
+    if (webServer != null) {
+      webServer.stop();
+    }
   }
 
   @Test
   public void testStartStop() throws Exception {
+    webServer = createWebServer(createConfigurationWithRandomSecret());
     webServer.start();
-    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
-    URL url = new URL(webServer.getUrl(), MessageFormat.format(
-        "/webhdfs/v1/?user.name={0}&op=liststatus", user));
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    BufferedReader reader = new BufferedReader(
-        new InputStreamReader(conn.getInputStream()));
-    reader.readLine();
-    reader.close();
     webServer.stop();
   }
 
   @Test
   public void testJustStop() throws Exception {
+    webServer = createWebServer(createConfigurationWithRandomSecret());
     webServer.stop();
   }
 
   @Test
   public void testDoubleStop() throws Exception {
+    webServer = createWebServer(createConfigurationWithRandomSecret());
     webServer.start();
     webServer.stop();
     webServer.stop();
@@ -115,9 +115,140 @@ public class TestHttpFSServerWebServer {
 
   @Test
   public void testDoubleStart() throws Exception {
+    webServer = createWebServer(createConfigurationWithRandomSecret());
+    webServer.start();
+    webServer.start();
+    webServer.stop();
+  }
+
+  @Test
+  public void testServiceWithSecretFile() throws Exception {
+    createSecretFile("foo");
+    webServer = createWebServer(createConfigurationWithSecretFile());
+    webServer.start();
+    assertServiceRespondsWithOK(webServer.getUrl());
+    assertSignerSecretProviderType(webServer.getHttpServer(),
+        FileSignerSecretProvider.class);
+    webServer.stop();
+  }
+
+  @Test
+  public void testServiceWithSecretFileWithDeprecatedConfigOnly()
+      throws Exception {
+    createSecretFile("foo");
+    Configuration conf = createConfiguration();
+    setDeprecatedSecretFile(conf, secretFile.getAbsolutePath());
+    webServer = createWebServer(conf);
+    webServer.start();
+    assertServiceRespondsWithOK(webServer.getUrl());
+    assertSignerSecretProviderType(webServer.getHttpServer(),
+        FileSignerSecretProvider.class);
+    webServer.stop();
+  }
+
+  @Test
+  public void testServiceWithSecretFileWithBothConfigOptions() throws Exception {
+    createSecretFile("foo");
+    Configuration conf = createConfigurationWithSecretFile();
+    setDeprecatedSecretFile(conf, secretFile.getAbsolutePath());
+    webServer = createWebServer(conf);
+    webServer.start();
+    assertServiceRespondsWithOK(webServer.getUrl());
+    assertSignerSecretProviderType(webServer.getHttpServer(),
+        FileSignerSecretProvider.class);
+    webServer.stop();
+  }
+
+  @Test
+  public void testServiceWithMissingSecretFile() throws Exception {
+    webServer = createWebServer(createConfigurationWithSecretFile());
     webServer.start();
+    assertServiceRespondsWithOK(webServer.getUrl());
+    assertSignerSecretProviderType(webServer.getHttpServer(),
+        RandomSignerSecretProvider.class);
+    webServer.stop();
+  }
+
+  @Test
+  public void testServiceWithEmptySecretFile() throws Exception {
+    // The AuthenticationFilter.constructSecretProvider will do the fallback
+    // to the random secrets not the HttpFSAuthenticationFilter.
+    createSecretFile("");
+    webServer = createWebServer(createConfigurationWithSecretFile());
     webServer.start();
+    assertServiceRespondsWithOK(webServer.getUrl());
+    assertSignerSecretProviderType(webServer.getHttpServer(),
+        RandomSignerSecretProvider.class);
     webServer.stop();
   }
 
+  private <T extends SignerSecretProvider> void assertSignerSecretProviderType(
+      HttpServer2 server, Class<T> expected) {
+    SignerSecretProvider secretProvider = (SignerSecretProvider)
+        server.getWebAppContext().getServletContext()
+            .getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE);
+    Assert.assertNotNull("The secret provider must not be null", secretProvider);
+    Assert.assertEquals("The secret provider must match the following", expected, secretProvider.getClass());
+  }
+
+  private void assertServiceRespondsWithOK(URL serviceURL)
+      throws Exception {
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    URL url = new URL(serviceURL, MessageFormat.format(
+        "/webhdfs/v1/?user.name={0}&op=liststatus", user));
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+    try (BufferedReader reader = new BufferedReader(
+        new InputStreamReader(conn.getInputStream()))) {
+      reader.readLine();
+    }
+  }
+
+  private void setDeprecatedSecretFile(Configuration conf, String path) {
+    conf.set(HttpFSAuthenticationFilter.CONF_PREFIX +
+            AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        path);
+  }
+
+  private Configuration createConfigurationWithRandomSecret() {
+    Configuration conf = createConfiguration();
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+        AuthenticationFilter.SIGNER_SECRET_PROVIDER, "random");
+    return conf;
+  }
+
+  private Configuration createConfigurationWithSecretFile() {
+    Configuration conf = createConfiguration();
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+            AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        secretFile.getAbsolutePath());
+    return conf;
+  }
+
+  private Configuration createConfiguration() {
+    Configuration conf = new Configuration(false);
+    conf.set(HttpFSServerWebServer.HTTP_HOSTNAME_KEY, "localhost");
+    conf.setInt(HttpFSServerWebServer.HTTP_PORT_KEY, 0);
+    return conf;
+  }
+
+  private HttpFSServerWebServer createWebServer(Configuration conf)
+      throws Exception {
+    Configuration sslConf = new Configuration(false);
+
+    // The configuration must be stored for the HttpFSAuthenticatorFilter, because
+    // it accesses the configuration from the webapp: HttpFSServerWebApp.get().getConfig()
+    try (FileOutputStream os = new FileOutputStream(
+        new File(System.getProperty("httpfs.config.dir"), "httpfs-site.xml"))) {
+      conf.writeXml(os);
+    }
+
+    return new HttpFSServerWebServer(conf, sslConf);
+  }
+
+  private void createSecretFile(String content) throws IOException {
+    Assert.assertTrue(secretFile.createNewFile());
+    FileUtils.writeStringToFile(secretFile, content, StandardCharsets.UTF_8);
+  }
+
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServerWithRandomSecret.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServerWithRandomSecret.java
deleted file mode 100644
index b8e902a..0000000
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServerWithRandomSecret.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.fs.http.server;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
-import org.junit.BeforeClass;
-
-import java.io.File;
-
-/**
- * Unlike {@link TestHttpFSServerWebServer}, httpfs-signature.secret doesn't
- * exist. In this case, a random secret is used.
- */
-public class TestHttpFSServerWebServerWithRandomSecret extends
-    TestHttpFSServerWebServer {
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    File homeDir = GenericTestUtils.getTestDir();
-    File confDir = new File(homeDir, "etc/hadoop");
-    File logsDir = new File(homeDir, "logs");
-    File tempDir = new File(homeDir, "temp");
-    confDir.mkdirs();
-    logsDir.mkdirs();
-    tempDir.mkdirs();
-
-    if (Shell.WINDOWS) {
-      File binDir = new File(homeDir, "bin");
-      binDir.mkdirs();
-      File winutils = Shell.getWinUtilsFile();
-      if (winutils.exists()) {
-        FileUtils.copyFileToDirectory(winutils, binDir);
-      }
-    }
-
-    System.setProperty("hadoop.home.dir", homeDir.getAbsolutePath());
-    System.setProperty("hadoop.log.dir", logsDir.getAbsolutePath());
-    System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath());
-    System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath());
-    System.setProperty("httpfs.config.dir", confDir.getAbsolutePath());
-  }
-}
