Posted to commits@ozone.apache.org by xy...@apache.org on 2020/02/07 21:22:37 UTC

[hadoop-ozone] branch master updated: HDDS-2950. Upgrade jetty to the latest 9.4 release (#508)

This is an automated email from the ASF dual-hosted git repository.

xyao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 1ac8263  HDDS-2950. Upgrade jetty to the latest 9.4 release (#508)
1ac8263 is described below

commit 1ac8263405e5e4d3a12ceea1efcba40209547705
Author: Elek, Márton <el...@users.noreply.github.com>
AuthorDate: Fri Feb 7 22:22:29 2020 +0100

    HDDS-2950. Upgrade jetty to the latest 9.4 release (#508)
---
 .../apache/hadoop/hdds/DFSConfigKeysLegacy.java    |    3 +
 .../java/org/apache/hadoop/hdds/HddsUtils.java     |  124 --
 .../hadoop/ozone/HddsDatanodeHttpServer.java       |    2 +-
 .../apache/hadoop/ozone/HddsDatanodeService.java   |    2 +-
 .../org/apache/hadoop/hdds/server/ServerUtils.java |   11 +-
 .../AdminAuthorizedServlet.java}                   |   38 +-
 .../hdds/server/{ => http}/BaseHttpServer.java     |  171 +-
 .../FilterContainer.java}                          |   32 +-
 .../FilterInitializer.java}                        |   23 +-
 .../hadoop/hdds/server/http/HtmlQuoting.java       |  235 +++
 .../apache/hadoop/hdds/server/http/HttpConfig.java |   75 +
 .../hadoop/hdds/server/http/HttpRequestLog.java    |  107 ++
 .../HttpRequestLogAppender.java}                   |   52 +-
 .../hadoop/hdds/server/http/HttpServer2.java       | 1707 ++++++++++++++++++++
 .../hdds/server/{ => http}/LogStreamServlet.java   |    2 +-
 .../hadoop/hdds/server/http/NoCacheFilter.java     |   55 +
 .../hdds/server/{ => http}/ProfileServlet.java     |    2 +-
 .../server/{ => http}/PrometheusMetricsSink.java   |    2 +-
 .../hdds/server/{ => http}/PrometheusServlet.java  |    2 +-
 .../server/{ => http}/RatisDropwizardExports.java  |    2 +-
 .../{ => http}/RatisNameRewriteSampleBuilder.java  |    2 +-
 .../hdds/server/http/StaticUserWebFilter.java      |  157 ++
 .../package-info.java}                             |   27 +-
 .../hdds/server/{ => http}/TestBaseHttpServer.java |    3 +-
 .../hadoop/hdds/server/http/TestHtmlQuoting.java   |   96 ++
 .../hdds/server/http/TestHttpRequestLog.java       |   51 +
 .../server/http/TestHttpRequestLogAppender.java}   |   32 +-
 .../hdds/server/{ => http}/TestProfileServlet.java |    6 +-
 .../{ => http}/TestPrometheusMetricsSink.java      |    2 +-
 .../{ => http}/TestRatisDropwizardExports.java     |    2 +-
 .../server/{ => http}/TestRatisNameRewrite.java    |    2 +-
 .../hadoop/hdds/server/http/package-info.java}     |   27 +-
 .../server/StorageContainerManagerHttpServer.java  |    2 +-
 .../hadoop/ozone/insight/BaseInsightPoint.java     |    2 +-
 .../hadoop/ozone/om/OzoneManagerHttpServer.java    |    2 +-
 .../apache/hadoop/ozone/om/ha/OMNodeDetails.java   |    3 +-
 .../om/snapshot/OzoneManagerSnapshotProvider.java  |   43 +-
 .../apache/hadoop/ozone/recon/ReconHttpServer.java |    2 +-
 .../spi/impl/OzoneManagerServiceProviderImpl.java  |    5 +-
 .../java/org/apache/hadoop/ozone/s3/Gateway.java   |    1 +
 .../hadoop/ozone/s3/S3GatewayHttpServer.java       |    2 +-
 .../apache/hadoop/ozone/freon/FreonHttpServer.java |    2 +-
 pom.xml                                            |    2 +-
 43 files changed, 2821 insertions(+), 299 deletions(-)
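
The substance of the change: instead of building on hadoop-common's org.apache.hadoop.http.HttpServer2 (and whatever Jetty version hadoop-common ships), the patch forks that server and its supporting classes into a new org.apache.hadoop.hdds.server.http package, so Ozone can pin Jetty 9.4 itself in pom.xml. For most callers in the tree the visible effect is an import move, e.g.:

    // before (resolved from hadoop-common, with hadoop-common's Jetty):
    //   import org.apache.hadoop.http.HttpServer2;
    // after (Ozone's own fork, compiled against Jetty 9.4):
    import org.apache.hadoop.hdds.server.http.HttpServer2;
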

diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java
index f65d4db..d9e12fc 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java
@@ -92,6 +92,9 @@ public final class DFSConfigKeysLegacy {
   public static final String DFS_METRICS_PERCENTILES_INTERVALS_KEY =
       "dfs.metrics.percentiles.intervals";
 
+  public static final String DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY =
+      "dfs.web.authentication.kerberos.keytab";
+
 }
 
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
index 7f8279b..ac54229 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.net.InetSocketAddress;
-import java.net.URI;
 import java.net.UnknownHostException;
 import java.nio.file.Path;
 import java.util.Calendar;
@@ -35,7 +34,6 @@ import java.util.OptionalInt;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -50,8 +48,6 @@ import org.apache.hadoop.hdds.protocolPB.SCMSecurityProtocolClientSideTranslator
 import org.apache.hadoop.hdds.protocolPB.SCMSecurityProtocolPB;
 import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.hdds.scm.protocolPB.ScmBlockLocationProtocolPB;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.ipc.Client;
@@ -64,8 +60,6 @@ import org.apache.hadoop.metrics2.source.JvmMetrics;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.net.DNS;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.ozone.OzoneConfigKeys;
-import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import com.google.common.base.Preconditions;
@@ -76,14 +70,6 @@ import static org.apache.hadoop.hdds.DFSConfigKeysLegacy.DFS_DATANODE_DNS_NAMESE
 import static org.apache.hadoop.hdds.DFSConfigKeysLegacy.DFS_DATANODE_HOST_NAME_KEY;
 import static org.apache.hadoop.hdds.recon.ReconConfigKeys.OZONE_RECON_ADDRESS_KEY;
 import static org.apache.hadoop.hdds.recon.ReconConfigKeys.OZONE_RECON_DATANODE_PORT_DEFAULT;
-import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS;
-import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_CLIENT_HTTPS_NEED_AUTH_DEFAULT;
-import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_CLIENT_HTTPS_NEED_AUTH_KEY;
-import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYPASSWORD_KEY;
-import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY;
-import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY;
-
-import org.apache.hadoop.security.authorize.AccessControlList;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -621,116 +607,6 @@ public final class HddsUtils {
     return builder.toString();
   }
 
-  public static HttpConfig.Policy getHttpPolicy(Configuration conf) {
-    String policyStr = conf.get(OzoneConfigKeys.OZONE_HTTP_POLICY_KEY,
-        OzoneConfigKeys.OZONE_HTTP_POLICY_DEFAULT);
-    HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
-    if (policy == null) {
-      throw new HadoopIllegalArgumentException("Unregonized value '"
-          + policyStr + "' for " + OzoneConfigKeys.OZONE_HTTP_POLICY_KEY);
-    }
-    conf.set(OzoneConfigKeys.OZONE_HTTP_POLICY_KEY, policy.name());
-    return policy;
-  }
-
-  /**
-   * Return a HttpServer.Builder that the OzoneManager/SCM/Datanode/S3Gateway/
-   * Recon to initialize their HTTP / HTTPS server.
-   */
-  public static HttpServer2.Builder newHttpServer2BuilderForOzone(
-      Configuration conf, final InetSocketAddress httpAddr,
-      final InetSocketAddress httpsAddr, String name, String spnegoUserNameKey,
-      String spnegoKeytabFileKey) throws IOException {
-    HttpConfig.Policy policy = getHttpPolicy(conf);
-
-    HttpServer2.Builder builder = new HttpServer2.Builder().setName(name)
-        .setConf(conf).setACL(new AccessControlList(conf.get(
-            OZONE_ADMINISTRATORS, " ")))
-        .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
-        .setUsernameConfKey(spnegoUserNameKey)
-        .setKeytabConfKey(spnegoKeytabFileKey);
-
-    // initialize the webserver for uploading/downloading files.
-    if (UserGroupInformation.isSecurityEnabled()) {
-      LOG.info("Starting web server as: "
-          + SecurityUtil.getServerPrincipal(conf.get(spnegoUserNameKey),
-          httpAddr.getHostName()));
-    }
-
-    if (policy.isHttpEnabled()) {
-      if (httpAddr.getPort() == 0) {
-        builder.setFindPort(true);
-      }
-
-      URI uri = URI.create("http://" + NetUtils.getHostPortString(httpAddr));
-      builder.addEndpoint(uri);
-      LOG.info("Starting Web-server for " + name + " at: " + uri);
-    }
-
-    if (policy.isHttpsEnabled() && httpsAddr != null) {
-      Configuration sslConf = loadSslConfiguration(conf);
-      loadSslConfToHttpServerBuilder(builder, sslConf);
-
-      if (httpsAddr.getPort() == 0) {
-        builder.setFindPort(true);
-      }
-
-      URI uri = URI.create("https://" + NetUtils.getHostPortString(httpsAddr));
-      builder.addEndpoint(uri);
-      LOG.info("Starting Web-server for " + name + " at: " + uri);
-    }
-    return builder;
-  }
-
-  /**
-   * Load HTTPS-related configuration.
-   */
-  public static Configuration loadSslConfiguration(Configuration conf) {
-    Configuration sslConf = new Configuration(false);
-
-    sslConf.addResource(conf.get(
-        OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-        OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
-
-    final String[] reqSslProps = {
-        OzoneConfigKeys.OZONE_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY,
-        OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_LOCATION_KEY,
-        OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY,
-        OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYPASSWORD_KEY
-    };
-
-    // Check if the required properties are included
-    for (String sslProp : reqSslProps) {
-      if (sslConf.get(sslProp) == null) {
-        LOG.warn("SSL config " + sslProp + " is missing. If " +
-            OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY +
-            " is specified, make sure it is a relative path");
-      }
-    }
-
-    boolean requireClientAuth = conf.getBoolean(
-        OZONE_CLIENT_HTTPS_NEED_AUTH_KEY, OZONE_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
-    sslConf.setBoolean(OZONE_CLIENT_HTTPS_NEED_AUTH_KEY, requireClientAuth);
-    return sslConf;
-  }
-
-  public static HttpServer2.Builder loadSslConfToHttpServerBuilder(
-      HttpServer2.Builder builder, Configuration sslConf) {
-    return builder
-        .needsClientAuth(
-            sslConf.getBoolean(OZONE_CLIENT_HTTPS_NEED_AUTH_KEY,
-                OZONE_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
-        .keyPassword(getPassword(sslConf, OZONE_SERVER_HTTPS_KEYPASSWORD_KEY))
-        .keyStore(sslConf.get("ssl.server.keystore.location"),
-            getPassword(sslConf, OZONE_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY),
-            sslConf.get("ssl.server.keystore.type", "jks"))
-        .trustStore(sslConf.get("ssl.server.truststore.location"),
-            getPassword(sslConf, OZONE_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY),
-            sslConf.get("ssl.server.truststore.type", "jks"))
-        .excludeCiphers(
-            sslConf.get("ssl.server.exclude.cipher.list"));
-  }
-
   /**
    * Leverages the Configuration.getPassword method to attempt to get
    * passwords from the CredentialProvider API before falling back to
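
None of the helpers deleted above is gone for good: getHttpPolicy moves to the new HttpConfig class, while newHttpServer2BuilderForOzone, loadSslConfiguration and loadSslConfToHttpServerBuilder move to BaseHttpServer (all appear later in this patch). A minimal migration sketch for a call site, using only signatures visible in this diff:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.server.http.BaseHttpServer;
    import org.apache.hadoop.hdds.server.http.HttpConfig;

    class HttpHelperMigrationSketch {
      static Configuration sslConfFor(Configuration conf) {
        // was: HddsUtils.getHttpPolicy(conf)
        HttpConfig.Policy policy = HttpConfig.getHttpPolicy(conf);
        // was: HddsUtils.loadSslConfiguration(conf)
        return policy.isHttpsEnabled()
            ? BaseHttpServer.loadSslConfiguration(conf)
            : null;
      }
    }
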
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeHttpServer.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeHttpServer.java
index 3dcfcfe..fe2d065 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeHttpServer.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeHttpServer.java
@@ -21,7 +21,7 @@ import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.HddsConfigKeys;
-import org.apache.hadoop.hdds.server.BaseHttpServer;
+import org.apache.hadoop.hdds.server.http.BaseHttpServer;
 
 /**
  * Simple http server to provide basic monitoring for hdds datanode.
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
index 9d5ac68..42349b8 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hdds.security.x509.SecurityConfig;
 import org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient;
 import org.apache.hadoop.hdds.security.x509.certificate.client.DNCertificateClient;
 import org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest;
-import org.apache.hadoop.hdds.server.RatisDropwizardExports;
+import org.apache.hadoop.hdds.server.http.RatisDropwizardExports;
 import org.apache.hadoop.hdds.tracing.TracingUtil;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
 import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServerUtils.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServerUtils.java
index 01c27b1..9c87018 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServerUtils.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServerUtils.java
@@ -17,6 +17,10 @@
 
 package org.apache.hadoop.hdds.server;
 
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.util.Collection;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.HddsConfigKeys;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
@@ -24,14 +28,11 @@ import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.UserGroupInformation;
+
 import org.apache.http.client.methods.HttpRequestBase;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.net.InetSocketAddress;
-import java.util.Collection;
-
 /**
  * Generic utilities for all HDDS/Ozone servers.
  */
@@ -239,4 +240,6 @@ public final class ServerUtils {
     File metaDirPath = ServerUtils.getOzoneMetaDirPath(conf);
     return (new File(metaDirPath, "ratis")).getPath();
   }
+
+
 }
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/AdminAuthorizedServlet.java
similarity index 51%
copy from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
copy to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/AdminAuthorizedServlet.java
index 899fd9e..0271501 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/AdminAuthorizedServlet.java
@@ -15,24 +15,32 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
-import com.codahale.metrics.MetricRegistry;
-import io.prometheus.client.dropwizard.DropwizardExports;
-import io.prometheus.client.dropwizard.samplebuilder.DefaultSampleBuilder;
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.eclipse.jetty.servlet.DefaultServlet;
 
 /**
- * Collect Dropwizard metrics, but rename ratis specific metrics.
+ * General servlet which is admin-authorized.
+ *
  */
-public class RatisDropwizardExports extends DropwizardExports {
+public class AdminAuthorizedServlet extends DefaultServlet {
 
-  /**
-   * Creates a new DropwizardExports with a {@link DefaultSampleBuilder}.
-   *
-   * @param registry a metric registry to export in prometheus.
-   */
-  public RatisDropwizardExports(MetricRegistry registry) {
-    super(registry, new RatisNameRewriteSampleBuilder());
-  }
+  private static final long serialVersionUID = 1L;
 
-}
\ No newline at end of file
+  @Override
+  protected void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+    // Do the authorization
+    if (HttpServer2.hasAdministratorAccess(getServletContext(), request,
+        response)) {
+      // Authorization is done. Just call super.
+      super.doGet(request, response);
+    }
+  }
+}
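
AdminAuthorizedServlet is Jetty's DefaultServlet with an ACL check in front: the request is served only if HttpServer2.hasAdministratorAccess (part of the forked server further down) lets it through. A hedged wiring sketch, with the context path and served directory invented for illustration:

    import org.apache.hadoop.hdds.server.http.AdminAuthorizedServlet;
    import org.eclipse.jetty.servlet.ServletContextHandler;
    import org.eclipse.jetty.servlet.ServletHolder;

    class AdminServletWiringSketch {
      static ServletContextHandler adminOnlyLogs() {
        ServletContextHandler ctx = new ServletContextHandler();
        ctx.setContextPath("/logs");               // assumed mount point
        ctx.setResourceBase("/var/log/ozone");     // assumed directory to serve
        ctx.addServlet(new ServletHolder(new AdminAuthorizedServlet()), "/*");
        return ctx;
      }
    }
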
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/BaseHttpServer.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java
similarity index 56%
rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/BaseHttpServer.java
rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java
index ab95aaa..7938b61 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/BaseHttpServer.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with this
  * work for additional information regarding copyright ownership.  The ASF
@@ -15,28 +15,36 @@
  * the License.
  */
 
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import javax.servlet.http.HttpServlet;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.net.URI;
 import java.util.Optional;
 import java.util.OptionalInt;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.DFSConfigKeysLegacy;
 import org.apache.hadoop.hdds.HddsConfigKeys;
-import org.apache.hadoop.hdds.HddsUtils;
 import org.apache.hadoop.hdds.conf.HddsConfServlet;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AccessControlList;
 
 import org.apache.commons.lang3.StringUtils;
 import static org.apache.hadoop.hdds.HddsUtils.getHostNameFromConfigKeys;
 import static org.apache.hadoop.hdds.HddsUtils.getPortNumberFromConfigKeys;
+import static org.apache.hadoop.hdds.server.http.HttpConfig.getHttpPolicy;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_CLIENT_HTTPS_NEED_AUTH_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_CLIENT_HTTPS_NEED_AUTH_KEY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYPASSWORD_KEY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY;
 import org.eclipse.jetty.webapp.WebAppContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -48,8 +56,8 @@ public abstract class BaseHttpServer {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(BaseHttpServer.class);
-  protected static final String PROMETHEUS_SINK = "PROMETHEUS_SINK";
-  protected static final String JETTY_BASETMPDIR =
+  static final String PROMETHEUS_SINK = "PROMETHEUS_SINK";
+  private static final String JETTY_BASETMPDIR =
       "org.eclipse.jetty.webapp.basetempdir";
 
   private HttpServer2 httpServer;
@@ -70,7 +78,7 @@ public abstract class BaseHttpServer {
   public BaseHttpServer(Configuration conf, String name) throws IOException {
     this.name = name;
     this.conf = conf;
-    policy = HddsUtils.getHttpPolicy(conf);
+    policy = getHttpPolicy(conf);
     if (isEnabled()) {
       this.httpAddress = getHttpBindAddress();
       this.httpsAddress = getHttpsBindAddress();
@@ -80,7 +88,7 @@ public abstract class BaseHttpServer {
       // CommonConfigurationKeysPublic.HADOOP_PROMETHEUS_ENABLED when possible.
       conf.setBoolean("hadoop.prometheus.endpoint.enabled", false);
 
-      HttpServer2.Builder builder = HddsUtils.newHttpServer2BuilderForOzone(
+      HttpServer2.Builder builder = newHttpServer2BuilderForOzone(
           conf, httpAddress, httpsAddress,
           name, getSpnegoPrincipal(), getKeytabFile());
 
@@ -126,6 +134,58 @@ public abstract class BaseHttpServer {
     }
   }
 
+
+
+  /**
+   * Return an HttpServer.Builder that the OzoneManager/SCM/Datanode/S3Gateway/
+   * Recon can use to initialize their HTTP / HTTPS server.
+   */
+  public static HttpServer2.Builder newHttpServer2BuilderForOzone(
+      Configuration conf, final InetSocketAddress httpAddr,
+      final InetSocketAddress httpsAddr, String name, String spnegoUserNameKey,
+      String spnegoKeytabFileKey) throws IOException {
+    HttpConfig.Policy policy = getHttpPolicy(conf);
+
+    HttpServer2.Builder builder = new HttpServer2.Builder().setName(name)
+        .setConf(conf).setACL(new AccessControlList(conf.get(
+            OZONE_ADMINISTRATORS, " ")))
+        .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
+        .setUsernameConfKey(spnegoUserNameKey)
+        .setKeytabConfKey(spnegoKeytabFileKey);
+
+    // initialize the webserver for uploading/downloading files.
+    if (UserGroupInformation.isSecurityEnabled()) {
+      LOG.info("Starting web server as: "
+          + SecurityUtil.getServerPrincipal(conf.get(spnegoUserNameKey),
+          httpAddr.getHostName()));
+    }
+
+    if (policy.isHttpEnabled()) {
+      if (httpAddr.getPort() == 0) {
+        builder.setFindPort(true);
+      }
+
+      URI uri = URI.create("http://" + NetUtils.getHostPortString(httpAddr));
+      builder.addEndpoint(uri);
+      LOG.info("Starting Web-server for " + name + " at: " + uri);
+    }
+
+    if (policy.isHttpsEnabled() && httpsAddr != null) {
+      Configuration sslConf = loadSslConfiguration(conf);
+      loadSslConfToHttpServerBuilder(builder, sslConf);
+
+      if (httpsAddr.getPort() == 0) {
+        builder.setFindPort(true);
+      }
+
+      URI uri = URI.create("https://" + NetUtils.getHostPortString(httpsAddr));
+      builder.addEndpoint(uri);
+      LOG.info("Starting Web-server for " + name + " at: " + uri);
+    }
+    return builder;
+  }
+
+
   /**
    * Add a servlet to BaseHttpServer.
    *
@@ -232,6 +292,99 @@ public abstract class BaseHttpServer {
     }
   }
 
+
+  public static HttpServer2.Builder loadSslConfToHttpServerBuilder(
+      HttpServer2.Builder builder, Configuration sslConf) {
+    return builder
+        .needsClientAuth(
+            sslConf.getBoolean(OZONE_CLIENT_HTTPS_NEED_AUTH_KEY,
+                OZONE_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
+        .keyPassword(getPassword(sslConf, OZONE_SERVER_HTTPS_KEYPASSWORD_KEY))
+        .keyStore(sslConf.get("ssl.server.keystore.location"),
+            getPassword(sslConf, OZONE_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY),
+            sslConf.get("ssl.server.keystore.type", "jks"))
+        .trustStore(sslConf.get("ssl.server.truststore.location"),
+            getPassword(sslConf, OZONE_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY),
+            sslConf.get("ssl.server.truststore.type", "jks"))
+        .excludeCiphers(
+            sslConf.get("ssl.server.exclude.cipher.list"));
+  }
+
+
+  /**
+   * Get SPNEGO keytab Key from configuration.
+   *
+   * @param conf       Configuration
+   * @param defaultKey default key to be used for config lookup
+   * @return DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY if that key is set and
+   * non-empty, otherwise defaultKey
+   */
+  public static String getSpnegoKeytabKey(Configuration conf,
+      String defaultKey) {
+    String value =
+        conf.get(
+            DFSConfigKeysLegacy.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
+    return (value == null || value.isEmpty()) ?
+        defaultKey :
+        DFSConfigKeysLegacy.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY;
+  }
+
+  /**
+   * Leverages the Configuration.getPassword method to attempt to get
+   * passwords from the CredentialProvider API before falling back to
+   * clear text in config - if falling back is allowed.
+   *
+   * @param conf  Configuration instance
+   * @param alias name of the credential to retrieve
+   * @return String credential value or null
+   */
+  static String getPassword(Configuration conf, String alias) {
+    String password = null;
+    try {
+      char[] passchars = conf.getPassword(alias);
+      if (passchars != null) {
+        password = new String(passchars);
+      }
+    } catch (IOException ioe) {
+      LOG.warn("Setting password to null since IOException is caught"
+          + " when getting password", ioe);
+
+      password = null;
+    }
+    return password;
+  }
+  /**
+   * Load HTTPS-related configuration.
+   */
+  public static Configuration loadSslConfiguration(Configuration conf) {
+    Configuration sslConf = new Configuration(false);
+
+    sslConf.addResource(conf.get(
+        OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+        OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
+
+    final String[] reqSslProps = {
+        OzoneConfigKeys.OZONE_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY,
+        OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_LOCATION_KEY,
+        OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY,
+        OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYPASSWORD_KEY
+    };
+
+    // Check if the required properties are included
+    for (String sslProp : reqSslProps) {
+      if (sslConf.get(sslProp) == null) {
+        LOG.warn("SSL config " + sslProp + " is missing. If " +
+            OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY +
+            " is specified, make sure it is a relative path");
+      }
+    }
+
+    boolean requireClientAuth = conf.getBoolean(
+        OZONE_CLIENT_HTTPS_NEED_AUTH_KEY, OZONE_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
+    sslConf.setBoolean(OZONE_CLIENT_HTTPS_NEED_AUTH_KEY, requireClientAuth);
+    return sslConf;
+  }
+
   public InetSocketAddress getHttpAddress() {
     return httpAddress;
   }
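
Subclasses of BaseHttpServer get all of the above through their constructor; standalone callers can also drive the static helper directly. A sketch, assuming the forked HttpServer2 keeps Hadoop's build()/start() entry points (they are not visible in the excerpt below), with placeholder addresses, server name and SPNEGO keys:

    import java.net.InetSocketAddress;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.server.http.BaseHttpServer;
    import org.apache.hadoop.hdds.server.http.HttpServer2;

    class OzoneHttpServerSketch {
      static HttpServer2 start(Configuration conf) throws Exception {
        HttpServer2.Builder builder = BaseHttpServer.newHttpServer2BuilderForOzone(
            conf,
            new InetSocketAddress("0.0.0.0", 9874),      // placeholder HTTP addr
            new InetSocketAddress("0.0.0.0", 9875),      // placeholder HTTPS addr
            "example",                                   // placeholder server name
            "example.http.kerberos.principal",           // placeholder SPNEGO keys
            "example.http.kerberos.keytab");
        HttpServer2 server = builder.build();            // assumed, as in upstream
        server.start();
        return server;
      }
    }
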
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterContainer.java
similarity index 54%
copy from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
copy to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterContainer.java
index 899fd9e..3b2b8eb 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterContainer.java
@@ -15,24 +15,28 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
-import com.codahale.metrics.MetricRegistry;
-import io.prometheus.client.dropwizard.DropwizardExports;
-import io.prometheus.client.dropwizard.samplebuilder.DefaultSampleBuilder;
+import java.util.Map;
 
 /**
- * Collect Dropwizard metrics, but rename ratis specific metrics.
+ * A container class for javax.servlet.Filter. 
  */
-public class RatisDropwizardExports extends DropwizardExports {
-
+public interface FilterContainer {
   /**
-   * Creates a new DropwizardExports with a {@link DefaultSampleBuilder}.
-   *
-   * @param registry a metric registry to export in prometheus.
+   * Add a filter to the container.
+   * @param name Filter name
+   * @param classname Filter class name
+   * @param parameters a map from parameter names to initial values
    */
-  public RatisDropwizardExports(MetricRegistry registry) {
-    super(registry, new RatisNameRewriteSampleBuilder());
-  }
+  void addFilter(String name, String classname, Map<String, String> parameters);
 
-}
\ No newline at end of file
+  /**
+   * Add a global filter to the container.
+   * @param name filter name
+   * @param classname filter class name
+   * @param parameters a map from parameter names to initial values
+   */
+  void addGlobalFilter(String name, String classname,
+      Map<String, String> parameters);
+}
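
FilterContainer is the abstraction that the forked HttpServer2 implements (see its class declaration later in this patch); filters are registered by name plus a class-name string rather than a Class reference. A short sketch that registers the NoCacheFilter this patch also adds:

    import java.util.Collections;
    import org.apache.hadoop.hdds.server.http.FilterContainer;
    import org.apache.hadoop.hdds.server.http.NoCacheFilter;

    class FilterRegistrationSketch {
      static void installNoCache(FilterContainer container) {
        // a global filter applies to every context the container manages
        container.addGlobalFilter("NoCacheFilter",
            NoCacheFilter.class.getName(),
            Collections.<String, String>emptyMap());
      }
    }
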
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterInitializer.java
similarity index 57%
copy from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
copy to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterInitializer.java
index 899fd9e..388fc21 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterInitializer.java
@@ -15,24 +15,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
-import com.codahale.metrics.MetricRegistry;
-import io.prometheus.client.dropwizard.DropwizardExports;
-import io.prometheus.client.dropwizard.samplebuilder.DefaultSampleBuilder;
+import org.apache.hadoop.conf.Configuration;
 
 /**
- * Collect Dropwizard metrics, but rename ratis specific metrics.
+ * Initialize a javax.servlet.Filter. 
  */
-public class RatisDropwizardExports extends DropwizardExports {
-
+public abstract class FilterInitializer {
   /**
-   * Creates a new DropwizardExports with a {@link DefaultSampleBuilder}.
-   *
-   * @param registry a metric registry to export in prometheus.
+   * Initialize a Filter to a FilterContainer.
+   * @param container The filter container
+   * @param conf Configuration for run-time parameters
    */
-  public RatisDropwizardExports(MetricRegistry registry) {
-    super(registry, new RatisNameRewriteSampleBuilder());
-  }
-
+  public abstract void initFilter(FilterContainer container,
+      Configuration conf);
 }
\ No newline at end of file
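
FilterInitializer is the discovery hook for the above: HttpServer2 instantiates the classes named by its filter-initializer property (ozone.http.filter.initializers in this fork, per the constant later in the diff) and calls initFilter on each, presumably matching the upstream Hadoop behavior. A hypothetical initializer:

    import java.util.Collections;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.server.http.FilterContainer;
    import org.apache.hadoop.hdds.server.http.FilterInitializer;
    import org.apache.hadoop.hdds.server.http.NoCacheFilter;

    public class NoCacheFilterInitializer extends FilterInitializer {
      @Override
      public void initFilter(FilterContainer container, Configuration conf) {
        container.addFilter("NoCacheFilter",
            NoCacheFilter.class.getName(),
            Collections.<String, String>emptyMap());
      }
    }
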
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HtmlQuoting.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HtmlQuoting.java
new file mode 100644
index 0000000..f4262f9
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HtmlQuoting.java
@@ -0,0 +1,235 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.server.http;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * This class is responsible for quoting HTML characters.
+ */
+public final class HtmlQuoting {
+  private static final byte[] AMP_BYTES =
+      "&amp;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] APOS_BYTES =
+      "&apos;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] GT_BYTES =
+      "&gt;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] LT_BYTES =
+      "&lt;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] QUOT_BYTES =
+      "&quot;".getBytes(StandardCharsets.UTF_8);
+
+  private HtmlQuoting() {
+  }
+
+  /**
+   * Does the given string need to be quoted?
+   *
+   * @param data the string to check
+   * @param off  the starting position
+   * @param len  the number of bytes to check
+   * @return does the string contain any of the active html characters?
+   */
+  public static boolean needsQuoting(byte[] data, int off, int len) {
+    for (int i = off; i < off + len; ++i) {
+      switch (data[i]) {
+      case '&':
+      case '<':
+      case '>':
+      case '\'':
+      case '"':
+        return true;
+      default:
+        break;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Does the given string need to be quoted?
+   *
+   * @param str the string to check
+   * @return does the string contain any of the active html characters?
+   */
+  public static boolean needsQuoting(String str) {
+    if (str == null) {
+      return false;
+    }
+    byte[] bytes = str.getBytes(StandardCharsets.UTF_8);
+    return needsQuoting(bytes, 0, bytes.length);
+  }
+
+  /**
+   * Quote all of the active HTML characters in the given string as they
+   * are added to the buffer.
+   *
+   * @param output the stream to write the output to
+   * @param buffer the byte array to take the characters from
+   * @param off    the index of the first byte to quote
+   * @param len    the number of bytes to quote
+   */
+  public static void quoteHtmlChars(OutputStream output, byte[] buffer,
+      int off, int len) throws IOException {
+    for (int i = off; i < off + len; i++) {
+      switch (buffer[i]) {
+      case '&':
+        output.write(AMP_BYTES);
+        break;
+      case '<':
+        output.write(LT_BYTES);
+        break;
+      case '>':
+        output.write(GT_BYTES);
+        break;
+      case '\'':
+        output.write(APOS_BYTES);
+        break;
+      case '"':
+        output.write(QUOT_BYTES);
+        break;
+      default:
+        output.write(buffer, i, 1);
+      }
+    }
+  }
+
+  /**
+   * Quote the given item to make it html-safe.
+   *
+   * @param item the string to quote
+   * @return the quoted string
+   */
+  public static String quoteHtmlChars(String item) {
+    if (item == null) {
+      return null;
+    }
+    byte[] bytes = item.getBytes(StandardCharsets.UTF_8);
+    if (needsQuoting(bytes, 0, bytes.length)) {
+      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+      try {
+        quoteHtmlChars(buffer, bytes, 0, bytes.length);
+        return buffer.toString("UTF-8");
+      } catch (IOException ioe) {
+        // Won't happen, since it is a bytearrayoutputstream
+        return null;
+      }
+    } else {
+      return item;
+    }
+  }
+
+  /**
+   * Return an output stream that quotes all of the output.
+   *
+   * @param out the stream to write the quoted output to
+   * @return a new stream that the application should write to
+   * @throws IOException if the underlying output fails
+   */
+  public static OutputStream quoteOutputStream(final OutputStream out
+  ) throws IOException {
+    return new OutputStream() {
+      private byte[] data = new byte[1];
+
+      @Override
+      public void write(byte[] data, int off, int len) throws IOException {
+        quoteHtmlChars(out, data, off, len);
+      }
+
+      @Override
+      public void write(int b) throws IOException {
+        data[0] = (byte) b;
+        quoteHtmlChars(out, data, 0, 1);
+      }
+
+      @Override
+      public void flush() throws IOException {
+        out.flush();
+      }
+
+      @Override
+      public void close() throws IOException {
+        out.close();
+      }
+    };
+  }
+
+  /**
+   * Remove HTML quoting from a string.
+   *
+   * @param item the string to unquote
+   * @return the unquoted string
+   */
+  public static String unquoteHtmlChars(String item) {
+    if (item == null) {
+      return null;
+    }
+    int next = item.indexOf('&');
+    // nothing was quoted
+    if (next == -1) {
+      return item;
+    }
+    int len = item.length();
+    int posn = 0;
+    StringBuilder buffer = new StringBuilder();
+    while (next != -1) {
+      buffer.append(item.substring(posn, next));
+      if (item.startsWith("&amp;", next)) {
+        buffer.append('&');
+        next += 5;
+      } else if (item.startsWith("&apos;", next)) {
+        buffer.append('\'');
+        next += 6;
+      } else if (item.startsWith("&gt;", next)) {
+        buffer.append('>');
+        next += 4;
+      } else if (item.startsWith("&lt;", next)) {
+        buffer.append('<');
+        next += 4;
+      } else if (item.startsWith("&quot;", next)) {
+        buffer.append('"');
+        next += 6;
+      } else {
+        int end = item.indexOf(';', next) + 1;
+        if (end == 0) {
+          end = len;
+        }
+        throw new IllegalArgumentException("Bad HTML quoting for " +
+            item.substring(next, end));
+      }
+      posn = next;
+      next = item.indexOf('&', posn);
+    }
+    buffer.append(item.substring(posn, len));
+    return buffer.toString();
+  }
+
+  public static void main(String[] args) throws Exception {
+    for (String arg : args) {
+      System.out.println("Original: " + arg);
+      String quoted = quoteHtmlChars(arg);
+      System.out.println("Quoted: " + quoted);
+      String unquoted = unquoteHtmlChars(quoted);
+      System.out.println("Unquoted: " + unquoted);
+      System.out.println();
+    }
+  }
+}
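
Besides the demo main method above, the two entry points are the one-shot quoteHtmlChars(String) and the wrapping quoteOutputStream. A brief usage sketch:

    import java.io.ByteArrayOutputStream;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.hdds.server.http.HtmlQuoting;

    class HtmlQuotingSketch {
      public static void main(String[] args) throws Exception {
        // one-shot quoting: prints &lt;b&gt;
        System.out.println(HtmlQuoting.quoteHtmlChars("<b>"));

        // stream quoting: every byte written through 'quoted' is escaped
        ByteArrayOutputStream raw = new ByteArrayOutputStream();
        OutputStream quoted = HtmlQuoting.quoteOutputStream(raw);
        quoted.write("a & b".getBytes(StandardCharsets.UTF_8));
        quoted.flush();
        System.out.println(raw.toString("UTF-8"));   // prints: a &amp; b
      }
    }
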
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpConfig.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpConfig.java
new file mode 100644
index 0000000..08879c9
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpConfig.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.server.http;
+
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+
+/**
+ * Singleton to get access to HTTP-related configuration.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public final class HttpConfig {
+
+  private HttpConfig() {
+  }
+
+  /**
+   * Enum for different kind of security combinations.
+   */
+  public enum Policy {
+    HTTP_ONLY,
+    HTTPS_ONLY,
+    HTTP_AND_HTTPS;
+
+    private static final Policy[] VALUES = values();
+
+    public static Policy fromString(String value) {
+      for (Policy p : VALUES) {
+        if (p.name().equalsIgnoreCase(value)) {
+          return p;
+        }
+      }
+      return null;
+    }
+
+    public boolean isHttpEnabled() {
+      return this == HTTP_ONLY || this == HTTP_AND_HTTPS;
+    }
+
+    public boolean isHttpsEnabled() {
+      return this == HTTPS_ONLY || this == HTTP_AND_HTTPS;
+    }
+  }
+
+  public static Policy getHttpPolicy(Configuration conf) {
+    String policyStr = conf.get(OzoneConfigKeys.OZONE_HTTP_POLICY_KEY,
+        OzoneConfigKeys.OZONE_HTTP_POLICY_DEFAULT);
+    HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
+    if (policy == null) {
+      throw new HadoopIllegalArgumentException("Unrecognized value '"
+          + policyStr + "' for " + OzoneConfigKeys.OZONE_HTTP_POLICY_KEY);
+    }
+    conf.set(OzoneConfigKeys.OZONE_HTTP_POLICY_KEY, policy.name());
+    return policy;
+  }
+}
\ No newline at end of file
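
Usage is a direct port of the helper just removed from HddsUtils; the configured policy string is matched case-insensitively against the enum names. For instance:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.server.http.HttpConfig;
    import org.apache.hadoop.ozone.OzoneConfigKeys;

    class HttpPolicySketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set(OzoneConfigKeys.OZONE_HTTP_POLICY_KEY, "http_and_https");
        HttpConfig.Policy policy = HttpConfig.getHttpPolicy(conf);
        System.out.println(policy.isHttpEnabled());    // true
        System.out.println(policy.isHttpsEnabled());   // true
        // note: getHttpPolicy also normalizes the stored value to HTTP_AND_HTTPS
      }
    }
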
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpRequestLog.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpRequestLog.java
new file mode 100644
index 0000000..63b07d1
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpRequestLog.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.server.http;
+
+import java.util.HashMap;
+
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogConfigurationException;
+import org.apache.commons.logging.LogFactory;
+import org.apache.log4j.Appender;
+import org.eclipse.jetty.server.AsyncRequestLogWriter;
+import org.eclipse.jetty.server.CustomRequestLog;
+import org.eclipse.jetty.server.RequestLog;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * RequestLog object for use with Http.
+ */
+public final class HttpRequestLog {
+
+  public static final Logger LOG =
+      LoggerFactory.getLogger(HttpRequestLog.class);
+  private static final HashMap<String, String> SERVER_TO_COMPONENT;
+
+  private HttpRequestLog() {
+  }
+
+  static {
+    SERVER_TO_COMPONENT = new HashMap<String, String>();
+    SERVER_TO_COMPONENT.put("cluster", "resourcemanager");
+    SERVER_TO_COMPONENT.put("hdfs", "namenode");
+    SERVER_TO_COMPONENT.put("node", "nodemanager");
+  }
+
+  public static RequestLog getRequestLog(String name) {
+
+    String lookup = SERVER_TO_COMPONENT.get(name);
+    if (lookup != null) {
+      name = lookup;
+    }
+    String loggerName = "http.requests." + name;
+    String appenderName = name + "requestlog";
+    Log logger = LogFactory.getLog(loggerName);
+
+    boolean isLog4JLogger;
+
+    try {
+      isLog4JLogger = logger instanceof Log4JLogger;
+    } catch (NoClassDefFoundError err) {
+      // In some dependent projects, log4j may not even be on the classpath at
+      // runtime, in which case the above instanceof check will throw
+      // NoClassDefFoundError.
+      LOG.debug("Could not load Log4JLogger class", err);
+      isLog4JLogger = false;
+    }
+    if (isLog4JLogger) {
+      Log4JLogger httpLog4JLog = (Log4JLogger) logger;
+      org.apache.log4j.Logger httpLogger = httpLog4JLog.getLogger();
+      Appender appender = null;
+
+      try {
+        appender = httpLogger.getAppender(appenderName);
+      } catch (LogConfigurationException e) {
+        LOG.warn("Http request log for {} could not be created", loggerName);
+        throw e;
+      }
+
+      if (appender == null) {
+        LOG.info("Http request log for {} is not defined", loggerName);
+        return null;
+      }
+
+      if (appender instanceof HttpRequestLogAppender) {
+        HttpRequestLogAppender requestLogAppender
+            = (HttpRequestLogAppender) appender;
+        AsyncRequestLogWriter logWriter = new AsyncRequestLogWriter();
+        logWriter.setFilename(requestLogAppender.getFilename());
+        logWriter.setRetainDays(requestLogAppender.getRetainDays());
+        return new CustomRequestLog(logWriter,
+            CustomRequestLog.EXTENDED_NCSA_FORMAT);
+      } else {
+        LOG.warn("Jetty request log for {} was of the wrong class", loggerName);
+        return null;
+      }
+    } else {
+      LOG.warn("Jetty request log can only be enabled using Log4j");
+      return null;
+    }
+  }
+}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpRequestLogAppender.java
similarity index 51%
copy from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
copy to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpRequestLogAppender.java
index 899fd9e..2666692 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpRequestLogAppender.java
@@ -15,24 +15,48 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
-import com.codahale.metrics.MetricRegistry;
-import io.prometheus.client.dropwizard.DropwizardExports;
-import io.prometheus.client.dropwizard.samplebuilder.DefaultSampleBuilder;
+import org.apache.log4j.spi.LoggingEvent;
+import org.apache.log4j.AppenderSkeleton;
 
 /**
- * Collect Dropwizard metrics, but rename ratis specific metrics.
+ * Log4j Appender adapter for HttpRequestLog.
  */
-public class RatisDropwizardExports extends DropwizardExports {
+public class HttpRequestLogAppender extends AppenderSkeleton {
 
-  /**
-   * Creates a new DropwizardExports with a {@link DefaultSampleBuilder}.
-   *
-   * @param registry a metric registry to export in prometheus.
-   */
-  public RatisDropwizardExports(MetricRegistry registry) {
-    super(registry, new RatisNameRewriteSampleBuilder());
+  private String filename;
+  private int retainDays;
+
+  public HttpRequestLogAppender() {
+  }
+
+  public void setRetainDays(int retainDays) {
+    this.retainDays = retainDays;
+  }
+
+  public int getRetainDays() {
+    return retainDays;
+  }
+
+  public void setFilename(String filename) {
+    this.filename = filename;
   }
 
-}
\ No newline at end of file
+  public String getFilename() {
+    return filename;
+  }
+
+  @Override
+  public void append(LoggingEvent event) {
+  }
+
+  @Override
+  public void close() {
+  }
+
+  @Override
+  public boolean requiresLayout() {
+    return false;
+  }
+}
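
The appender is a pure configuration carrier (append is a no-op): HttpRequestLog looks up the Log4j logger http.requests.<name>, expects an appender named <name>requestlog of exactly this type, and feeds its filename/retainDays into Jetty's AsyncRequestLogWriter. A sketch of that contract, with the server name and file path assumed:

    import org.apache.hadoop.hdds.server.http.HttpRequestLog;
    import org.apache.hadoop.hdds.server.http.HttpRequestLogAppender;
    import org.eclipse.jetty.server.RequestLog;

    class RequestLogWiringSketch {
      static RequestLog requestLogFor(String name) {       // e.g. "om" (assumed)
        HttpRequestLogAppender appender = new HttpRequestLogAppender();
        appender.setName(name + "requestlog");             // naming contract above
        appender.setFilename("/tmp/" + name + "-yyyy_mm_dd.request.log"); // assumed
        appender.setRetainDays(7);
        org.apache.log4j.Logger.getLogger("http.requests." + name)
            .addAppender(appender);
        return HttpRequestLog.getRequestLog(name);         // null if misconfigured
      }
    }
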
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
new file mode 100644
index 0000000..482615b
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
@@ -0,0 +1,1707 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.server.http;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.io.PrintStream;
+import java.net.BindException;
+import java.net.InetSocketAddress;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+import javax.servlet.http.HttpServletResponse;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.sun.jersey.spi.container.servlet.ServletContainer;
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.ConfServlet;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configuration.IntegerRanges;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.jmx.JMXJsonServlet;
+import org.apache.hadoop.log.LogLevel;
+import org.apache.hadoop.security.AuthenticationFilterInitializer;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
+import org.eclipse.jetty.http.HttpVersion;
+import org.eclipse.jetty.server.ConnectionFactory;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.RequestLog;
+import org.eclipse.jetty.server.SecureRequestCustomizer;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.SslConnectionFactory;
+import org.eclipse.jetty.server.handler.ContextHandlerCollection;
+import org.eclipse.jetty.server.handler.HandlerCollection;
+import org.eclipse.jetty.server.handler.RequestLogHandler;
+import org.eclipse.jetty.server.session.SessionHandler;
+import org.eclipse.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.FilterMapping;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.servlet.ServletMapping;
+import org.eclipse.jetty.util.ArrayUtil;
+import org.eclipse.jetty.util.MultiException;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Create a Jetty embedded server to answer http requests. The primary goal is
+ * to serve up status information for the server. There are three contexts:
+ * "/logs/" -> points to the log directory "/static/" -> points to common static
+ * files (src/webapps/static) "/" -> the jsp server code from
+ * (src/webapps/<name>)
+ *
+ * This class is a fork of the old HttpServer. HttpServer exists for
+ * compatibility reasons. See HBASE-10336 for more details.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public final class HttpServer2 implements FilterContainer {
+  public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
+
+  private static final String HTTP_SCHEME = "http";
+  private static final String HTTPS_SCHEME = "https";
+
+  private static final String HTTP_MAX_REQUEST_HEADER_SIZE_KEY =
+      "hadoop.http.max.request.header.size";
+  private static final int HTTP_MAX_REQUEST_HEADER_SIZE_DEFAULT = 65536;
+  private static final String HTTP_MAX_RESPONSE_HEADER_SIZE_KEY =
+      "hadoop.http.max.response.header.size";
+  private static final int HTTP_MAX_RESPONSE_HEADER_SIZE_DEFAULT = 65536;
+
+  private static final String HTTP_SOCKET_BACKLOG_SIZE_KEY =
+      "hadoop.http.socket.backlog.size";
+  private static final int HTTP_SOCKET_BACKLOG_SIZE_DEFAULT = 128;
+  private static final String HTTP_MAX_THREADS_KEY = "hadoop.http.max.threads";
+  private static final String HTTP_ACCEPTOR_COUNT_KEY =
+      "hadoop.http.acceptor.count";
+  // -1 to use default behavior of setting count based on CPU core count
+  private static final int HTTP_ACCEPTOR_COUNT_DEFAULT = -1;
+  private static final String HTTP_SELECTOR_COUNT_KEY =
+      "hadoop.http.selector.count";
+  // -1 to use default behavior of setting count based on CPU core count
+  private static final int HTTP_SELECTOR_COUNT_DEFAULT = -1;
+  // idle timeout in milliseconds
+  private static final String HTTP_IDLE_TIMEOUT_MS_KEY =
+      "hadoop.http.idle_timeout.ms";
+  private static final int HTTP_IDLE_TIMEOUT_MS_DEFAULT = 10000;
+  private static final String HTTP_TEMP_DIR_KEY = "hadoop.http.temp.dir";
+
+  private static final String FILTER_INITIALIZER_PROPERTY
+      = "ozone.http.filter.initializers";
+
+  // The ServletContext attribute where the daemon Configuration
+  // gets stored.
+  private static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
+  private static final String ADMINS_ACL = "admins.acl";
+  private static final String SPNEGO_FILTER = "SpnegoFilter";
+  private static final String NO_CACHE_FILTER = "NoCacheFilter";
+
+  private static final String BIND_ADDRESS = "bind.address";
+
+  private final AccessControlList adminsAcl;
+
+  private final Server webServer;
+
+  private final HandlerCollection handlers;
+
+  private final List<ServerConnector> listeners = Lists.newArrayList();
+
+  private final WebAppContext webAppContext;
+  private final boolean findPort;
+  private final IntegerRanges portRanges;
+  private final Map<ServletContextHandler, Boolean> defaultContexts =
+      new HashMap<>();
+  private final List<String> filterNames = new ArrayList<>();
+  static final String STATE_DESCRIPTION_ALIVE = " - alive";
+  static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
+  private final SignerSecretProvider secretProvider;
+  private XFrameOption xFrameOption;
+  private boolean xFrameOptionIsEnabled;
+  public static final String HTTP_HEADER_PREFIX = "hadoop.http.header.";
+  private static final String HTTP_HEADER_REGEX =
+      "hadoop\\.http\\.header\\.([a-zA-Z\\-_]+)";
+  static final String X_XSS_PROTECTION =
+      "X-XSS-Protection:1; mode=block";
+  static final String X_CONTENT_TYPE_OPTIONS =
+      "X-Content-Type-Options:nosniff";
+  private static final String X_FRAME_OPTIONS = "X-FRAME-OPTIONS";
+  private static final Pattern PATTERN_HTTP_HEADER_REGEX =
+      Pattern.compile(HTTP_HEADER_REGEX);
+  /**
+   * Class to construct instances of HTTP server with specific options.
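+   *
+   * <p>A minimal usage sketch (name and endpoint are illustrative):
+   * <pre>{@code
+   * HttpServer2 server = new HttpServer2.Builder()
+   *     .setName("ozone")
+   *     .addEndpoint(URI.create("http://0.0.0.0:9874"))
+   *     .setConf(new Configuration())
+   *     .build();
+   * server.start();
+   * }</pre>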
+   */
+  public static class Builder {
+    private ArrayList<URI> endpoints = Lists.newArrayList();
+    private String name;
+    private Configuration conf;
+    private Configuration sslConf;
+    private String[] pathSpecs;
+    private AccessControlList adminsAcl;
+    private boolean securityEnabled = false;
+    private String usernameConfKey;
+    private String keytabConfKey;
+    private boolean needsClientAuth;
+    private String trustStore;
+    private String trustStorePassword;
+    private String trustStoreType;
+
+    private String keyStore;
+    private String keyStorePassword;
+    private String keyStoreType;
+
+    // The -keypass option in keytool
+    private String keyPassword;
+
+    private boolean findPort;
+    private IntegerRanges portRanges = null;
+
+    private String hostName;
+    private boolean disallowFallbackToRandomSignerSecretProvider;
+    private String authFilterConfigurationPrefix =
+        "hadoop.http.authentication.";
+    private String excludeCiphers;
+
+    private boolean xFrameEnabled;
+    private XFrameOption xFrameOption = XFrameOption.SAMEORIGIN;
+
+    public Builder setName(String serverName) {
+      this.name = serverName;
+      return this;
+    }
+
+    /**
+     * Add an endpoint that the HTTP server should listen to.
+     *
+     * @param endpoint
+     *          the endpoint that the HTTP server should listen to. The
+     *          scheme specifies the protocol (i.e. HTTP / HTTPS), the host
+     *          specifies the binding address, and the port specifies the
+     *          listening port. An unspecified or zero port means that the
+     *          server may bind to any free port.
+     */
+    public Builder addEndpoint(URI endpoint) {
+      endpoints.add(endpoint);
+      return this;
+    }
+
+    /**
+     * Set the hostname of the http server. The host name is used to resolve the
+     * _HOST field in Kerberos principals. The hostname of the first listener
+     * will be used if the name is unspecified.
+     */
+    public Builder hostName(String host) {
+      this.hostName = host;
+      return this;
+    }
+
+    public Builder trustStore(String location, String password, String type) {
+      this.trustStore = location;
+      this.trustStorePassword = password;
+      this.trustStoreType = type;
+      return this;
+    }
+
+    public Builder keyStore(String location, String password, String type) {
+      this.keyStore = location;
+      this.keyStorePassword = password;
+      this.keyStoreType = type;
+      return this;
+    }
+
+    public Builder keyPassword(String password) {
+      this.keyPassword = password;
+      return this;
+    }
+
+    /**
+     * Specify whether the server should require client certificate
+     * authentication for SSL connections.
+     */
+    public Builder needsClientAuth(boolean value) {
+      this.needsClientAuth = value;
+      return this;
+    }
+
+    public Builder setFindPort(boolean portFind) {
+      this.findPort = portFind;
+      return this;
+    }
+
+    public Builder setPortRanges(IntegerRanges ranges) {
+      this.portRanges = ranges;
+      return this;
+    }
+
+    public Builder setConf(Configuration configuration) {
+      this.conf = configuration;
+      return this;
+    }
+
+    /**
+     * Specify the SSL configuration to load. This API provides an alternative
+     * to keyStore/keyPassword/trustStore.
+     */
+    public Builder setSSLConf(Configuration sslCnf) {
+      this.sslConf = sslCnf;
+      return this;
+    }
+
+    public Builder setPathSpec(String[] pathSpec) {
+      this.pathSpecs = pathSpec.clone();
+      return this;
+    }
+
+    public Builder setACL(AccessControlList acl) {
+      this.adminsAcl = acl;
+      return this;
+    }
+
+    public Builder setSecurityEnabled(boolean enabled) {
+      this.securityEnabled = enabled;
+      return this;
+    }
+
+    public Builder setUsernameConfKey(String confKey) {
+      this.usernameConfKey = confKey;
+      return this;
+    }
+
+    public Builder setKeytabConfKey(String confKey) {
+      this.keytabConfKey = confKey;
+      return this;
+    }
+
+    public Builder disallowFallbackToRandomSignerSecretProvider(boolean value) {
+      this.disallowFallbackToRandomSignerSecretProvider = value;
+      return this;
+    }
+
+    public Builder authFilterConfigurationPrefix(String value) {
+      this.authFilterConfigurationPrefix = value;
+      return this;
+    }
+
+    public Builder excludeCiphers(String pExcludeCiphers) {
+      this.excludeCiphers = pExcludeCiphers;
+      return this;
+    }
+
+    /**
+     * Adds the ability to control X_FRAME_OPTIONS on HttpServer2.
+     * @param enabled - True enables X_FRAME_OPTIONS; false disables it.
+     * @return Builder.
+     */
+    public Builder configureXFrame(boolean enabled) {
+      this.xFrameEnabled = enabled;
+      return this;
+    }
+
+    /**
+     * Sets a valid X-Frame-option that can be used by HttpServer2.
+     * @param option - String DENY, SAMEORIGIN or ALLOW-FROM are the only valid
+     *               options. Any other value will throw an
+     *               IllegalArgumentException.
+     * @return Builder.
+     */
+    public Builder setXFrameOption(String option) {
+      this.xFrameOption = XFrameOption.getEnum(option);
+      return this;
+    }
+
+    /**
+     * A wrapper of {@link Configuration#getPassword(String)}. It returns
+     * <code>String</code> instead of <code>char[]</code>.
+     *
+     * @param conf the configuration
+     * @param name the property name
+     * @return the password string or null
+     */
+    private static String getPasswordString(Configuration conf, String name)
+        throws IOException {
+      char[] passchars = conf.getPassword(name);
+      if (passchars == null) {
+        return null;
+      }
+      return new String(passchars);
+    }
+
+    /**
+     * Load SSL properties from the SSL configuration.
+     */
+    private void loadSSLConfiguration() throws IOException {
+      if (sslConf == null) {
+        return;
+      }
+      needsClientAuth = sslConf.getBoolean(
+          SSLFactory.SSL_SERVER_NEED_CLIENT_AUTH,
+          SSLFactory.SSL_SERVER_NEED_CLIENT_AUTH_DEFAULT);
+      keyStore = sslConf.getTrimmed(SSLFactory.SSL_SERVER_KEYSTORE_LOCATION);
+      if (keyStore == null || keyStore.isEmpty()) {
+        throw new IOException(String.format("Property %s not specified",
+            SSLFactory.SSL_SERVER_KEYSTORE_LOCATION));
+      }
+      keyStorePassword = getPasswordString(sslConf,
+          SSLFactory.SSL_SERVER_KEYSTORE_PASSWORD);
+      if (keyStorePassword == null) {
+        throw new IOException(String.format("Property %s not specified",
+            SSLFactory.SSL_SERVER_KEYSTORE_PASSWORD));
+      }
+      keyStoreType = sslConf.get(SSLFactory.SSL_SERVER_KEYSTORE_TYPE,
+          SSLFactory.SSL_SERVER_KEYSTORE_TYPE_DEFAULT);
+      keyPassword = getPasswordString(sslConf,
+          SSLFactory.SSL_SERVER_KEYSTORE_KEYPASSWORD);
+      trustStore = sslConf.get(SSLFactory.SSL_SERVER_TRUSTSTORE_LOCATION);
+      trustStorePassword = getPasswordString(sslConf,
+          SSLFactory.SSL_SERVER_TRUSTSTORE_PASSWORD);
+      trustStoreType = sslConf.get(SSLFactory.SSL_SERVER_TRUSTSTORE_TYPE,
+          SSLFactory.SSL_SERVER_TRUSTSTORE_TYPE_DEFAULT);
+      excludeCiphers = sslConf.get(SSLFactory.SSL_SERVER_EXCLUDE_CIPHER_LIST);
+    }
+
+    public HttpServer2 build() throws IOException {
+      Preconditions.checkNotNull(name, "name is not set");
+      Preconditions.checkState(!endpoints.isEmpty(), "No endpoints specified");
+
+      if (hostName == null) {
+        hostName = endpoints.get(0).getHost();
+      }
+
+      if (this.conf == null) {
+        conf = new Configuration();
+      }
+
+      HttpServer2 server = new HttpServer2(this);
+
+      if (this.securityEnabled) {
+        server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
+      }
+
+      for (URI ep : endpoints) {
+        if (HTTPS_SCHEME.equals(ep.getScheme())) {
+          loadSSLConfiguration();
+          break;
+        }
+      }
+
+      int requestHeaderSize = conf.getInt(
+          HTTP_MAX_REQUEST_HEADER_SIZE_KEY,
+          HTTP_MAX_REQUEST_HEADER_SIZE_DEFAULT);
+      int responseHeaderSize = conf.getInt(
+          HTTP_MAX_RESPONSE_HEADER_SIZE_KEY,
+          HTTP_MAX_RESPONSE_HEADER_SIZE_DEFAULT);
+      int idleTimeout = conf.getInt(HTTP_IDLE_TIMEOUT_MS_KEY,
+          HTTP_IDLE_TIMEOUT_MS_DEFAULT);
+
+      HttpConfiguration httpConfig = new HttpConfiguration();
+      httpConfig.setRequestHeaderSize(requestHeaderSize);
+      httpConfig.setResponseHeaderSize(responseHeaderSize);
+      httpConfig.setSendServerVersion(false);
+
+      int backlogSize = conf.getInt(HTTP_SOCKET_BACKLOG_SIZE_KEY,
+          HTTP_SOCKET_BACKLOG_SIZE_DEFAULT);
+
+      for (URI ep : endpoints) {
+        final ServerConnector connector;
+        String scheme = ep.getScheme();
+        if (HTTP_SCHEME.equals(scheme)) {
+          connector = createHttpChannelConnector(server.webServer,
+              httpConfig);
+        } else if (HTTPS_SCHEME.equals(scheme)) {
+          connector = createHttpsChannelConnector(server.webServer,
+              httpConfig);
+        } else {
+          throw new HadoopIllegalArgumentException(
+              "unknown scheme for endpoint:" + ep);
+        }
+        connector.setHost(ep.getHost());
+        connector.setPort(ep.getPort() == -1 ? 0 : ep.getPort());
+        connector.setAcceptQueueSize(backlogSize);
+        connector.setIdleTimeout(idleTimeout);
+        server.addListener(connector);
+      }
+      server.loadListeners();
+      return server;
+    }
+
+    private ServerConnector createHttpChannelConnector(
+        Server server, HttpConfiguration httpConfig) {
+      ServerConnector conn = new ServerConnector(server,
+          conf.getInt(HTTP_ACCEPTOR_COUNT_KEY, HTTP_ACCEPTOR_COUNT_DEFAULT),
+          conf.getInt(HTTP_SELECTOR_COUNT_KEY, HTTP_SELECTOR_COUNT_DEFAULT));
+      ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig);
+      conn.addConnectionFactory(connFactory);
+      if (Shell.WINDOWS) {
+        // The result of setting the SO_REUSEADDR flag is different on Windows:
+        // http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
+        // Without it, two server instances can start on the same machine and
+        // listen on the same port, with indeterminate routing of incoming
+        // requests between them.
+        conn.setReuseAddress(false);
+      }
+      return conn;
+    }
+
+    private ServerConnector createHttpsChannelConnector(
+        Server server, HttpConfiguration httpConfig) {
+      httpConfig.setSecureScheme(HTTPS_SCHEME);
+      httpConfig.addCustomizer(new SecureRequestCustomizer());
+      ServerConnector conn = createHttpChannelConnector(server, httpConfig);
+
+      SslContextFactory.Server sslContextFactory =
+          new SslContextFactory.Server();
+      sslContextFactory.setNeedClientAuth(needsClientAuth);
+      if (keyPassword != null) {
+        sslContextFactory.setKeyManagerPassword(keyPassword);
+      }
+      if (keyStore != null) {
+        sslContextFactory.setKeyStorePath(keyStore);
+        sslContextFactory.setKeyStoreType(keyStoreType);
+        if (keyStorePassword != null) {
+          sslContextFactory.setKeyStorePassword(keyStorePassword);
+        }
+      }
+      if (trustStore != null) {
+        sslContextFactory.setTrustStorePath(trustStore);
+        sslContextFactory.setTrustStoreType(trustStoreType);
+        if (trustStorePassword != null) {
+          sslContextFactory.setTrustStorePassword(trustStorePassword);
+        }
+      }
+      if (null != excludeCiphers && !excludeCiphers.isEmpty()) {
+        sslContextFactory.setExcludeCipherSuites(
+            StringUtils.getTrimmedStrings(excludeCiphers));
+        LOG.info("Excluded Cipher List:" + excludeCiphers);
+      }
+
+      conn.addFirstConnectionFactory(new SslConnectionFactory(sslContextFactory,
+          HttpVersion.HTTP_1_1.asString()));
+
+      return conn;
+    }
+  }
+
+  private HttpServer2(final Builder b) throws IOException {
+    final String appDir = getWebAppsPath(b.name);
+    this.webServer = new Server();
+    this.adminsAcl = b.adminsAcl;
+    this.handlers = new HandlerCollection();
+    this.webAppContext = createWebAppContext(b, adminsAcl, appDir);
+    this.xFrameOptionIsEnabled = b.xFrameEnabled;
+    this.xFrameOption = b.xFrameOption;
+
+    try {
+      this.secretProvider =
+          constructSecretProvider(b, webAppContext.getServletContext());
+      this.webAppContext.getServletContext().setAttribute(
+          AuthenticationFilter.SIGNER_SECRET_PROVIDER_ATTRIBUTE,
+              secretProvider);
+    } catch (IOException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new IOException(e);
+    }
+
+    this.findPort = b.findPort;
+    this.portRanges = b.portRanges;
+    initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs);
+  }
+
+  private void initializeWebServer(String name, String hostName,
+      Configuration conf, String[] pathSpecs)
+      throws IOException {
+
+    Preconditions.checkNotNull(webAppContext);
+
+    int maxThreads = conf.getInt(HTTP_MAX_THREADS_KEY, -1);
+    // If HTTP_MAX_THREADS_KEY is not configured, QueuedThreadPool will use
+    // its default value (currently 250).
+
+    QueuedThreadPool threadPool = (QueuedThreadPool) webServer.getThreadPool();
+    threadPool.setDaemon(true);
+    if (maxThreads != -1) {
+      threadPool.setMaxThreads(maxThreads);
+    }
+
+    SessionHandler handler = webAppContext.getSessionHandler();
+    handler.setHttpOnly(true);
+    handler.getSessionCookieConfig().setSecure(true);
+
+    ContextHandlerCollection contexts = new ContextHandlerCollection();
+    RequestLog requestLog = HttpRequestLog.getRequestLog(name);
+
+    handlers.addHandler(contexts);
+    if (requestLog != null) {
+      RequestLogHandler requestLogHandler = new RequestLogHandler();
+      requestLogHandler.setRequestLog(requestLog);
+      handlers.addHandler(requestLogHandler);
+    }
+    handlers.addHandler(webAppContext);
+    final String appDir = getWebAppsPath(name);
+    addDefaultApps(contexts, appDir, conf);
+    webServer.setHandler(handlers);
+
+    Map<String, String> xFrameParams = setHeaders(conf);
+    addGlobalFilter("safety", QuotingInputFilter.class.getName(), xFrameParams);
+    final FilterInitializer[] initializers = getFilterInitializers(conf);
+    if (initializers != null) {
+      conf = new Configuration(conf);
+      conf.set(BIND_ADDRESS, hostName);
+      for (FilterInitializer c : initializers) {
+        c.initFilter(this, conf);
+      }
+    }
+
+    addDefaultServlets();
+
+    if (pathSpecs != null) {
+      for (String path : pathSpecs) {
+        LOG.info("adding path spec: " + path);
+        addFilterPathMapping(path, webAppContext);
+      }
+    }
+  }
+
+  private void addListener(ServerConnector connector) {
+    listeners.add(connector);
+  }
+
+  private static WebAppContext createWebAppContext(Builder b,
+      AccessControlList adminsAcl, final String appDir) {
+    WebAppContext ctx = new WebAppContext();
+    ctx.setDefaultsDescriptor(null);
+    ServletHolder holder = new ServletHolder(new DefaultServlet());
+    Map<String, String> params = ImmutableMap.<String, String>builder()
+        .put("acceptRanges", "true")
+        .put("dirAllowed", "false")
+        .put("gzip", "true")
+        .put("useFileMappedBuffer", "true")
+        .build();
+    holder.setInitParameters(params);
+    ctx.setWelcomeFiles(new String[] {"index.html"});
+    ctx.addServlet(holder, "/");
+    ctx.setDisplayName(b.name);
+    ctx.setContextPath("/");
+    ctx.setWar(appDir + "/" + b.name);
+    String tempDirectory = b.conf.get(HTTP_TEMP_DIR_KEY);
+    if (tempDirectory != null && !tempDirectory.isEmpty()) {
+      ctx.setTempDirectory(new File(tempDirectory));
+      ctx.setAttribute("javax.servlet.context.tempdir", tempDirectory);
+    }
+    ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, b.conf);
+    ctx.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
+    addNoCacheFilter(ctx);
+    return ctx;
+  }
+
+  private static SignerSecretProvider constructSecretProvider(final Builder b,
+      ServletContext ctx)
+      throws Exception {
+    final Configuration conf = b.conf;
+    Properties config = getFilterProperties(conf,
+        b.authFilterConfigurationPrefix);
+    return AuthenticationFilter.constructSecretProvider(
+        ctx, config, b.disallowFallbackToRandomSignerSecretProvider);
+  }
+
+  private static Properties getFilterProperties(Configuration conf, String
+      prefix) {
+    Properties prop = new Properties();
+    Map<String, String> filterConfig = AuthenticationFilterInitializer
+        .getFilterConfigMap(conf, prefix);
+    prop.putAll(filterConfig);
+    return prop;
+  }
+
+  private static void addNoCacheFilter(ServletContextHandler ctxt) {
+    defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(),
+        Collections.<String, String>emptyMap(), new String[] {"/*"});
+  }
+
+  /**
+   * Get an array of FilterInitializers specified in the conf.
+   */
+  private static FilterInitializer[] getFilterInitializers(Configuration conf) {
+    if (conf == null) {
+      return null;
+    }
+
+    Class<?>[] classes =
+        conf.getClasses(FILTER_INITIALIZER_PROPERTY, StaticUserWebFilter.class);
+    if (classes == null) {
+      return null;
+    }
+
+    FilterInitializer[] initializers = new FilterInitializer[classes.length];
+    for (int i = 0; i < classes.length; i++) {
+      initializers[i] = (FilterInitializer) ReflectionUtils.newInstance(
+          classes[i], conf);
+    }
+    return initializers;
+  }
+
+  /**
+   * Add default apps.
+   * @param parent The parent handler collection
+   * @param appDir The application directory
+   * @param conf The configuration
+   * @throws IOException
+   */
+  protected void addDefaultApps(ContextHandlerCollection parent,
+      final String appDir, Configuration conf) throws IOException {
+    // set up the context for "/logs/" if "hadoop.log.dir" property is defined
+    // and it's enabled.
+    String logDir = System.getProperty("hadoop.log.dir");
+    boolean logsEnabled = conf.getBoolean(
+        CommonConfigurationKeys.HADOOP_HTTP_LOGS_ENABLED,
+        CommonConfigurationKeys.HADOOP_HTTP_LOGS_ENABLED_DEFAULT);
+    if (logDir != null && logsEnabled) {
+      ServletContextHandler logContext =
+          new ServletContextHandler(parent, "/logs");
+      logContext.setResourceBase(logDir);
+      logContext.addServlet(AdminAuthorizedServlet.class, "/*");
+      if (conf.getBoolean(
+          CommonConfigurationKeys.HADOOP_JETTY_LOGS_SERVE_ALIASES,
+          CommonConfigurationKeys.DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES)) {
+        @SuppressWarnings("unchecked")
+        Map<String, String> params = logContext.getInitParams();
+        params.put("org.eclipse.jetty.servlet.Default.aliases", "true");
+      }
+      logContext.setDisplayName("logs");
+      SessionHandler handler = new SessionHandler();
+      handler.setHttpOnly(true);
+      handler.getSessionCookieConfig().setSecure(true);
+      logContext.setSessionHandler(handler);
+      setContextAttributes(logContext, conf);
+      addNoCacheFilter(logContext);
+      defaultContexts.put(logContext, true);
+    }
+    // set up the context for "/static/*"
+    ServletContextHandler staticContext =
+        new ServletContextHandler(parent, "/static");
+    staticContext.setResourceBase(appDir + "/static");
+    staticContext.addServlet(DefaultServlet.class, "/*");
+    staticContext.setDisplayName("static");
+    @SuppressWarnings("unchecked")
+    Map<String, String> params = staticContext.getInitParams();
+    params.put("org.eclipse.jetty.servlet.Default.dirAllowed", "false");
+    params.put("org.eclipse.jetty.servlet.Default.gzip", "true");
+    SessionHandler handler = new SessionHandler();
+    handler.setHttpOnly(true);
+    handler.getSessionCookieConfig().setSecure(true);
+    staticContext.setSessionHandler(handler);
+    setContextAttributes(staticContext, conf);
+    defaultContexts.put(staticContext, true);
+  }
+
+  private void setContextAttributes(ServletContextHandler context,
+      Configuration conf) {
+    context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
+    context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
+  }
+
+  /**
+   * Add default servlets.
+   */
+  protected void addDefaultServlets() {
+    // set up default servlets
+    addServlet("stacks", "/stacks", StackServlet.class);
+    addServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
+    addServlet("jmx", "/jmx", JMXJsonServlet.class);
+    addServlet("conf", "/conf", ConfServlet.class);
+  }
+
+  public void addContext(ServletContextHandler ctxt, boolean isFiltered) {
+    handlers.addHandler(ctxt);
+    addNoCacheFilter(ctxt);
+    defaultContexts.put(ctxt, isFiltered);
+  }
+
+  /**
+   * Set a value in the webapp context. These values are available to the jsp
+   * pages as "application.getAttribute(name)".
+   * @param name The name of the attribute
+   * @param value The value of the attribute
+   */
+  public void setAttribute(String name, Object value) {
+    webAppContext.setAttribute(name, value);
+  }
+
+  /**
+   * Add a Jersey resource package.
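+   *
+   * <p>Example (package name and path are illustrative):
+   * <pre>{@code
+   * server.addJerseyResourcePackage("org.example.rest.resources", "/api/*");
+   * }</pre>
+   *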
+   * @param packageName The Java package name containing the Jersey resource.
+   * @param pathSpec The path spec for the servlet
+   */
+  public void addJerseyResourcePackage(final String packageName,
+      final String pathSpec) {
+    LOG.info("addJerseyResourcePackage: packageName=" + packageName
+        + ", pathSpec=" + pathSpec);
+    final ServletHolder sh = new ServletHolder(ServletContainer.class);
+    sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
+        "com.sun.jersey.api.core.PackagesResourceConfig");
+    sh.setInitParameter("com.sun.jersey.config.property.packages", packageName);
+    webAppContext.addServlet(sh, pathSpec);
+  }
+
+  /**
+   * Add a servlet to the server.
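+   *
+   * <p>Example (servlet class is illustrative):
+   * <pre>{@code
+   * server.addServlet("metrics", "/metrics", MyMetricsServlet.class);
+   * }</pre>
+   *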
+   * @param name The name of the servlet (can be passed as null)
+   * @param pathSpec The path spec for the servlet
+   * @param clazz The servlet class
+   */
+  public void addServlet(String name, String pathSpec,
+      Class<? extends HttpServlet> clazz) {
+    addInternalServlet(name, pathSpec, clazz, false);
+    addFilterPathMapping(pathSpec, webAppContext);
+  }
+
+  /**
+   * Add an internal servlet in the server.
+   * Note: This method is to be used for adding servlets that facilitate
+   * internal communication and not for user-facing functionality. For
+   * servlets added using this method, filters are not enabled.
+   *
+   * @param name The name of the servlet (can be passed as null)
+   * @param pathSpec The path spec for the servlet
+   * @param clazz The servlet class
+   */
+  public void addInternalServlet(String name, String pathSpec,
+      Class<? extends HttpServlet> clazz) {
+    addInternalServlet(name, pathSpec, clazz, false);
+  }
+
+  /**
+   * Add an internal servlet in the server, specifying whether or not to
+   * protect with Kerberos authentication.
+   * Note: This method is to be used for adding servlets that facilitate
+   * internal communication and not for user-facing functionality. For
+   * servlets added using this method, filters (except internal Kerberos
+   * filters) are not enabled.
+   *
+   * @param name The name of the servlet (can be passed as null)
+   * @param pathSpec The path spec for the servlet
+   * @param clazz The servlet class
+   * @param requireAuth Require Kerberos authentication to access the servlet
+   */
+  public void addInternalServlet(String name, String pathSpec,
+      Class<? extends HttpServlet> clazz, boolean requireAuth) {
+    ServletHolder holder = new ServletHolder(clazz);
+    if (name != null) {
+      holder.setName(name);
+    }
+    // Jetty doesn't like the same path spec mapping to different servlets, so
+    // if there's already a mapping for this pathSpec, remove it and assume that
+    // the newest one is the one we want
+    final ServletMapping[] servletMappings =
+        webAppContext.getServletHandler().getServletMappings();
+    for (int i = 0; i < servletMappings.length; i++) {
+      if (servletMappings[i].containsPathSpec(pathSpec)) {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Found existing " + servletMappings[i].getServletName() +
+              " servlet at path " + pathSpec + "; will replace mapping" +
+              " with " + holder.getName() + " servlet");
+        }
+        ServletMapping[] newServletMappings =
+            ArrayUtil.removeFromArray(servletMappings, servletMappings[i]);
+        webAppContext.getServletHandler()
+            .setServletMappings(newServletMappings);
+        break;
+      }
+    }
+    webAppContext.addServlet(holder, pathSpec);
+
+    if (requireAuth && UserGroupInformation.isSecurityEnabled()) {
+      LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
+      ServletHandler handler = webAppContext.getServletHandler();
+      FilterMapping fmap = new FilterMapping();
+      fmap.setPathSpec(pathSpec);
+      fmap.setFilterName(SPNEGO_FILTER);
+      fmap.setDispatches(FilterMapping.ALL);
+      handler.addFilterMapping(fmap);
+    }
+  }
+
+  /**
+   * Add an internal servlet in the server, with initialization parameters.
+   * Note: This method is to be used for adding servlets that facilitate
+   * internal communication and not for user-facing functionality. For
+   * servlets added using this method, filters (except internal Kerberos
+   * filters) are not enabled.
+   *
+   * @param name The name of the servlet (can be passed as null)
+   * @param pathSpec The path spec for the servlet
+   * @param clazz The servlet class
+   * @param params init parameters
+   */
+  public void addInternalServlet(String name, String pathSpec,
+      Class<? extends HttpServlet> clazz, Map<String, String> params) {
+    // Jetty doesn't like the same path spec mapping to different servlets, so
+    // if there's already a mapping for this pathSpec, remove it and assume that
+    // the newest one is the one we want
+    final ServletHolder sh = new ServletHolder(clazz);
+    sh.setName(name);
+    sh.setInitParameters(params);
+    final ServletMapping[] servletMappings =
+        webAppContext.getServletHandler().getServletMappings();
+    for (int i = 0; i < servletMappings.length; i++) {
+      if (servletMappings[i].containsPathSpec(pathSpec)) {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Found existing " + servletMappings[i].getServletName() +
+              " servlet at path " + pathSpec + "; will replace mapping" +
+              " with " + sh.getName() + " servlet");
+        }
+        ServletMapping[] newServletMappings =
+            ArrayUtil.removeFromArray(servletMappings, servletMappings[i]);
+        webAppContext.getServletHandler()
+            .setServletMappings(newServletMappings);
+        break;
+      }
+    }
+    webAppContext.addServlet(sh, pathSpec);
+  }
+
+  /**
+   * Add the given handler to the front of the list of handlers.
+   *
+   * @param handler The handler to add
+   */
+  public void addHandlerAtFront(Handler handler) {
+    Handler[] h = ArrayUtil.prependToArray(
+        handler, this.handlers.getHandlers(), Handler.class);
+    handlers.setHandlers(h);
+  }
+
+  /**
+   * Add the given handler to the end of the list of handlers.
+   *
+   * @param handler The handler to add
+   */
+  public void addHandlerAtEnd(Handler handler) {
+    handlers.addHandler(handler);
+  }
+
+  @Override
+  public void addFilter(String name, String classname,
+      Map<String, String> parameters) {
+
+    FilterHolder filterHolder = getFilterHolder(name, classname, parameters);
+    FilterMapping fmap =
+        getFilterMapping(name, new String[] {"*.html", "*.jsp"});
+    defineFilter(webAppContext, filterHolder, fmap);
+    LOG.info(
+        "Added filter " + name + " (class=" + classname + ") to context "
+            + webAppContext.getDisplayName());
+    fmap = getFilterMapping(name, new String[] {"/*"});
+    for (Map.Entry<ServletContextHandler, Boolean> e
+        : defaultContexts.entrySet()) {
+      if (e.getValue()) {
+        ServletContextHandler ctx = e.getKey();
+        defineFilter(ctx, filterHolder, fmap);
+        LOG.info("Added filter " + name + " (class=" + classname
+            + ") to context " + ctx.getDisplayName());
+      }
+    }
+    filterNames.add(name);
+  }
+
+  @Override
+  public void addGlobalFilter(String name, String classname,
+      Map<String, String> parameters) {
+    FilterHolder filterHolder = getFilterHolder(name, classname, parameters);
+    FilterMapping fmap = getFilterMapping(name, new String[] {"/*"});
+    defineFilter(webAppContext, filterHolder, fmap);
+    for (ServletContextHandler ctx : defaultContexts.keySet()) {
+      defineFilter(ctx, filterHolder, fmap);
+    }
+    LOG.info("Added global filter '" + name + "' (class=" + classname + ")");
+  }
+
+  /**
+   * Define a filter for a context and set up default url mappings.
+   */
+  private static void defineFilter(ServletContextHandler ctx, String name,
+      String classname, Map<String, String> parameters, String[] urls) {
+    FilterHolder filterHolder = getFilterHolder(name, classname, parameters);
+    FilterMapping fmap = getFilterMapping(name, urls);
+    defineFilter(ctx, filterHolder, fmap);
+  }
+
+  /**
+   * Define a filter for a context and set up default url mappings.
+   */
+  private static void defineFilter(ServletContextHandler ctx,
+      FilterHolder holder, FilterMapping fmap) {
+    ServletHandler handler = ctx.getServletHandler();
+    handler.addFilter(holder, fmap);
+  }
+
+  private static FilterMapping getFilterMapping(String name, String[] urls) {
+    FilterMapping fmap = new FilterMapping();
+    fmap.setPathSpecs(urls);
+    fmap.setDispatches(FilterMapping.ALL);
+    fmap.setFilterName(name);
+    return fmap;
+  }
+
+  private static FilterHolder getFilterHolder(String name, String classname,
+      Map<String, String> parameters) {
+    FilterHolder holder = new FilterHolder();
+    holder.setName(name);
+    holder.setClassName(classname);
+    if (parameters != null) {
+      holder.setInitParameters(parameters);
+    }
+    return holder;
+  }
+
+  /**
+   * Add the path spec to the filter path mapping.
+   * @param pathSpec The path spec
+   * @param webAppCtx The WebApplicationContext to add to
+   */
+  private void addFilterPathMapping(String pathSpec,
+      ServletContextHandler webAppCtx) {
+    ServletHandler handler = webAppCtx.getServletHandler();
+    for (String name : filterNames) {
+      FilterMapping fmap = new FilterMapping();
+      fmap.setPathSpec(pathSpec);
+      fmap.setFilterName(name);
+      fmap.setDispatches(FilterMapping.ALL);
+      handler.addFilterMapping(fmap);
+    }
+  }
+
+  /**
+   * Get the value in the webapp context.
+   * @param name The name of the attribute
+   * @return The value of the attribute
+   */
+  public Object getAttribute(String name) {
+    return webAppContext.getAttribute(name);
+  }
+
+  WebAppContext getWebAppContext() {
+    return this.webAppContext;
+  }
+
+  /**
+   * Get the pathname to the webapps files.
+   * @param appName e.g. "secondary" or "datanode"
+   * @return the pathname as a URL
+   * @throws FileNotFoundException if 'webapps' directory cannot be found
+   *   on CLASSPATH or in the development location.
+   */
+  private String getWebAppsPath(String appName) throws FileNotFoundException {
+    URL resourceUrl = null;
+    File webResourceDevLocation = new File("src/main/webapps", appName);
+    if (webResourceDevLocation.exists()) {
+      LOG.info("Web server is in development mode. Resources "
+          + "will be read from the source tree.");
+      try {
+        resourceUrl = webResourceDevLocation.getParentFile().toURI().toURL();
+      } catch (MalformedURLException e) {
+        throw new FileNotFoundException("Mailformed URL while finding the "
+            + "web resource dir:" + e.getMessage());
+      }
+    } else {
+      resourceUrl =
+          getClass().getClassLoader().getResource("webapps/" + appName);
+
+      if (resourceUrl == null) {
+        throw new FileNotFoundException("webapps/" + appName +
+            " not found in CLASSPATH");
+      }
+    }
+    String urlString = resourceUrl.toString();
+    return urlString.substring(0, urlString.lastIndexOf('/'));
+  }
+
+  /**
+   * Get the port that the server is on.
+   *
+   * @return the port
+   */
+  @Deprecated
+  public int getPort() {
+    return ((ServerConnector) webServer.getConnectors()[0]).getLocalPort();
+  }
+
+  /**
+   * Get the address that corresponds to a particular connector.
+   *
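+   * <p>Typically called after {@link #start()} to discover the actual bound
+   * port when the endpoint was configured with port 0, e.g. (illustrative):
+   * <pre>{@code
+   * int boundPort = server.getConnectorAddress(0).getPort();
+   * }</pre>
+   *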
+   * @return the corresponding address for the connector, or null if there's no
+   *         such connector or the connector is not bound or was closed.
+   */
+  public InetSocketAddress getConnectorAddress(int index) {
+    Preconditions.checkArgument(index >= 0);
+    if (index >= webServer.getConnectors().length) {
+      return null;
+    }
+
+    ServerConnector c = (ServerConnector) webServer.getConnectors()[index];
+    if (c.getLocalPort() == -1 || c.getLocalPort() == -2) {
+      // The connector is not bound or was closed
+      return null;
+    }
+
+    return new InetSocketAddress(c.getHost(), c.getLocalPort());
+  }
+
+  /**
+   * Set the min, max number of worker threads (simultaneous connections).
+   */
+  public void setThreads(int min, int max) {
+    QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool();
+    pool.setMinThreads(min);
+    pool.setMaxThreads(max);
+  }
+
+  private void initSpnego(Configuration conf, String hostName,
+      String usernameConfKey, String keytabConfKey) throws IOException {
+    Map<String, String> params = new HashMap<>();
+    String principalInConf = conf.get(usernameConfKey);
+    if (principalInConf != null && !principalInConf.isEmpty()) {
+      params.put("kerberos.principal", SecurityUtil.getServerPrincipal(
+          principalInConf, hostName));
+    }
+    String httpKeytab = conf.get(keytabConfKey);
+    if (httpKeytab != null && !httpKeytab.isEmpty()) {
+      params.put("kerberos.keytab", httpKeytab);
+    }
+    params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
+    defineFilter(webAppContext, SPNEGO_FILTER,
+        AuthenticationFilter.class.getName(), params, null);
+  }
+
+  /**
+   * Start the server. Does not wait for the server to start.
+   */
+  public void start() throws IOException {
+    try {
+      try {
+        openListeners();
+        webServer.start();
+      } catch (IOException ex) {
+        LOG.info("HttpServer.start() threw a non Bind IOException", ex);
+        throw ex;
+      } catch (MultiException ex) {
+        LOG.info("HttpServer.start() threw a MultiException", ex);
+        throw ex;
+      }
+      // Make sure there are no handler failures.
+      Handler[] hs = webServer.getHandlers();
+      for (Handler handler : hs) {
+        if (handler.isFailed()) {
+          throw new IOException(
+              "Problem in starting http server. Server handlers failed");
+        }
+      }
+      // Make sure there are no errors initializing the context.
+      Throwable unavailableException = webAppContext.getUnavailableException();
+      if (unavailableException != null) {
+        // Have to stop the webserver, or else its non-daemon threads
+        // will hang forever.
+        webServer.stop();
+        throw new IOException("Unable to initialize WebAppContext",
+            unavailableException);
+      }
+    } catch (IOException e) {
+      throw e;
+    } catch (InterruptedException e) {
+      throw (IOException) new InterruptedIOException(
+          "Interrupted while starting HTTP server").initCause(e);
+    } catch (Exception e) {
+      throw new IOException("Problem starting http server", e);
+    }
+  }
+
+  private void loadListeners() {
+    for (Connector c : listeners) {
+      webServer.addConnector(c);
+    }
+  }
+
+  /**
+   * Bind the listener by closing and reopening it.
+   * @param listener the listener to bind
+   * @throws Exception if the listener cannot be opened
+   */
+  private static void bindListener(ServerConnector listener) throws Exception {
+    // jetty has a bug where you can't reopen a listener that previously
+    // failed to open w/o issuing a close first, even if the port is changed
+    listener.close();
+    listener.open();
+    LOG.info("Jetty bound to port " + listener.getLocalPort());
+  }
+
+  /**
+   * Create a BindException wrapping the exception thrown during binding.
+   * @param listener the listener that failed to bind
+   * @param ex the original exception, set as the cause if non-null
+   * @return the constructed BindException
+   */
+  private static BindException constructBindException(ServerConnector listener,
+      IOException ex) {
+    BindException be = new BindException("Port in use: "
+        + listener.getHost() + ":" + listener.getPort());
+    if (ex != null) {
+      be.initCause(ex);
+    }
+    return be;
+  }
+
+  /**
+   * Bind using the single configured port. If findPort is true, keep
+   * incrementing the port until a free one is found.
+   * @param listener jetty listener.
+   * @param port port which is set in the listener.
+   * @throws Exception
+   */
+  private void bindForSinglePort(ServerConnector listener, int port)
+      throws Exception {
+    while (true) {
+      try {
+        bindListener(listener);
+        break;
+      } catch (IOException ex) {
+        if (port == 0 || !findPort) {
+          throw constructBindException(listener, ex);
+        }
+      }
+      // try the next port number
+      listener.setPort(++port);
+      Thread.sleep(100);
+    }
+  }
+
+  /**
+   * Bind using port ranges. Keep looking for a free port in the range and
+   * throw a bind exception if no port in the configured range binds.
+   * @param listener jetty listener.
+   * @param startPort initial port which is set in the listener.
+   * @throws Exception
+   */
+  private void bindForPortRange(ServerConnector listener, int startPort)
+      throws Exception {
+    IOException ioException = null;
+    try {
+      bindListener(listener);
+      return;
+    } catch (IOException ex) {
+      // Ignore exception.
+      ioException = ex;
+    }
+    for (Integer port : portRanges) {
+      if (port == startPort) {
+        continue;
+      }
+      Thread.sleep(100);
+      listener.setPort(port);
+      try {
+        bindListener(listener);
+        return;
+      } catch (BindException ex) {
+        // Ignore exception. Move to next port.
+        ioException = ex;
+      }
+    }
+    throw constructBindException(listener, ioException);
+  }
+
+  /**
+   * Open the listeners for the server.
+   * @throws Exception
+   */
+  void openListeners() throws Exception {
+    LOG.debug("opening listeners: {}", listeners);
+    for (ServerConnector listener : listeners) {
+      if (listener.getLocalPort() != -1 && listener.getLocalPort() != -2) {
+        // This listener is either started externally or has been bound or was
+        // closed
+        continue;
+      }
+      int port = listener.getPort();
+      if (portRanges != null && port != 0) {
+        bindForPortRange(listener, port);
+      } else {
+        bindForSinglePort(listener, port);
+      }
+    }
+  }
+
+  /**
+   * Stop the server.
+   */
+  public void stop() throws Exception {
+    MultiException exception = null;
+    for (ServerConnector c : listeners) {
+      try {
+        c.close();
+      } catch (Exception e) {
+        LOG.error(
+            "Error while stopping listener for webapp"
+                + webAppContext.getDisplayName(), e);
+        exception = addMultiException(exception, e);
+      }
+    }
+
+    try {
+      // explicitly destroy the secret provider
+      secretProvider.destroy();
+      // clear & stop webAppContext attributes to avoid memory leaks.
+      webAppContext.clearAttributes();
+      webAppContext.stop();
+    } catch (Exception e) {
+      LOG.error("Error while stopping web app context for webapp "
+          + webAppContext.getDisplayName(), e);
+      exception = addMultiException(exception, e);
+    }
+
+    try {
+      webServer.stop();
+    } catch (Exception e) {
+      LOG.error("Error while stopping web server for webapp "
+          + webAppContext.getDisplayName(), e);
+      exception = addMultiException(exception, e);
+    }
+
+    if (exception != null) {
+      exception.ifExceptionThrow();
+    }
+  }
+
+  private MultiException addMultiException(MultiException exception,
+      Exception e) {
+    if (exception == null) {
+      exception = new MultiException();
+    }
+    exception.add(e);
+    return exception;
+  }
+
+  public void join() throws InterruptedException {
+    webServer.join();
+  }
+
+  /**
+   * Test for the availability of the web server.
+   *
+   * @return true if the web server is started, false otherwise
+   */
+  public boolean isAlive() {
+    return webServer != null && webServer.isStarted();
+  }
+
+  @Override
+  public String toString() {
+    Preconditions.checkState(!listeners.isEmpty());
+    StringBuilder sb = new StringBuilder("HttpServer (")
+        .append(isAlive() ? STATE_DESCRIPTION_ALIVE
+            : STATE_DESCRIPTION_NOT_LIVE)
+        .append("), listening at:");
+    for (ServerConnector l : listeners) {
+      sb.append(l.getHost()).append(":").append(l.getPort()).append("/,");
+    }
+    return sb.toString();
+  }
+
+  /**
+   * Checks the user has privileges to access to instrumentation servlets.
+   * <p/>
+   * If <code>hadoop.security.instrumentation.requires.admin</code> is set to
+   * FALSE
+   * (default value) it always returns TRUE.
+   * <p/>
+   * If <code>hadoop.security.instrumentation.requires.admin</code> is set to
+   * TRUE
+   * it will check whether the current user is in the admin ACLs. If the user
+   * is in the admin ACLs it returns TRUE, otherwise it returns FALSE.
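+   * <p>For example (illustrative), setting
+   * <code>hadoop.security.instrumentation.requires.admin</code> to TRUE
+   * restricts instrumentation servlets such as /stacks and /jmx to users in
+   * the admin ACLs.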
+   *
+   * @param servletContext the servlet context.
+   * @param request the servlet request.
+   * @param response the servlet response.
+   * @return TRUE/FALSE based on the logic described above.
+   */
+  public static boolean isInstrumentationAccessAllowed(
+      ServletContext servletContext, HttpServletRequest request,
+      HttpServletResponse response) throws IOException {
+    Configuration conf =
+        (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
+
+    boolean access = true;
+    boolean adminAccess = conf.getBoolean(
+        CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
+        false);
+    if (adminAccess) {
+      access = hasAdministratorAccess(servletContext, request, response);
+    }
+    return access;
+  }
+
+  /**
+   * Does the user sending the HttpServletRequest have administrator ACLs? If
+   * not, the response is modified to send an error to the user.
+   *
+   * @param response used to send the error response if user does not have
+   *                 admin access.
+   * @return true if admin-authorized, false otherwise
+   * @throws IOException
+   */
+  public static boolean hasAdministratorAccess(
+      ServletContext servletContext, HttpServletRequest request,
+      HttpServletResponse response) throws IOException {
+    Configuration conf =
+        (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
+    // If there is no authorization, anybody has administrator access.
+    if (!conf.getBoolean(
+        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
+      return true;
+    }
+
+    String remoteUser = request.getRemoteUser();
+    if (remoteUser == null) {
+      response.sendError(HttpServletResponse.SC_FORBIDDEN,
+          "Unauthenticated users are not " +
+              "authorized to access this page.");
+      return false;
+    }
+
+    if (servletContext.getAttribute(ADMINS_ACL) != null &&
+        !userHasAdministratorAccess(servletContext, remoteUser)) {
+      response.sendError(HttpServletResponse.SC_FORBIDDEN,
+          "User " + remoteUser + " is not " +
+              "authorized to access this page.");
+      LOG.warn("User " + remoteUser + " is unauthorized to access the page "
+          + request.getRequestURI() + ".");
+      return false;
+    }
+
+    return true;
+  }
+
+  /**
+   * Get the admin ACLs from the given ServletContext and check if the given
+   * user is in the ACL.
+   *
+   * @param servletContext the context containing the admin ACL.
+   * @param remoteUser the remote user to check for.
+   * @return true if the user is present in the ACL, false if no ACL is set or
+   *         the user is not present
+   */
+  public static boolean userHasAdministratorAccess(
+      ServletContext servletContext,
+      String remoteUser) {
+    AccessControlList adminsAcl = (AccessControlList) servletContext
+        .getAttribute(ADMINS_ACL);
+    UserGroupInformation remoteUserUGI =
+        UserGroupInformation.createRemoteUser(remoteUser);
+    return adminsAcl != null && adminsAcl.isUserAllowed(remoteUserUGI);
+  }
+
+  /**
+   * A very simple servlet to serve up a text representation of the current
+   * stack traces. It both returns the stacks to the caller and logs them.
+   * The two dumps are taken sequentially, so they may not capture exactly
+   * the same data.
+   */
+  public static class StackServlet extends HttpServlet {
+    private static final long serialVersionUID = -6284183679759467039L;
+
+    @Override
+    public void doGet(HttpServletRequest request, HttpServletResponse response)
+        throws ServletException, IOException {
+      if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(),
+          request, response)) {
+        return;
+      }
+      response.setContentType("text/plain; charset=UTF-8");
+      try (PrintStream out = new PrintStream(
+          response.getOutputStream(), false, "UTF-8")) {
+        ReflectionUtils.printThreadInfo(out, "");
+      }
+      ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
+    }
+  }
+
+  /**
+   * A Servlet input filter that quotes all HTML active characters in the
+   * parameter names and values. The goal is to quote the characters to make
+   * all of the servlets resistant to cross-site scripting attacks. It also
+   * sets X-FRAME-OPTIONS in the header to mitigate clickjacking attacks.
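+   * <p>For example, a request parameter value of {@code <script>} is exposed
+   * through the wrapped request as {@code &lt;script&gt;}.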
+   */
+  public static class QuotingInputFilter implements Filter {
+
+    private FilterConfig config;
+    private Map<String, String> headerMap;
+
+    /**
+     * HttpServletRequest wrapper which forces quotation.
+     */
+    public static class RequestQuoter extends HttpServletRequestWrapper {
+      private final HttpServletRequest rawRequest;
+
+      public RequestQuoter(HttpServletRequest rawRequest) {
+        super(rawRequest);
+        this.rawRequest = rawRequest;
+      }
+
+      /**
+       * Return the set of parameter names, quoting each name.
+       */
+      @SuppressWarnings("unchecked")
+      @Override
+      public Enumeration<String> getParameterNames() {
+        return new Enumeration<String>() {
+          private Enumeration<String> rawIterator =
+              rawRequest.getParameterNames();
+          @Override
+          public boolean hasMoreElements() {
+            return rawIterator.hasMoreElements();
+          }
+
+          @Override
+          public String nextElement() {
+            return HtmlQuoting.quoteHtmlChars(rawIterator.nextElement());
+          }
+        };
+      }
+
+      /**
+       * Unquote the name and quote the value.
+       */
+      @Override
+      public String getParameter(String name) {
+        return HtmlQuoting.quoteHtmlChars(
+            rawRequest.getParameter(HtmlQuoting.unquoteHtmlChars(name)));
+      }
+
+      @Override
+      public String[] getParameterValues(String name) {
+        String unquoteName = HtmlQuoting.unquoteHtmlChars(name);
+        String[] unquoteValue = rawRequest.getParameterValues(unquoteName);
+        if (unquoteValue == null) {
+          return null;
+        }
+        String[] result = new String[unquoteValue.length];
+        for (int i = 0; i < result.length; ++i) {
+          result[i] = HtmlQuoting.quoteHtmlChars(unquoteValue[i]);
+        }
+        return result;
+      }
+
+      @SuppressWarnings("unchecked")
+      @Override
+      public Map<String, String[]> getParameterMap() {
+        Map<String, String[]> result = new HashMap<>();
+        Map<String, String[]> raw = rawRequest.getParameterMap();
+        for (Map.Entry<String, String[]> item : raw.entrySet()) {
+          String[] rawValue = item.getValue();
+          String[] cookedValue = new String[rawValue.length];
+          for (int i = 0; i < rawValue.length; ++i) {
+            cookedValue[i] = HtmlQuoting.quoteHtmlChars(rawValue[i]);
+          }
+          result.put(HtmlQuoting.quoteHtmlChars(item.getKey()), cookedValue);
+        }
+        return result;
+      }
+
+      /**
+       * Quote the url so that users specifying the HOST HTTP header
+       * can't inject attacks.
+       */
+      @Override
+      public StringBuffer getRequestURL() {
+        String url = rawRequest.getRequestURL().toString();
+        return new StringBuffer(HtmlQuoting.quoteHtmlChars(url));
+      }
+
+      /**
+       * Quote the server name so that users specifying the HOST HTTP header
+       * can't inject attacks.
+       */
+      @Override
+      public String getServerName() {
+        return HtmlQuoting.quoteHtmlChars(rawRequest.getServerName());
+      }
+    }
+
+    @Override
+    public void init(FilterConfig filterConfig) throws ServletException {
+      this.config = filterConfig;
+      initHttpHeaderMap();
+    }
+
+    @Override
+    public void destroy() {
+    }
+
+    @Override
+    public void doFilter(ServletRequest request,
+        ServletResponse response,
+        FilterChain chain
+    ) throws IOException, ServletException {
+      HttpServletRequestWrapper quoted =
+          new RequestQuoter((HttpServletRequest) request);
+      HttpServletResponse httpResponse = (HttpServletResponse) response;
+
+      String mime = inferMimeType(request);
+      if (mime == null) {
+        httpResponse.setContentType("text/plain; charset=utf-8");
+      } else if (mime.startsWith("text/html")) {
+        // HTML with unspecified encoding, we want to
+        // force HTML with utf-8 encoding
+        // This is to avoid the following security issue:
+        // http://openmya.hacker.jp/hasegawa/security/utf7cs.html
+        httpResponse.setContentType("text/html; charset=utf-8");
+      } else if (mime.startsWith("application/xml")) {
+        httpResponse.setContentType("text/xml; charset=utf-8");
+      }
+      headerMap.forEach((k, v) -> httpResponse.addHeader(k, v));
+      chain.doFilter(quoted, httpResponse);
+    }
+
+    /**
+     * Infer the mime type for the response based on the extension of the
+     * request URI. Returns null if unknown.
+     */
+    private String inferMimeType(ServletRequest request) {
+      String path = ((HttpServletRequest) request).getRequestURI();
+      ServletContextHandler.Context sContext =
+          (ServletContextHandler.Context) config.getServletContext();
+      String mime = sContext.getMimeType(path);
+      return (mime == null) ? null : mime;
+    }
+
+    private void initHttpHeaderMap() {
+      Enumeration<String> params = this.config.getInitParameterNames();
+      headerMap = new HashMap<>();
+      while (params.hasMoreElements()) {
+        String key = params.nextElement();
+        Matcher m = PATTERN_HTTP_HEADER_REGEX.matcher(key);
+        if (m.matches()) {
+          String headerKey = m.group(1);
+          headerMap.put(headerKey, config.getInitParameter(key));
+        }
+      }
+    }
+  }
+
+  /**
+   * The X-FRAME-OPTIONS header in the HTTP response, used to mitigate
+   * clickjacking attacks.
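+   * <p>Configured through the builder, e.g. (illustrative):
+   * <pre>{@code
+   * builder.configureXFrame(true).setXFrameOption("SAMEORIGIN");
+   * }</pre>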
+   */
+  public enum XFrameOption {
+    DENY("DENY"), SAMEORIGIN("SAMEORIGIN"), ALLOWFROM("ALLOW-FROM");
+
+    XFrameOption(String name) {
+      this.name = name;
+    }
+
+    private final String name;
+
+    @Override
+    public String toString() {
+      return this.name;
+    }
+
+    /**
+     * We cannot use valueOf since the ALLOWFROM enum constant differs from its
+     * value ALLOW-FROM. This is a helper method that does exactly what valueOf
+     * does, but allows us to handle the ALLOWFROM case gracefully.
+     *
+     * @param value - String must be DENY, SAMEORIGIN or ALLOW-FROM.
+     * @return XFrameOption or throws IllegalArgumentException.
+     */
+    private static XFrameOption getEnum(String value) {
+      Preconditions.checkState(value != null && !value.isEmpty());
+      for (XFrameOption xoption : values()) {
+        if (value.equals(xoption.toString())) {
+          return xoption;
+        }
+      }
+      throw new IllegalArgumentException("Unexpected value in xFrameOption.");
+    }
+  }
+
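+  /**
+   * Collect the HTTP headers to set on every response. Besides the built-in
+   * defaults (and X-FRAME-OPTIONS when enabled), any configuration property
+   * matching hadoop.http.header.<HeaderName> is added as a response header,
+   * e.g. (illustrative):
+   *   hadoop.http.header.Content-Security-Policy = default-src 'self'
+   */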
+  private Map<String, String> setHeaders(Configuration conf) {
+    Map<String, String> xFrameParams = new HashMap<>();
+    Map<String, String> headerConfigMap =
+        conf.getValByRegex(HTTP_HEADER_REGEX);
+
+    xFrameParams.putAll(getDefaultHeaders());
+    if (this.xFrameOptionIsEnabled) {
+      xFrameParams.put(HTTP_HEADER_PREFIX + X_FRAME_OPTIONS,
+          this.xFrameOption.toString());
+    }
+    xFrameParams.putAll(headerConfigMap);
+    return xFrameParams;
+  }
+
+  private Map<String, String> getDefaultHeaders() {
+    Map<String, String> headers = new HashMap<>();
+    String[] splitVal = X_CONTENT_TYPE_OPTIONS.split(":");
+    headers.put(HTTP_HEADER_PREFIX + splitVal[0],
+        splitVal[1]);
+    splitVal = X_XSS_PROTECTION.split(":");
+    headers.put(HTTP_HEADER_PREFIX + splitVal[0],
+        splitVal[1]);
+    return headers;
+  }
+}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/LogStreamServlet.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/LogStreamServlet.java
similarity index 97%
rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/LogStreamServlet.java
rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/LogStreamServlet.java
index 1869c8b..d111547 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/LogStreamServlet.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/LogStreamServlet.java
@@ -15,7 +15,7 @@
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/NoCacheFilter.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/NoCacheFilter.java
new file mode 100644
index 0000000..825357b
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/NoCacheFilter.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.server.http;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+/**
+ * Servlet filter to add no-cache headers.
+ */
+public class NoCacheFilter implements Filter {
+
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+  }
+
+  @Override
+  public void doFilter(ServletRequest req, ServletResponse res,
+      FilterChain chain)
+      throws IOException, ServletException {
+    HttpServletResponse httpRes = (HttpServletResponse) res;
+    httpRes.setHeader("Cache-Control", "no-cache");
+    long now = System.currentTimeMillis();
+    httpRes.addDateHeader("Expires", now);
+    httpRes.addDateHeader("Date", now);
+    httpRes.addHeader("Pragma", "no-cache");
+    chain.doFilter(req, res);
+  }
+
+  @Override
+  public void destroy() {
+  }
+
+}
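
For context on how this filter is meant to be mounted: a self-contained sketch of wiring NoCacheFilter into an embedded Jetty 9.4 server (the port and path mapping are illustrative and not part of this commit):

    import java.util.EnumSet;

    import javax.servlet.DispatcherType;

    import org.apache.hadoop.hdds.server.http.NoCacheFilter;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.servlet.FilterHolder;
    import org.eclipse.jetty.servlet.ServletContextHandler;

    public class NoCacheFilterDemo {
      public static void main(String[] args) throws Exception {
        Server server = new Server(8080);
        ServletContextHandler context = new ServletContextHandler();
        context.setContextPath("/");
        // Every response served through this context now carries the
        // Cache-Control, Expires, Date and Pragma headers set above.
        context.addFilter(new FilterHolder(new NoCacheFilter()), "/*",
            EnumSet.of(DispatcherType.REQUEST));
        server.setHandler(context);
        server.start();
        server.join();
      }
    }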
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ProfileServlet.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/ProfileServlet.java
similarity index 99%
rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ProfileServlet.java
rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/ProfileServlet.java
index 0d8bffb..d290677 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ProfileServlet.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/ProfileServlet.java
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusMetricsSink.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java
similarity index 99%
rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusMetricsSink.java
rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java
index f5f51e7..1bd4734 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusMetricsSink.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import static org.apache.hadoop.hdds.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;
 
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusServlet.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusServlet.java
similarity index 97%
rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusServlet.java
rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusServlet.java
index 76bae2a..77c2136 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusServlet.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusServlet.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/RatisDropwizardExports.java
similarity index 96%
copy from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
copy to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/RatisDropwizardExports.java
index 899fd9e..1c6a60f 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/RatisDropwizardExports.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import com.codahale.metrics.MetricRegistry;
 import io.prometheus.client.dropwizard.DropwizardExports;
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisNameRewriteSampleBuilder.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/RatisNameRewriteSampleBuilder.java
similarity index 98%
rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisNameRewriteSampleBuilder.java
rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/RatisNameRewriteSampleBuilder.java
index 107c651..7d02f21 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisNameRewriteSampleBuilder.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/RatisNameRewriteSampleBuilder.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import java.util.ArrayList;
 import java.util.Arrays;
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/StaticUserWebFilter.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/StaticUserWebFilter.java
new file mode 100644
index 0000000..c2d88cf
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/StaticUserWebFilter.java
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.server.http;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+import java.io.IOException;
+import java.security.Principal;
+import java.util.HashMap;
+
+import org.apache.hadoop.conf.Configuration;
+
+import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Provides a servlet filter that pretends to authenticate a fake user (Dr.Who)
+ * so that the web UI is usable on a secure cluster without authentication.
+ */
+public class StaticUserWebFilter extends FilterInitializer {
+  static final String DEPRECATED_UGI_KEY = "dfs.web.ugi";
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(StaticUserWebFilter.class);
+
+  static class User implements Principal {
+    private final String name;
+
+    User(String name) {
+      this.name = name;
+    }
+
+    @Override
+    public String getName() {
+      return name;
+    }
+
+    @Override
+    public int hashCode() {
+      return name.hashCode();
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (other == this) {
+        return true;
+      } else if (other == null || other.getClass() != getClass()) {
+        return false;
+      }
+      return ((User) other).name.equals(name);
+    }
+
+    @Override
+    public String toString() {
+      return name;
+    }
+  }
+
+  /**
+   * JavaEE filter implementation to do the work.
+   */
+  public static class StaticUserFilter implements Filter {
+    private User user;
+    private String username;
+
+    @Override
+    public void destroy() {
+      // NOTHING
+    }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain chain
+    ) throws IOException, ServletException {
+      HttpServletRequest httpRequest = (HttpServletRequest) request;
+      // if the user is already authenticated, don't override it
+      if (httpRequest.getRemoteUser() != null) {
+        chain.doFilter(request, response);
+      } else {
+        HttpServletRequestWrapper wrapper =
+            new HttpServletRequestWrapper(httpRequest) {
+              @Override
+              public Principal getUserPrincipal() {
+                return user;
+              }
+
+              @Override
+              public String getRemoteUser() {
+                return username;
+              }
+            };
+        chain.doFilter(wrapper, response);
+      }
+    }
+
+    @Override
+    public void init(FilterConfig conf) throws ServletException {
+      this.username = conf.getInitParameter(HADOOP_HTTP_STATIC_USER);
+      this.user = new User(username);
+    }
+
+  }
+
+  @Override
+  public void initFilter(FilterContainer container, Configuration conf) {
+    HashMap<String, String> options = new HashMap<>();
+
+    String username = getUsernameFromConf(conf);
+    options.put(HADOOP_HTTP_STATIC_USER, username);
+
+    container.addFilter("static_user_filter",
+        StaticUserFilter.class.getName(),
+        options);
+  }
+
+  /**
+   * Retrieve the static username from the configuration.
+   */
+  static String getUsernameFromConf(Configuration conf) {
+    String oldStyleUgi = conf.get(DEPRECATED_UGI_KEY);
+    if (oldStyleUgi != null) {
+      // We can't use the normal configuration deprecation mechanism here
+      // since we need to split out the username from the configured UGI.
+      LOG.warn(DEPRECATED_UGI_KEY + " should not be used. Instead, use " +
+          HADOOP_HTTP_STATIC_USER + ".");
+      String[] parts = oldStyleUgi.split(",");
+      return parts[0];
+    } else {
+      return conf.get(HADOOP_HTTP_STATIC_USER,
+          DEFAULT_HADOOP_HTTP_STATIC_USER);
+    }
+  }
+
+}
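
To illustrate the username resolution in getUsernameFromConf(): a small demo, assuming the Hadoop constants behind the imports (HADOOP_HTTP_STATIC_USER is "hadoop.http.staticuser.user" and the default user is "dr.who"). The class is hypothetical and has to sit in the same package, since getUsernameFromConf is package-private:

    package org.apache.hadoop.hdds.server.http;

    import org.apache.hadoop.conf.Configuration;

    public class StaticUserDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // No key set: falls back to the default static user, "dr.who".
        System.out.println(StaticUserWebFilter.getUsernameFromConf(conf));

        // The deprecated dfs.web.ugi key takes precedence when present,
        // but only its first comma-separated component is used and a
        // deprecation warning is logged.
        conf.set("dfs.web.ugi", "legacy,supergroup");
        System.out.println(StaticUserWebFilter.getUsernameFromConf(conf));
      }
    }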
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/package-info.java
similarity index 52%
copy from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
copy to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/package-info.java
index 899fd9e..b8b7cc9 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/package-info.java
@@ -6,33 +6,18 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
 
-import com.codahale.metrics.MetricRegistry;
-import io.prometheus.client.dropwizard.DropwizardExports;
-import io.prometheus.client.dropwizard.samplebuilder.DefaultSampleBuilder;
+package org.apache.hadoop.hdds.server.http;
 
 /**
- * Collect Dropwizard metrics, but rename ratis specific metrics.
- */
-public class RatisDropwizardExports extends DropwizardExports {
-
-  /**
-   * Creates a new DropwizardExports with a {@link DefaultSampleBuilder}.
-   *
-   * @param registry a metric registry to export in prometheus.
-   */
-  public RatisDropwizardExports(MetricRegistry registry) {
-    super(registry, new RatisNameRewriteSampleBuilder());
-  }
-
-}
\ No newline at end of file
+ * Servlets and utilities for the embedded web servers of Ozone services.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestBaseHttpServer.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestBaseHttpServer.java
similarity index 98%
rename from hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestBaseHttpServer.java
rename to hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestBaseHttpServer.java
index c6eae0e..d1832e2 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestBaseHttpServer.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestBaseHttpServer.java
@@ -15,9 +15,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import org.apache.hadoop.conf.Configuration;
+
 import org.junit.Assert;
 import org.junit.Test;
 
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestHtmlQuoting.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestHtmlQuoting.java
new file mode 100644
index 0000000..1410e17
--- /dev/null
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestHtmlQuoting.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.server.http;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ * Testing HTML Quoting.
+ */
+public class TestHtmlQuoting {
+
+  @Test
+  public void testNeedsQuoting() throws Exception {
+    assertTrue(HtmlQuoting.needsQuoting("abcde>"));
+    assertTrue(HtmlQuoting.needsQuoting("<abcde"));
+    assertTrue(HtmlQuoting.needsQuoting("abc'de"));
+    assertTrue(HtmlQuoting.needsQuoting("abcde\""));
+    assertTrue(HtmlQuoting.needsQuoting("&"));
+    assertFalse(HtmlQuoting.needsQuoting(""));
+    assertFalse(HtmlQuoting.needsQuoting("ab\ncdef"));
+    assertFalse(HtmlQuoting.needsQuoting(null));
+  }
+
+  @Test
+  public void testQuoting() throws Exception {
+    assertEquals("ab&lt;cd", HtmlQuoting.quoteHtmlChars("ab<cd"));
+    assertEquals("ab&gt;", HtmlQuoting.quoteHtmlChars("ab>"));
+    assertEquals("&amp;&amp;&amp;", HtmlQuoting.quoteHtmlChars("&&&"));
+    assertEquals(" &apos;\n", HtmlQuoting.quoteHtmlChars(" '\n"));
+    assertEquals("&quot;", HtmlQuoting.quoteHtmlChars("\""));
+    assertEquals(null, HtmlQuoting.quoteHtmlChars(null));
+  }
+
+  private void runRoundTrip(String str) throws Exception {
+    assertEquals(str,
+        HtmlQuoting.unquoteHtmlChars(HtmlQuoting.quoteHtmlChars(str)));
+  }
+
+  @Test
+  public void testRoundtrip() throws Exception {
+    runRoundTrip("");
+    runRoundTrip("<>&'\"");
+    runRoundTrip("ab>cd<ef&ghi'\"");
+    runRoundTrip("A string\n with no quotable chars in it!");
+    runRoundTrip(null);
+    StringBuilder buffer = new StringBuilder();
+    for (char ch = 0; ch < 127; ++ch) {
+      buffer.append(ch);
+    }
+    runRoundTrip(buffer.toString());
+  }
+
+  @Test
+  public void testRequestQuoting() throws Exception {
+    HttpServletRequest mockReq = Mockito.mock(HttpServletRequest.class);
+    HttpServer2.QuotingInputFilter.RequestQuoter quoter =
+        new HttpServer2.QuotingInputFilter.RequestQuoter(mockReq);
+
+    Mockito.doReturn("a<b").when(mockReq).getParameter("x");
+    assertEquals("Test simple param quoting",
+        "a&lt;b", quoter.getParameter("x"));
+
+    Mockito.doReturn(null).when(mockReq).getParameter("x");
+    assertEquals("Test that missing parameters dont cause NPE",
+        null, quoter.getParameter("x"));
+
+    Mockito.doReturn(new String[] {"a<b", "b"}).when(mockReq)
+        .getParameterValues("x");
+    assertArrayEquals("Test escaping of an array",
+        new String[] {"a&lt;b", "b"}, quoter.getParameterValues("x"));
+
+    Mockito.doReturn(null).when(mockReq).getParameterValues("x");
+    assertArrayEquals("Test that missing parameters dont cause NPE for array",
+        null, quoter.getParameterValues("x"));
+  }
+}
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestHttpRequestLog.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestHttpRequestLog.java
new file mode 100644
index 0000000..1a10452
--- /dev/null
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestHttpRequestLog.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.server.http;
+
+import org.apache.log4j.Logger;
+import org.eclipse.jetty.server.CustomRequestLog;
+import org.eclipse.jetty.server.RequestLog;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+/**
+ * Testing HttpRequestLog.
+ */
+public class TestHttpRequestLog {
+
+  @Test
+  public void testAppenderUndefined() {
+    RequestLog requestLog = HttpRequestLog.getRequestLog("test");
+    assertNull("RequestLog should be null", requestLog);
+  }
+
+  @Test
+  public void testAppenderDefined() {
+    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
+    requestLogAppender.setName("testrequestlog");
+    Logger.getLogger("http.requests.test").addAppender(requestLogAppender);
+    RequestLog requestLog = HttpRequestLog.getRequestLog("test");
+    Logger.getLogger("http.requests.test").removeAppender(requestLogAppender);
+    assertNotNull("RequestLog should not be null", requestLog);
+    assertEquals("Class mismatch",
+        CustomRequestLog.class, requestLog.getClass());
+  }
+}
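
The assertion against CustomRequestLog reflects one of the visible API changes behind this upgrade: in the Jetty 9.4 line, NCSARequestLog is deprecated in favor of CustomRequestLog combined with a RequestLogWriter. A hedged sketch of constructing the 9.4-style request log directly (file name and retention below are illustrative):

    import org.eclipse.jetty.server.CustomRequestLog;
    import org.eclipse.jetty.server.RequestLogWriter;
    import org.eclipse.jetty.server.Server;

    public class RequestLogDemo {
      public static void main(String[] args) {
        // The writer owns the rolling log file and its retention policy.
        RequestLogWriter writer =
            new RequestLogWriter("jetty-yyyy_mm_dd.request.log");
        writer.setRetainDays(17);
        // EXTENDED_NCSA_FORMAT is a format constant shipped with 9.4.
        CustomRequestLog log =
            new CustomRequestLog(writer, CustomRequestLog.EXTENDED_NCSA_FORMAT);
        Server server = new Server(8080);
        server.setRequestLog(log);
      }
    }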
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestHttpRequestLogAppender.java
similarity index 56%
copy from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
copy to hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestHttpRequestLogAppender.java
index 899fd9e..78bea69 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestHttpRequestLogAppender.java
@@ -15,24 +15,26 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
-import com.codahale.metrics.MetricRegistry;
-import io.prometheus.client.dropwizard.DropwizardExports;
-import io.prometheus.client.dropwizard.samplebuilder.DefaultSampleBuilder;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
 
 /**
- * Collect Dropwizard metrics, but rename ratis specific metrics.
+ * Tests the HTTP request log appender.
  */
-public class RatisDropwizardExports extends DropwizardExports {
+public class TestHttpRequestLogAppender {
 
-  /**
-   * Creates a new DropwizardExports with a {@link DefaultSampleBuilder}.
-   *
-   * @param registry a metric registry to export in prometheus.
-   */
-  public RatisDropwizardExports(MetricRegistry registry) {
-    super(registry, new RatisNameRewriteSampleBuilder());
-  }
+  @Test
+  public void testParameterPropagation() {
 
-}
\ No newline at end of file
+    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
+    requestLogAppender.setFilename("jetty-namenode-yyyy_mm_dd.log");
+    requestLogAppender.setRetainDays(17);
+    assertEquals("Filename mismatch", "jetty-namenode-yyyy_mm_dd.log",
+        requestLogAppender.getFilename());
+    assertEquals("Retain days mismatch", 17,
+        requestLogAppender.getRetainDays());
+  }
+}
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestProfileServlet.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestProfileServlet.java
similarity index 90%
rename from hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestProfileServlet.java
rename to hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestProfileServlet.java
index 1c4adf6..8890c1c 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestProfileServlet.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestProfileServlet.java
@@ -15,12 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hdds.server.ProfileServlet.Event;
-import org.apache.hadoop.hdds.server.ProfileServlet.Output;
+import org.apache.hadoop.hdds.server.http.ProfileServlet.Event;
+import org.apache.hadoop.hdds.server.http.ProfileServlet.Output;
 
 import org.junit.Test;
 
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestPrometheusMetricsSink.java
similarity index 99%
rename from hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java
rename to hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestPrometheusMetricsSink.java
index 7617bc7..5f3ffce 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestPrometheusMetricsSink.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestRatisDropwizardExports.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestRatisDropwizardExports.java
similarity index 98%
rename from hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestRatisDropwizardExports.java
rename to hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestRatisDropwizardExports.java
index 0828875..0055beb 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestRatisDropwizardExports.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestRatisDropwizardExports.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import java.io.IOException;
 import java.io.StringWriter;
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestRatisNameRewrite.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestRatisNameRewrite.java
similarity index 98%
rename from hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestRatisNameRewrite.java
rename to hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestRatisNameRewrite.java
index cae8967..2f24dca 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestRatisNameRewrite.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestRatisNameRewrite.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
+package org.apache.hadoop.hdds.server.http;
 
 import java.util.ArrayList;
 import java.util.Arrays;
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/package-info.java
similarity index 52%
rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
rename to hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/package-info.java
index 899fd9e..4f67352 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/RatisDropwizardExports.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/package-info.java
@@ -6,33 +6,18 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.server;
 
-import com.codahale.metrics.MetricRegistry;
-import io.prometheus.client.dropwizard.DropwizardExports;
-import io.prometheus.client.dropwizard.samplebuilder.DefaultSampleBuilder;
+package org.apache.hadoop.hdds.server.http;
 
 /**
- * Collect Dropwizard metrics, but rename ratis specific metrics.
- */
-public class RatisDropwizardExports extends DropwizardExports {
-
-  /**
-   * Creates a new DropwizardExports with a {@link DefaultSampleBuilder}.
-   *
-   * @param registry a metric registry to export in prometheus.
-   */
-  public RatisDropwizardExports(MetricRegistry registry) {
-    super(registry, new RatisNameRewriteSampleBuilder());
-  }
-
-}
\ No newline at end of file
+ * Tests for the embedded web server of Ozone services.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
index b04267a..3f963fd 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hdds.scm.server;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.scm.ScmConfigKeys;
-import org.apache.hadoop.hdds.server.BaseHttpServer;
+import org.apache.hadoop.hdds.server.http.BaseHttpServer;
 
 import java.io.IOException;
 
diff --git a/hadoop-ozone/insight/src/main/java/org/apache/hadoop/ozone/insight/BaseInsightPoint.java b/hadoop-ozone/insight/src/main/java/org/apache/hadoop/ozone/insight/BaseInsightPoint.java
index b6c1c74..ef2011e 100644
--- a/hadoop-ozone/insight/src/main/java/org/apache/hadoop/ozone/insight/BaseInsightPoint.java
+++ b/hadoop-ozone/insight/src/main/java/org/apache/hadoop/ozone/insight/BaseInsightPoint.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.hdds.scm.client.ContainerOperationClient;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.apache.hadoop.hdds.server.PrometheusMetricsSink;
+import org.apache.hadoop.hdds.server.http.PrometheusMetricsSink;
 import org.apache.hadoop.ozone.insight.LoggerSource.Level;
 
 import com.google.protobuf.ProtocolMessageEnum;
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java
index b98d6d3..38dc1ad 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.ozone.om;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.hdds.server.BaseHttpServer;
+import org.apache.hadoop.hdds.server.http.BaseHttpServer;
 
 import java.io.IOException;
 
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ha/OMNodeDetails.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ha/OMNodeDetails.java
index 7d69b93..7ed666b 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ha/OMNodeDetails.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ha/OMNodeDetails.java
@@ -18,7 +18,8 @@
 package org.apache.hadoop.ozone.om.ha;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.http.HttpConfig;
+
+import org.apache.hadoop.hdds.server.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
 
 import java.net.InetAddress;
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java
index c175936..642bbcd 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java
@@ -18,26 +18,6 @@
 
 package org.apache.hadoop.ozone.om.snapshot;
 
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.hdds.HddsUtils;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.ozone.om.ha.OMNodeDetails;
-import org.apache.hadoop.hdds.utils.db.DBCheckpoint;
-import org.apache.hadoop.hdds.utils.db.RocksDBCheckpoint;
-import org.apache.http.Header;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.config.RequestConfig;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.util.EntityUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
@@ -48,16 +28,35 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.server.http.HttpConfig;
+import org.apache.hadoop.hdds.utils.db.DBCheckpoint;
+import org.apache.hadoop.hdds.utils.db.RocksDBCheckpoint;
+import org.apache.hadoop.ozone.om.ha.OMNodeDetails;
+
 import static java.net.HttpURLConnection.HTTP_CREATED;
 import static java.net.HttpURLConnection.HTTP_OK;
+import org.apache.commons.io.FileUtils;
 import static org.apache.hadoop.ozone.OzoneConsts.OM_RATIS_SNAPSHOT_INDEX;
 import static org.apache.hadoop.ozone.OzoneConsts.OM_RATIS_SNAPSHOT_TERM;
 import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_SNAPSHOT_PROVIDER_CONNECTION_TIMEOUT_DEFAULT;
+import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_SNAPSHOT_PROVIDER_CONNECTION_TIMEOUT_KEY;
 import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_SNAPSHOT_PROVIDER_REQUEST_TIMEOUT_DEFAULT;
 import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_SNAPSHOT_PROVIDER_REQUEST_TIMEOUT_KEY;
-import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_SNAPSHOT_PROVIDER_CONNECTION_TIMEOUT_KEY;
 import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_SNAPSHOT_PROVIDER_SOCKET_TIMEOUT_DEFAULT;
 import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_SNAPSHOT_PROVIDER_SOCKET_TIMEOUT_KEY;
+import org.apache.http.Header;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.util.EntityUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * OzoneManagerSnapshotProvider downloads the latest checkpoint from the
@@ -87,7 +86,7 @@ public class OzoneManagerSnapshotProvider {
       this.peerNodesMap.put(peerNode.getOMNodeId(), peerNode);
     }
 
-    this.httpPolicy = HddsUtils.getHttpPolicy(conf);
+    this.httpPolicy = HttpConfig.getHttpPolicy(conf);
     this.httpRequestConfig = getHttpRequestConfig(conf);
   }
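
After the switch above from HddsUtils.getHttpPolicy to HttpConfig.getHttpPolicy, callers resolve the HTTP policy entirely from the new hdds-side HttpConfig. A minimal sketch of a caller, assuming HttpConfig.Policy mirrors Hadoop's enum and exposes isHttpsEnabled():

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.server.http.HttpConfig;

    public class HttpPolicyDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        HttpConfig.Policy policy = HttpConfig.getHttpPolicy(conf);
        // Pick the scheme for peer-OM checkpoint downloads, mirroring
        // how the snapshot provider uses its httpPolicy field.
        String scheme = policy.isHttpsEnabled() ? "https" : "http";
        System.out.println("OM checkpoint URL scheme: " + scheme);
      }
    }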
 
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconHttpServer.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconHttpServer.java
index ea80eac..b4041e2 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconHttpServer.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconHttpServer.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.recon;
 import java.io.IOException;
 
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.hdds.server.BaseHttpServer;
+import org.apache.hadoop.hdds.server.http.BaseHttpServer;
 
 import com.google.inject.Inject;
 
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java
index 020a5fa..486ff65 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java
@@ -47,9 +47,8 @@ import javax.inject.Inject;
 import javax.inject.Singleton;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.hdds.HddsUtils;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.hdds.server.http.HttpConfig;
 import org.apache.hadoop.ozone.om.OMConfigKeys;
 import org.apache.hadoop.ozone.om.OMMetadataManager;
 import org.apache.hadoop.ozone.om.protocol.OzoneManagerProtocol;
@@ -123,7 +122,7 @@ public class OzoneManagerServiceProviderImpl
     omSnapshotDBParentDir = reconUtils.getReconDbDir(configuration,
         OZONE_RECON_OM_SNAPSHOT_DB_DIR);
 
-    HttpConfig.Policy policy = HddsUtils.getHttpPolicy(configuration);
+    HttpConfig.Policy policy = HttpConfig.getHttpPolicy(configuration);
 
     int socketTimeout = (int) configuration.getTimeDuration(
         RECON_OM_SOCKET_TIMEOUT, RECON_OM_SOCKET_TIMEOUT_DEFAULT,
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/Gateway.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/Gateway.java
index 061a2d7..09fcb01 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/Gateway.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/Gateway.java
@@ -48,6 +48,7 @@ public class Gateway extends GenericCli {
   public Void call() throws Exception {
     OzoneConfiguration ozoneConfiguration = createOzoneConfiguration();
     OzoneConfigurationHolder.setConfiguration(ozoneConfiguration);
+    ozoneConfiguration.set("hadoop.http.authentication.type", "simple");
     httpServer = new S3GatewayHttpServer(ozoneConfiguration, "s3gateway");
     start();
     return null;
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java
index f3d8341..b164e54 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.s3;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdds.server.BaseHttpServer;
+import org.apache.hadoop.hdds.server.http.BaseHttpServer;
 
 /**
  * S3 Gateway specific configuration keys.
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/FreonHttpServer.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/FreonHttpServer.java
index dab4889..c8deb98 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/FreonHttpServer.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/FreonHttpServer.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.freon;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdds.server.BaseHttpServer;
+import org.apache.hadoop.hdds.server.http.BaseHttpServer;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 
 /**
diff --git a/pom.xml b/pom.xml
index 0b12f5a..d641c39 100644
--- a/pom.xml
+++ b/pom.xml
@@ -98,7 +98,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs
 
     <failIfNoTests>false</failIfNoTests>
     <maven.test.redirectTestOutputToFile>true</maven.test.redirectTestOutputToFile>
-    <jetty.version>9.3.25.v20180904</jetty.version>
+    <jetty.version>9.4.26.v20200117</jetty.version>
     <test.exclude>_</test.exclude>
     <test.exclude.pattern>_</test.exclude.pattern>
 

