Posted to commits@hbase.apache.org by bu...@apache.org on 2017/11/01 22:13:56 UTC

[11/15] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
deleted file mode 100644
index 4fad031..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
+++ /dev/null
@@ -1,258 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.security.Principal;
-import java.security.PrivilegedExceptionAction;
-import java.util.Set;
-
-import javax.security.auth.Subject;
-import javax.security.auth.kerberos.KerberosTicket;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.http.TestHttpServer.EchoServlet;
-import org.apache.hadoop.hbase.http.resource.JerseyResource;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.http.HttpHost;
-import org.apache.http.HttpResponse;
-import org.apache.http.auth.AuthSchemeProvider;
-import org.apache.http.auth.AuthScope;
-import org.apache.http.auth.KerberosCredentials;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.config.AuthSchemes;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.protocol.HttpClientContext;
-import org.apache.http.config.Lookup;
-import org.apache.http.config.RegistryBuilder;
-import org.apache.http.entity.ByteArrayEntity;
-import org.apache.http.entity.ContentType;
-import org.apache.http.impl.auth.SPNegoSchemeFactory;
-import org.apache.http.impl.client.BasicCredentialsProvider;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.util.EntityUtils;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.client.JaasKrbUtil;
-import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
-import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSManager;
-import org.ietf.jgss.GSSName;
-import org.ietf.jgss.Oid;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-/**
- * Test class for SPNEGO authentication on the HttpServer. Uses Kerby's MiniKDC and Apache
- * HttpComponents to verify that a simple Servlet is reachable via SPNEGO and unreachable w/o.
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestSpnegoHttpServer extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(TestSpnegoHttpServer.class);
-  private static final String KDC_SERVER_HOST = "localhost";
-  private static final String CLIENT_PRINCIPAL = "client";
-
-  private static HttpServer server;
-  private static URL baseUrl;
-  private static SimpleKdcServer kdc;
-  private static File infoServerKeytab;
-  private static File clientKeytab;
-
-  @BeforeClass
-  public static void setupServer() throws Exception {
-    final String serverPrincipal = "HTTP/" + KDC_SERVER_HOST;
-    final File target = new File(System.getProperty("user.dir"), "target");
-    assertTrue(target.exists());
-
-    kdc = buildMiniKdc();
-    kdc.start();
-
-    File keytabDir = new File(target, TestSpnegoHttpServer.class.getSimpleName()
-        + "_keytabs");
-    if (keytabDir.exists()) {
-      deleteRecursively(keytabDir);
-    }
-    keytabDir.mkdirs();
-
-    infoServerKeytab = new File(keytabDir, serverPrincipal.replace('/', '_') + ".keytab");
-    clientKeytab = new File(keytabDir, CLIENT_PRINCIPAL + ".keytab");
-
-    setupUser(kdc, clientKeytab, CLIENT_PRINCIPAL);
-    setupUser(kdc, infoServerKeytab, serverPrincipal);
-
-    Configuration conf = buildSpnegoConfiguration(serverPrincipal, infoServerKeytab);
-
-    server = createTestServerWithSecurity(conf);
-    server.addServlet("echo", "/echo", EchoServlet.class);
-    server.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
-    server.start();
-    baseUrl = getServerURL(server);
-
-    LOG.info("HTTP server started: "+ baseUrl);
-  }
-
-  @AfterClass
-  public static void stopServer() throws Exception {
-    try {
-      if (null != server) {
-        server.stop();
-      }
-    } catch (Exception e) {
-      LOG.info("Failed to stop info server", e);
-    }
-    try {
-      if (null != kdc) {
-        kdc.stop();
-      }
-    } catch (Exception e) {
-      LOG.info("Failed to stop mini KDC", e);
-    }
-  }
-
-  private static void setupUser(SimpleKdcServer kdc, File keytab, String principal)
-      throws KrbException {
-    kdc.createPrincipal(principal);
-    kdc.exportPrincipal(principal, keytab);
-  }
-
-  private static SimpleKdcServer buildMiniKdc() throws Exception {
-    SimpleKdcServer kdc = new SimpleKdcServer();
-
-    final File target = new File(System.getProperty("user.dir"), "target");
-    File kdcDir = new File(target, TestSpnegoHttpServer.class.getSimpleName());
-    if (kdcDir.exists()) {
-      deleteRecursively(kdcDir);
-    }
-    kdcDir.mkdirs();
-    kdc.setWorkDir(kdcDir);
-
-    kdc.setKdcHost(KDC_SERVER_HOST);
-    int kdcPort = getFreePort();
-    kdc.setAllowTcp(true);
-    kdc.setAllowUdp(false);
-    kdc.setKdcTcpPort(kdcPort);
-
-    LOG.info("Starting KDC server at " + KDC_SERVER_HOST + ":" + kdcPort);
-
-    kdc.init();
-
-    return kdc;
-  }
-
-  private static Configuration buildSpnegoConfiguration(String serverPrincipal, File
-      serverKeytab) {
-    Configuration conf = new Configuration();
-    KerberosName.setRules("DEFAULT");
-
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
-
-    // Enable Kerberos (pre-req)
-    conf.set("hbase.security.authentication", "kerberos");
-    conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "kerberos");
-    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY, serverPrincipal);
-    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY, serverKeytab.getAbsolutePath());
-
-    return conf;
-  }
-
-  @Test
-  public void testUnauthorizedClientsDisallowed() throws IOException {
-    URL url = new URL(getServerURL(server), "/echo?a=b");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
-  }
-
-  @Test
-  public void testAllowedClient() throws Exception {
-    // Create the subject for the client
-    final Subject clientSubject = JaasKrbUtil.loginUsingKeytab(CLIENT_PRINCIPAL, clientKeytab);
-    final Set<Principal> clientPrincipals = clientSubject.getPrincipals();
-    // Make sure the subject has a principal
-    assertFalse(clientPrincipals.isEmpty());
-
-    // Get a TGT for the subject (might have many, different encryption types). The first should
-    // be the default encryption type.
-    Set<KerberosTicket> privateCredentials =
-            clientSubject.getPrivateCredentials(KerberosTicket.class);
-    assertFalse(privateCredentials.isEmpty());
-    KerberosTicket tgt = privateCredentials.iterator().next();
-    assertNotNull(tgt);
-
-    // The name of the principal
-    final String principalName = clientPrincipals.iterator().next().getName();
-
-    // Run this code, logged in as the subject (the client)
-    HttpResponse resp = Subject.doAs(clientSubject,
-        new PrivilegedExceptionAction<HttpResponse>() {
-      @Override
-      public HttpResponse run() throws Exception {
-        // Logs in with Kerberos via GSS
-        GSSManager gssManager = GSSManager.getInstance();
-        // jGSS Kerberos login constant
-        Oid oid = new Oid("1.2.840.113554.1.2.2");
-        GSSName gssClient = gssManager.createName(principalName, GSSName.NT_USER_NAME);
-        GSSCredential credential = gssManager.createCredential(gssClient,
-            GSSCredential.DEFAULT_LIFETIME, oid, GSSCredential.INITIATE_ONLY);
-
-        HttpClientContext context = HttpClientContext.create();
-        Lookup<AuthSchemeProvider> authRegistry = RegistryBuilder.<AuthSchemeProvider>create()
-            .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true))
-            .build();
-
-        HttpClient client = HttpClients.custom().setDefaultAuthSchemeRegistry(authRegistry).build();
-        BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
-        credentialsProvider.setCredentials(AuthScope.ANY, new KerberosCredentials(credential));
-
-        URL url = new URL(getServerURL(server), "/echo?a=b");
-        context.setTargetHost(new HttpHost(url.getHost(), url.getPort()));
-        context.setCredentialsProvider(credentialsProvider);
-        context.setAuthSchemeRegistry(authRegistry);
-
-        HttpGet get = new HttpGet(url.toURI());
-        return client.execute(get, context);
-      }
-    });
-
-    assertNotNull(resp);
-    assertEquals(HttpURLConnection.HTTP_OK, resp.getStatusLine().getStatusCode());
-    assertEquals("a:b", EntityUtils.toString(resp.getEntity()).trim());
-  }
-
-  @Test(expected = IllegalArgumentException.class)
-  public void testMissingConfigurationThrowsException() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
-    // Enable Kerberos (pre-req)
-    conf.set("hbase.security.authentication", "kerberos");
-    // Intentionally skip keytab and principal
-
-    HttpServer customServer = createTestServerWithSecurity(conf);
-    customServer.addServlet("echo", "/echo", EchoServlet.class);
-    customServer.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
-    customServer.start();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
deleted file mode 100644
index 8bd1e6d..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.conf;
-
-import java.io.StringReader;
-import java.io.StringWriter;
-import java.util.Map;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.eclipse.jetty.util.ajax.JSON;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-import org.xml.sax.InputSource;
-
-/**
- * Basic test case that the ConfServlet can write configuration
- * to its output in XML and JSON format.
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestConfServlet extends TestCase {
-  private static final String TEST_KEY = "testconfservlet.key";
-  private static final String TEST_VAL = "testval";
-
-  private Configuration getTestConf() {
-    Configuration testConf = new Configuration();
-    testConf.set(TEST_KEY, TEST_VAL);
-    return testConf;
-  }
-
-  @Test
-  @SuppressWarnings("unchecked")
-  public void testWriteJson() throws Exception {
-    StringWriter sw = new StringWriter();
-    ConfServlet.writeResponse(getTestConf(), sw, "json");
-    String json = sw.toString();
-    boolean foundSetting = false;
-    Object parsed = JSON.parse(json);
-    Object[] properties = ((Map<String, Object[]>)parsed).get("properties");
-    for (Object o : properties) {
-      Map<String, Object> propertyInfo = (Map<String, Object>)o;
-      String key = (String)propertyInfo.get("key");
-      String val = (String)propertyInfo.get("value");
-      String resource = (String)propertyInfo.get("resource");
-      System.err.println("k: " + key + " v: " + val + " r: " + resource);
-      if (TEST_KEY.equals(key) && TEST_VAL.equals(val)
-          && "programatically".equals(resource)) {
-        foundSetting = true;
-      }
-    }
-    assertTrue(foundSetting);
-  }
-
-  @Test
-  public void testWriteXml() throws Exception {
-    StringWriter sw = new StringWriter();
-    ConfServlet.writeResponse(getTestConf(), sw, "xml");
-    String xml = sw.toString();
-
-    DocumentBuilderFactory docBuilderFactory
-      = DocumentBuilderFactory.newInstance();
-    DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
-    Document doc = builder.parse(new InputSource(new StringReader(xml)));
-    NodeList nameNodes = doc.getElementsByTagName("name");
-    boolean foundSetting = false;
-    for (int i = 0; i < nameNodes.getLength(); i++) {
-      Node nameNode = nameNodes.item(i);
-      String key = nameNode.getTextContent();
-      System.err.println("xml key: " + key);
-      if (TEST_KEY.equals(key)) {
-        foundSetting = true;
-        Element propertyElem = (Element)nameNode.getParentNode();
-        String val = propertyElem.getElementsByTagName("value").item(0).getTextContent();
-        assertEquals(TEST_VAL, val);
-      }
-    }
-    assertTrue(foundSetting);
-  }
-
-  @Test
-  public void testBadFormat() throws Exception {
-    StringWriter sw = new StringWriter();
-    try {
-      ConfServlet.writeResponse(getTestConf(), sw, "not a format");
-      fail("writeResponse with bad format didn't throw!");
-    } catch (ConfServlet.BadFormatException bfe) {
-      // expected
-    }
-    assertEquals("", sw.toString());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
deleted file mode 100644
index 484162a..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http.jmx;
-
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.net.URLEncoder;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.hbase.http.HttpServerFunctionalTest;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestJMXJsonServlet extends HttpServerFunctionalTest {
-  private   static final Log LOG = LogFactory.getLog(TestJMXJsonServlet.class);
-  private static HttpServer server;
-  private static URL baseUrl;
-
-  @BeforeClass public static void setup() throws Exception {
-    // Eclipse doesn't pick this up correctly from the plugin
-    // configuration in the pom.
-    System.setProperty(HttpServerFunctionalTest.TEST_BUILD_WEBAPPS, "target/test-classes/webapps");
-    server = createTestServer();
-    server.start();
-    baseUrl = getServerURL(server);
-  }
-
-  @AfterClass public static void cleanup() throws Exception {
-    server.stop();
-  }
-
-  public static void assertReFind(String re, String value) {
-    Pattern p = Pattern.compile(re);
-    Matcher m = p.matcher(value);
-    assertTrue("'"+p+"' does not match "+value, m.find());
-  }
-
-  @Test public void testQuery() throws Exception {
-    String result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Runtime"));
-    LOG.info("/jmx?qry=java.lang:type=Runtime RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Runtime\"", result);
-    assertReFind("\"modelerType\"", result);
-
-    result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory"));
-    LOG.info("/jmx?qry=java.lang:type=Memory RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-    assertReFind("\"modelerType\"", result);
-
-    result = readOutput(new URL(baseUrl, "/jmx"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-
-    // test to get an attribute of a mbean
-    result = readOutput(new URL(baseUrl,
-        "/jmx?get=java.lang:type=Memory::HeapMemoryUsage"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-    assertReFind("\"committed\"\\s*:", result);
-
-    // negative test to get an attribute of a mbean
-    result = readOutput(new URL(baseUrl,
-        "/jmx?get=java.lang:type=Memory::"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("\"ERROR\"", result);
-
-    // test to get JSONP result
-    result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory&callback=mycallback1"));
-    LOG.info("/jmx?qry=java.lang:type=Memory&callback=mycallback RESULT: "+result);
-    assertReFind("^mycallback1\\(\\{", result);
-    assertReFind("\\}\\);$", result);
-
-    // negative test to get an attribute of a mbean as JSONP
-    result = readOutput(new URL(baseUrl,
-        "/jmx?get=java.lang:type=Memory::&callback=mycallback2"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("^mycallback2\\(\\{", result);
-    assertReFind("\"ERROR\"", result);
-    assertReFind("\\}\\);$", result);
-
-    // test to get an attribute of a mbean as JSONP
-    result = readOutput(new URL(baseUrl,
-        "/jmx?get=java.lang:type=Memory::HeapMemoryUsage&callback=mycallback3"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("^mycallback3\\(\\{", result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-    assertReFind("\"committed\"\\s*:", result);
-    assertReFind("\\}\\);$", result);
-
-  }
-
-  @Test
-  public void testDisallowedJSONPCallback() throws Exception {
-    String callback = "function(){alert('bigproblems!')};foo";
-    URL url = new URL(
-        baseUrl, "/jmx?qry=java.lang:type=Memory&callback="+URLEncoder.encode(callback, "UTF-8"));
-    HttpURLConnection cnxn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, cnxn.getResponseCode());
-  }
-
-  @Test
-  public void testUnderscoresInJSONPCallback() throws Exception {
-    String callback = "my_function";
-    URL url = new URL(
-        baseUrl, "/jmx?qry=java.lang:type=Memory&callback="+URLEncoder.encode(callback, "UTF-8"));
-    HttpURLConnection cnxn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpServletResponse.SC_OK, cnxn.getResponseCode());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
deleted file mode 100644
index 3adca50..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.lib;
-
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.mock;
-
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.ServerConfigurationKeys;
-import org.apache.hadoop.hbase.http.lib.StaticUserWebFilter.StaticUserFilter;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestStaticUserWebFilter {
-  private FilterConfig mockConfig(String username) {
-    FilterConfig mock = Mockito.mock(FilterConfig.class);
-    Mockito.doReturn(username).when(mock).getInitParameter(
-            ServerConfigurationKeys.HBASE_HTTP_STATIC_USER);
-    return mock;
-  }
-
-  @Test
-  public void testFilter() throws Exception {
-    FilterConfig config = mockConfig("myuser");
-    StaticUserFilter suf = new StaticUserFilter();
-    suf.init(config);
-
-    ArgumentCaptor<HttpServletRequestWrapper> wrapperArg =
-      ArgumentCaptor.forClass(HttpServletRequestWrapper.class);
-
-    FilterChain chain = mock(FilterChain.class);
-
-    suf.doFilter(mock(HttpServletRequest.class), mock(ServletResponse.class),
-        chain);
-
-    Mockito.verify(chain).doFilter(wrapperArg.capture(), Mockito.<ServletResponse>anyObject());
-
-    HttpServletRequestWrapper wrapper = wrapperArg.getValue();
-    assertEquals("myuser", wrapper.getUserPrincipal().getName());
-    assertEquals("myuser", wrapper.getRemoteUser());
-
-    suf.destroy();
-  }
-
-  @Test
-  public void testOldStyleConfiguration() {
-    Configuration conf = new Configuration();
-    conf.set("dfs.web.ugi", "joe,group1,group2");
-    assertEquals("joe", StaticUserWebFilter.getUsernameFromConf(conf));
-  }
-
-  @Test
-  public void testConfiguration() {
-    Configuration conf = new Configuration();
-    conf.set(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, "dr.stack");
-    assertEquals("dr.stack", StaticUserWebFilter.getUsernameFromConf(conf));
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
deleted file mode 100644
index e14e3b4..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-package org.apache.hadoop.hbase.http.log;
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.*;
-import java.net.*;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.commons.logging.*;
-import org.apache.commons.logging.impl.*;
-import org.apache.log4j.*;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestLogLevel {
-  static final PrintStream out = System.out;
-
-  @Test (timeout=60000)
-  @SuppressWarnings("deprecation")
-  public void testDynamicLogLevel() throws Exception {
-    String logName = TestLogLevel.class.getName();
-    Log testlog = LogFactory.getLog(logName);
-
-    //only test Log4JLogger
-    if (testlog instanceof Log4JLogger) {
-      Logger log = ((Log4JLogger)testlog).getLogger();
-      log.debug("log.debug1");
-      log.info("log.info1");
-      log.error("log.error1");
-      assertTrue(!Level.ERROR.equals(log.getEffectiveLevel()));
-
-      HttpServer server = null;
-      try {
-        server = new HttpServer.Builder().setName("..")
-            .addEndpoint(new URI("http://localhost:0")).setFindPort(true)
-            .build();
-
-        server.start();
-        String authority = NetUtils.getHostPortString(server
-            .getConnectorAddress(0));
-
-        //servlet
-        URL url =
-            new URL("http://" + authority + "/logLevel?log=" + logName + "&level=" + Level.ERROR);
-        out.println("*** Connecting to " + url);
-        try (BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream()))) {
-          for(String line; (line = in.readLine()) != null; out.println(line));
-        }
-        log.debug("log.debug2");
-        log.info("log.info2");
-        log.error("log.error2");
-        assertTrue(Level.ERROR.equals(log.getEffectiveLevel()));
-
-        //command line
-        String[] args = {"-setlevel", authority, logName, Level.DEBUG.toString()};
-        LogLevel.main(args);
-        log.debug("log.debug3");
-        log.info("log.info3");
-        log.error("log.error3");
-        assertTrue(Level.DEBUG.equals(log.getEffectiveLevel()));
-      } finally {
-        if (server != null) {
-          server.stop();
-        }
-      }
-    }
-    else {
-      out.println(testlog.getClass() + " not tested.");
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
deleted file mode 100644
index bf0e609..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.resource;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.eclipse.jetty.util.ajax.JSON;
-
-/**
- * A simple Jersey resource class TestHttpServer.
- * The servlet simply puts the path and the op parameter in a map
- * and return it in JSON format in the response.
- */
-@Path("")
-public class JerseyResource {
-  private static final Log LOG = LogFactory.getLog(JerseyResource.class);
-
-  public static final String PATH = "path";
-  public static final String OP = "op";
-
-  @GET
-  @Path("{" + PATH + ":.*}")
-  @Produces({MediaType.APPLICATION_JSON})
-  public Response get(
-      @PathParam(PATH) @DefaultValue("UNKNOWN_" + PATH) final String path,
-      @QueryParam(OP) @DefaultValue("UNKNOWN_" + OP) final String op
-      ) throws IOException {
-    LOG.info("get: " + PATH + "=" + path + ", " + OP + "=" + op);
-
-    final Map<String, Object> m = new TreeMap<>();
-    m.put(PATH, path);
-    m.put(OP, op);
-    final String js = JSON.toString(m);
-    return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
deleted file mode 100644
index 35d6236..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
+++ /dev/null
@@ -1,342 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http.ssl;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.Writer;
-import java.math.BigInteger;
-import java.net.URL;
-import java.security.GeneralSecurityException;
-import java.security.InvalidKeyException;
-import java.security.Key;
-import java.security.KeyPair;
-import java.security.KeyPairGenerator;
-import java.security.KeyStore;
-import java.security.NoSuchAlgorithmException;
-import java.security.NoSuchProviderException;
-import java.security.SecureRandom;
-import java.security.SignatureException;
-import java.security.cert.Certificate;
-import java.security.cert.CertificateEncodingException;
-import java.security.cert.X509Certificate;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.security.auth.x500.X500Principal;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
-import org.apache.hadoop.security.ssl.SSLFactory;
-import org.bouncycastle.x509.X509V1CertificateGenerator;
-
-public class KeyStoreTestUtil {
-
-  public static String getClasspathDir(Class<?> klass) throws Exception {
-    String file = klass.getName();
-    file = file.replace('.', '/') + ".class";
-    URL url = Thread.currentThread().getContextClassLoader().getResource(file);
-    String baseDir = url.toURI().getPath();
-    baseDir = baseDir.substring(0, baseDir.length() - file.length() - 1);
-    return baseDir;
-  }
-
-  /**
-   * Create a self-signed X.509 Certificate.
-   *
-   * @param dn the X.509 Distinguished Name, eg "CN=Test, L=London, C=GB"
-   * @param pair the KeyPair
-   * @param days how many days from now the Certificate is valid for
-   * @param algorithm the signing algorithm, eg "SHA1withRSA"
-   * @return the self-signed certificate
-   */
-  public static X509Certificate generateCertificate(String dn, KeyPair pair, int days, String algorithm)
-      throws CertificateEncodingException, InvalidKeyException, IllegalStateException,
-      NoSuchProviderException, NoSuchAlgorithmException, SignatureException {
-    Date from = new Date();
-    Date to = new Date(from.getTime() + days * 86400000l);
-    BigInteger sn = new BigInteger(64, new SecureRandom());
-    KeyPair keyPair = pair;
-    X509V1CertificateGenerator certGen = new X509V1CertificateGenerator();
-    X500Principal  dnName = new X500Principal(dn);
-
-    certGen.setSerialNumber(sn);
-    certGen.setIssuerDN(dnName);
-    certGen.setNotBefore(from);
-    certGen.setNotAfter(to);
-    certGen.setSubjectDN(dnName);
-    certGen.setPublicKey(keyPair.getPublic());
-    certGen.setSignatureAlgorithm(algorithm);
-    X509Certificate cert = certGen.generate(pair.getPrivate());
-    return cert;
-  }
-
-  public static KeyPair generateKeyPair(String algorithm)
-    throws NoSuchAlgorithmException {
-    KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
-    keyGen.initialize(1024);
-    return keyGen.genKeyPair();
-  }
-
-  private static KeyStore createEmptyKeyStore()
-    throws GeneralSecurityException, IOException {
-    KeyStore ks = KeyStore.getInstance("JKS");
-    ks.load(null, null); // initialize
-    return ks;
-  }
-
-  private static void saveKeyStore(KeyStore ks, String filename,
-                                   String password)
-    throws GeneralSecurityException, IOException {
-    FileOutputStream out = new FileOutputStream(filename);
-    try {
-      ks.store(out, password.toCharArray());
-    } finally {
-      out.close();
-    }
-  }
-
-  public static void createKeyStore(String filename,
-                                    String password, String alias,
-                                    Key privateKey, Certificate cert)
-    throws GeneralSecurityException, IOException {
-    KeyStore ks = createEmptyKeyStore();
-    ks.setKeyEntry(alias, privateKey, password.toCharArray(),
-                   new Certificate[]{cert});
-    saveKeyStore(ks, filename, password);
-  }
-
-  /**
-   * Creates a keystore with a single key and saves it to a file.
-   *
-   * @param filename String file to save
-   * @param password String store password to set on keystore
-   * @param keyPassword String key password to set on key
-   * @param alias String alias to use for the key
-   * @param privateKey Key to save in keystore
-   * @param cert Certificate to use as certificate chain associated to key
-   * @throws GeneralSecurityException for any error with the security APIs
-   * @throws IOException if there is an I/O error saving the file
-   */
-  public static void createKeyStore(String filename,
-                                    String password, String keyPassword, String alias,
-                                    Key privateKey, Certificate cert)
-    throws GeneralSecurityException, IOException {
-    KeyStore ks = createEmptyKeyStore();
-    ks.setKeyEntry(alias, privateKey, keyPassword.toCharArray(),
-                   new Certificate[]{cert});
-    saveKeyStore(ks, filename, password);
-  }
-
-  public static void createTrustStore(String filename,
-                                      String password, String alias,
-                                      Certificate cert)
-    throws GeneralSecurityException, IOException {
-    KeyStore ks = createEmptyKeyStore();
-    ks.setCertificateEntry(alias, cert);
-    saveKeyStore(ks, filename, password);
-  }
-
-  public static <T extends Certificate> void createTrustStore(
-    String filename, String password, Map<String, T> certs)
-    throws GeneralSecurityException, IOException {
-    KeyStore ks = createEmptyKeyStore();
-    for (Map.Entry<String, T> cert : certs.entrySet()) {
-      ks.setCertificateEntry(cert.getKey(), cert.getValue());
-    }
-    saveKeyStore(ks, filename, password);
-  }
-
-  public static void cleanupSSLConfig(String keystoresDir, String sslConfDir)
-    throws Exception {
-    File f = new File(keystoresDir + "/clientKS.jks");
-    f.delete();
-    f = new File(keystoresDir + "/serverKS.jks");
-    f.delete();
-    f = new File(keystoresDir + "/trustKS.jks");
-    f.delete();
-    f = new File(sslConfDir + "/ssl-client.xml");
-    f.delete();
-    f = new File(sslConfDir +  "/ssl-server.xml");
-    f.delete();
-  }
-
-  /**
-   * Performs complete setup of SSL configuration in preparation for testing an
-   * SSLFactory.  This includes keys, certs, keystores, truststores, the server
-   * SSL configuration file, the client SSL configuration file, and the master
-   * configuration file read by the SSLFactory.
-   *
-   * @param keystoresDir String directory to save keystores
-   * @param sslConfDir String directory to save SSL configuration files
-   * @param conf Configuration master configuration to be used by an SSLFactory,
-   *   which will be mutated by this method
-   * @param useClientCert boolean true to make the client present a cert in the
-   *   SSL handshake
-   */
-  public static void setupSSLConfig(String keystoresDir, String sslConfDir,
-                                    Configuration conf, boolean useClientCert)
-    throws Exception {
-    String clientKS = keystoresDir + "/clientKS.jks";
-    String clientPassword = "clientP";
-    String serverKS = keystoresDir + "/serverKS.jks";
-    String serverPassword = "serverP";
-    String trustKS = keystoresDir + "/trustKS.jks";
-    String trustPassword = "trustP";
-
-    File sslClientConfFile = new File(sslConfDir + "/ssl-client.xml");
-    File sslServerConfFile = new File(sslConfDir + "/ssl-server.xml");
-
-    Map<String, X509Certificate> certs = new HashMap<>();
-
-    if (useClientCert) {
-      KeyPair cKP = KeyStoreTestUtil.generateKeyPair("RSA");
-      X509Certificate cCert =
-        KeyStoreTestUtil.generateCertificate("CN=localhost, O=client", cKP, 30,
-                                             "SHA1withRSA");
-      KeyStoreTestUtil.createKeyStore(clientKS, clientPassword, "client",
-                                      cKP.getPrivate(), cCert);
-      certs.put("client", cCert);
-    }
-
-    KeyPair sKP = KeyStoreTestUtil.generateKeyPair("RSA");
-    X509Certificate sCert =
-      KeyStoreTestUtil.generateCertificate("CN=localhost, O=server", sKP, 30,
-                                           "SHA1withRSA");
-    KeyStoreTestUtil.createKeyStore(serverKS, serverPassword, "server",
-                                    sKP.getPrivate(), sCert);
-    certs.put("server", sCert);
-
-    KeyStoreTestUtil.createTrustStore(trustKS, trustPassword, certs);
-
-    Configuration clientSSLConf = createClientSSLConfig(clientKS, clientPassword,
-      clientPassword, trustKS);
-    Configuration serverSSLConf = createServerSSLConfig(serverKS, serverPassword,
-      serverPassword, trustKS);
-
-    saveConfig(sslClientConfFile, clientSSLConf);
-    saveConfig(sslServerConfFile, serverSSLConf);
-
-    conf.set(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
-    conf.set(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfFile.getName());
-    conf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile.getName());
-    conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, useClientCert);
-  }
-
-  /**
-   * Creates SSL configuration for a client.
-   *
-   * @param clientKS String client keystore file
-   * @param password String store password, or null to avoid setting store
-   *   password
-   * @param keyPassword String key password, or null to avoid setting key
-   *   password
-   * @param trustKS String truststore file
-   * @return Configuration for client SSL
-   */
-  public static Configuration createClientSSLConfig(String clientKS,
-      String password, String keyPassword, String trustKS) {
-    Configuration clientSSLConf = createSSLConfig(SSLFactory.Mode.CLIENT,
-      clientKS, password, keyPassword, trustKS);
-    return clientSSLConf;
-  }
-
-  /**
-   * Creates SSL configuration for a server.
-   *
-   * @param serverKS String server keystore file
-   * @param password String store password, or null to avoid setting store
-   *   password
-   * @param keyPassword String key password, or null to avoid setting key
-   *   password
-   * @param trustKS String truststore file
-   * @return Configuration for server SSL
-   */
-  public static Configuration createServerSSLConfig(String serverKS,
-      String password, String keyPassword, String trustKS) throws IOException {
-    Configuration serverSSLConf = createSSLConfig(SSLFactory.Mode.SERVER,
-      serverKS, password, keyPassword, trustKS);
-    return serverSSLConf;
-  }
-
-  /**
-   * Creates SSL configuration.
-   *
-   * @param mode SSLFactory.Mode mode to configure
-   * @param keystore String keystore file
-   * @param password String store password, or null to avoid setting store
-   *   password
-   * @param keyPassword String key password, or null to avoid setting key
-   *   password
-   * @param trustKS String truststore file
-   * @return Configuration for SSL
-   */
-  private static Configuration createSSLConfig(SSLFactory.Mode mode,
-      String keystore, String password, String keyPassword, String trustKS) {
-    String trustPassword = "trustP";
-
-    Configuration sslConf = new Configuration(false);
-    if (keystore != null) {
-      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY), keystore);
-    }
-    if (password != null) {
-      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY), password);
-    }
-    if (keyPassword != null) {
-      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
-        keyPassword);
-    }
-    if (trustKS != null) {
-      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY), trustKS);
-    }
-    if (trustPassword != null) {
-      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY),
-        trustPassword);
-    }
-    sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-      FileBasedKeyStoresFactory.SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY), "1000");
-
-    return sslConf;
-  }
-
-  /**
-   * Saves configuration to a file.
-   *
-   * @param file File to save
-   * @param conf Configuration contents to write to file
-   * @throws IOException if there is an I/O error saving the file
-   */
-  public static void saveConfig(File file, Configuration conf)
-      throws IOException {
-    Writer writer = new FileWriter(file);
-    try {
-      conf.writeXml(writer);
-    } finally {
-      writer.close();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/resources/log4j.properties b/hbase-http/src/test/resources/log4j.properties
deleted file mode 100644
index c322699..0000000
--- a/hbase-http/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-# Custom Logging levels
-
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-
-log4j.logger.org.apache.hadoop=WARN
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase=DEBUG
-
-#These settings are workarounds against spurious logs from the minicluster.
-#See HBASE-4709
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/resources/webapps/static/test.css
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/resources/webapps/static/test.css b/hbase-http/src/test/resources/webapps/static/test.css
deleted file mode 100644
index ae43828..0000000
--- a/hbase-http/src/test/resources/webapps/static/test.css
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * Test CSS file for content type handling - empty, since we just check
- * returned content type!
- */

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/resources/webapps/test/testjsp.jsp
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/resources/webapps/test/testjsp.jsp b/hbase-http/src/test/resources/webapps/test/testjsp.jsp
deleted file mode 100644
index 540adf9..0000000
--- a/hbase-http/src/test/resources/webapps/test/testjsp.jsp
+++ /dev/null
@@ -1,21 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?><%!
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-%>
-<%@ page contentType="text/html; charset=UTF-8" %>
-Hello world!

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index 360ab9d..c2f6cf6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -20,8 +20,10 @@ package org.apache.hadoop.hbase.rest;
 
 import java.lang.management.ManagementFactory;
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.EnumSet;
 import java.util.concurrent.ArrayBlockingQueue;
 
@@ -44,7 +46,7 @@ import org.apache.hadoop.hbase.rest.filter.GzipFilter;
 import org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.DNS;
-import org.apache.hadoop.hbase.http.HttpServerUtil;
+import org.apache.hadoop.hbase.util.HttpServerUtil;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.util.VersionInfo;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
index 4c977fd..0714c7b 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
@@ -24,7 +24,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.http.HttpServerUtil;
+import org.apache.hadoop.hbase.util.HttpServerUtil;
 import org.apache.hadoop.util.StringUtils;
 
 import org.eclipse.jetty.server.HttpConfiguration;
@@ -99,7 +99,7 @@ public class HBaseRESTTestingUtility {
     // get the port
     testServletPort = ((ServerConnector)server.getConnectors()[0]).getLocalPort();
 
-    LOG.info("started " + server.getClass().getName() + " on port " +
+    LOG.info("started " + server.getClass().getName() + " on port " + 
       testServletPort);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-server/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index ea02f26..3a025d1 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -360,16 +360,6 @@
       <artifactId>hbase-common</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-http</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-http</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
       <!--Needed by the visiblity tags and acl CPEP things
            in here in hbase-server (that should be out in hbase-endpoints
            or integrated). -->
@@ -470,13 +460,48 @@
     </dependency>
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-util</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-util-ajax</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-jsp</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-webapp</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-http</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-security</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.containers</groupId>
+      <artifactId>jersey-container-servlet-core</artifactId>
+    </dependency>
+    <dependency>
       <!--For JspC used in ant task-->
       <groupId>org.glassfish.web</groupId>
       <artifactId>javax.servlet.jsp</artifactId>
     </dependency>
+    <!-- Specifically needed for jetty-jsp; included
+         to bypass version scanning that hits a bad repo.
+         See HBASE-18831. -->
+    <dependency>
+      <groupId>org.glassfish</groupId>
+      <artifactId>javax.el</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.codehaus.jettison</groupId>
       <artifactId>jettison</artifactId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
new file mode 100644
index 0000000..bd8570e
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.eclipse.jetty.servlet.DefaultServlet;
+
+/**
+ * General-purpose servlet that only serves requests from users with administrator access.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class AdminAuthorizedServlet extends DefaultServlet {
+
+  private static final long serialVersionUID = 1L;
+
+  @Override
+  protected void doGet(HttpServletRequest request, HttpServletResponse response)
+    throws ServletException, IOException {
+    // Do the authorization
+    if (HttpServer.hasAdministratorAccess(getServletContext(), request,
+        response)) {
+      // Authorization is done. Just call super.
+      super.doGet(request, response);
+    }
+  }
+}
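
The servlet above simply gates Jetty's DefaultServlet behind HttpServer.hasAdministratorAccess(). The following is a minimal sketch of wiring it into a bare Jetty context; the port, path and resource base are hypothetical, and in HBase it is HttpServer itself that registers the servlet and places the Configuration and admin-ACL attributes that hasAdministratorAccess() reads into the ServletContext.

// Sketch only: serve a hypothetical log directory through AdminAuthorizedServlet.
// Assumes the servlet context is later populated with the configuration and ACL
// attributes that HttpServer.hasAdministratorAccess() expects.
import org.apache.hadoop.hbase.http.AdminAuthorizedServlet;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;

public class AdminServletSketch {
  public static void main(String[] args) throws Exception {
    Server server = new Server(0);                            // any free port
    ServletContextHandler ctx = new ServletContextHandler();
    ctx.setContextPath("/");
    ServletHolder logs = new ServletHolder(AdminAuthorizedServlet.class);
    logs.setInitParameter("resourceBase", "/tmp/hbase-logs"); // hypothetical log dir
    ctx.addServlet(logs, "/logs/*");
    server.setHandler(ctx);
    server.start();
    server.join();
  }
}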

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
new file mode 100644
index 0000000..9944d29
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
+public class ClickjackingPreventionFilter implements Filter {
+
+    private FilterConfig filterConfig;
+
+    @Override
+    public void init(FilterConfig filterConfig) throws ServletException {
+        this.filterConfig = filterConfig;
+    }
+
+    @Override
+    public void doFilter(ServletRequest req, ServletResponse res,
+                         FilterChain chain)
+            throws IOException, ServletException {
+        HttpServletResponse httpRes = (HttpServletResponse) res;
+        httpRes.addHeader("X-Frame-Options", filterConfig.getInitParameter("xframeoptions"));
+        chain.doFilter(req, res);
+    }
+
+    @Override
+    public void destroy() {
+    }
+
+}
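
The filter above adds an X-Frame-Options header whose value comes from its "xframeoptions" init parameter. A minimal sketch of attaching it to a Jetty context follows; the SAMEORIGIN value and the /* path spec are illustrative, and in HBase the filter is installed by HttpServer with a value taken from the server configuration.

// Sketch only: install ClickjackingPreventionFilter on every request path.
import java.util.EnumSet;
import javax.servlet.DispatcherType;
import org.apache.hadoop.hbase.http.ClickjackingPreventionFilter;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;

public class ClickjackingWiringSketch {
  public static void install(ServletContextHandler ctx) {
    FilterHolder holder = new FilterHolder(ClickjackingPreventionFilter.class);
    holder.setInitParameter("xframeoptions", "SAMEORIGIN");   // illustrative value
    ctx.addFilter(holder, "/*", EnumSet.of(DispatcherType.REQUEST));
  }
}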

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
new file mode 100644
index 0000000..7a79acc
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.util.Map;
+
+/**
+ * A container interface to add javax.servlet.Filter.
+ */
+public interface FilterContainer {
+  /**
+   * Add a filter to the container.
+   * @param name Filter name
+   * @param classname Filter class name
+   * @param parameters a map from parameter names to initial values
+   */
+  void addFilter(String name, String classname, Map<String, String> parameters);
+  /**
+   * Add a global filter to the container - This global filter will be
+   * applied to all available web contexts.
+   * @param name filter name
+   * @param classname filter class name
+   * @param parameters a map from parameter names to initial values
+   */
+  void addGlobalFilter(String name, String classname, Map<String, String> parameters);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
new file mode 100644
index 0000000..e033c10
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Initialize a javax.servlet.Filter. 
+ */
+public abstract class FilterInitializer {
+  /**
+   * Initialize a Filter to a FilterContainer.
+   * @param container The filter container
+   * @param conf Configuration for run-time parameters
+   */
+  public abstract void initFilter(FilterContainer container, Configuration conf);
+}
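
FilterContainer and FilterInitializer are the plug-in point HttpServer uses to install filters: an initializer subclass named in configuration registers its filter with the container for one or all web contexts. The sketch below registers the ClickjackingPreventionFilter restored above; the configuration key and DENY default are assumptions for illustration, not taken from this patch.

// Sketch only: a FilterInitializer that installs ClickjackingPreventionFilter
// on all web contexts via FilterContainer.addGlobalFilter().
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.ClickjackingPreventionFilter;
import org.apache.hadoop.hbase.http.FilterContainer;
import org.apache.hadoop.hbase.http.FilterInitializer;

public class ClickjackingFilterInitializer extends FilterInitializer {
  @Override
  public void initFilter(FilterContainer container, Configuration conf) {
    Map<String, String> params = new HashMap<>();
    // Hypothetical key and default; HBase's HttpServer reads its own property here.
    params.put("xframeoptions",
        conf.get("hbase.http.filter.xframeoptions.mode", "DENY"));
    container.addGlobalFilter("clickjackingprevention",
        ClickjackingPreventionFilter.class.getName(), params);
  }
}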

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
new file mode 100644
index 0000000..60a74b7
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * This class is responsible for quoting HTML characters.
+ */
+public class HtmlQuoting {
+  private static final byte[] ampBytes = "&amp;".getBytes();
+  private static final byte[] aposBytes = "&apos;".getBytes();
+  private static final byte[] gtBytes = "&gt;".getBytes();
+  private static final byte[] ltBytes = "&lt;".getBytes();
+  private static final byte[] quotBytes = "&quot;".getBytes();
+
+  /**
+   * Does the given string need to be quoted?
+   * @param data the string to check
+   * @param off the starting position
+   * @param len the number of bytes to check
+   * @return true if the string contains any of the active HTML characters
+   */
+  public static boolean needsQuoting(byte[] data, int off, int len) {
+    if (off + len > data.length) {
+      throw new IllegalStateException("off+len=" + (off + len)
+          + " must not exceed data length=" + data.length);
+    }
+    for(int i=off; i< off+len; ++i) {
+      switch(data[i]) {
+      case '&':
+      case '<':
+      case '>':
+      case '\'':
+      case '"':
+        return true;
+      default:
+        break;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Does the given string need to be quoted?
+   * @param str the string to check
+   * @return true if the string contains any of the active HTML characters
+   */
+  public static boolean needsQuoting(String str) {
+    if (str == null) {
+      return false;
+    }
+    byte[] bytes = str.getBytes();
+    return needsQuoting(bytes, 0 , bytes.length);
+  }
+
+  /**
+   * Quote all of the active HTML characters in the given string as they
+   * are added to the buffer.
+   * @param output the stream to write the output to
+   * @param buffer the byte array to take the characters from
+   * @param off the index of the first byte to quote
+   * @param len the number of bytes to quote
+   */
+  public static void quoteHtmlChars(OutputStream output, byte[] buffer,
+                                    int off, int len) throws IOException {
+    for(int i=off; i < off+len; i++) {
+      switch (buffer[i]) {
+      case '&': output.write(ampBytes); break;
+      case '<': output.write(ltBytes); break;
+      case '>': output.write(gtBytes); break;
+      case '\'': output.write(aposBytes); break;
+      case '"': output.write(quotBytes); break;
+      default: output.write(buffer, i, 1);
+      }
+    }
+  }
+  
+  /**
+   * Quote the given item to make it html-safe.
+   * @param item the string to quote
+   * @return the quoted string
+   */
+  public static String quoteHtmlChars(String item) {
+    if (item == null) {
+      return null;
+    }
+    byte[] bytes = item.getBytes();
+    if (needsQuoting(bytes, 0, bytes.length)) {
+      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+      try {
+        quoteHtmlChars(buffer, bytes, 0, bytes.length);
+      } catch (IOException ioe) {
+        // Cannot happen: ByteArrayOutputStream does not throw IOException.
+      }
+      return buffer.toString();
+    } else {
+      return item;
+    }
+  }
+
+  /**
+   * Return an output stream that quotes all of the output.
+   * @param out the stream to write the quoted output to
+   * @return a new stream that the application should write to
+   * @throws IOException if the underlying output fails
+   */
+  public static OutputStream quoteOutputStream(final OutputStream out
+                                               ) throws IOException {
+    return new OutputStream() {
+      private byte[] data = new byte[1];
+      @Override
+      public void write(byte[] data, int off, int len) throws IOException {
+        quoteHtmlChars(out, data, off, len);
+      }
+      
+      @Override
+      public void write(int b) throws IOException {
+        data[0] = (byte) b;
+        quoteHtmlChars(out, data, 0, 1);
+      }
+      
+      @Override
+      public void flush() throws IOException {
+        out.flush();
+      }
+      
+      @Override
+      public void close() throws IOException {
+        out.close();
+      }
+    };
+  }
+
+  /**
+   * Remove HTML quoting from a string.
+   * @param item the string to unquote
+   * @return the unquoted string
+   */
+  public static String unquoteHtmlChars(String item) {
+    if (item == null) {
+      return null;
+    }
+    int next = item.indexOf('&');
+    // nothing was quoted
+    if (next == -1) {
+      return item;
+    }
+    int len = item.length();
+    int posn = 0;
+    StringBuilder buffer = new StringBuilder();
+    while (next != -1) {
+      buffer.append(item.substring(posn, next));
+      if (item.startsWith("&amp;", next)) {
+        buffer.append('&');
+        next += 5;
+      } else if (item.startsWith("&apos;", next)) {
+        buffer.append('\'');
+        next += 6;        
+      } else if (item.startsWith("&gt;", next)) {
+        buffer.append('>');
+        next += 4;
+      } else if (item.startsWith("&lt;", next)) {
+        buffer.append('<');
+        next += 4;
+      } else if (item.startsWith("&quot;", next)) {
+        buffer.append('"');
+        next += 6;
+      } else {
+        int end = item.indexOf(';', next)+1;
+        if (end == 0) {
+          end = len;
+        }
+        throw new IllegalArgumentException("Bad HTML quoting for " + 
+                                           item.substring(next,end));
+      }
+      posn = next;
+      next = item.indexOf('&', posn);
+    }
+    buffer.append(item.substring(posn, len));
+    return buffer.toString();
+  }
+  
+  public static void main(String[] args) throws Exception {
+    if (args.length == 0) {
+      throw new IllegalArgumentException("Please provide some arguments");
+    }
+    for(String arg:args) {
+      System.out.println("Original: " + arg);
+      String quoted = quoteHtmlChars(arg);
+      System.out.println("Quoted: "+ quoted);
+      String unquoted = unquoteHtmlChars(quoted);
+      System.out.println("Unquoted: " + unquoted);
+      System.out.println();
+    }
+  }
+
+}
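
The main() method above exercises quoteHtmlChars() and unquoteHtmlChars() on strings; the one piece it does not show is the stream wrapper. A small sketch of quoteOutputStream(), using only the methods defined above, follows.

// Sketch only: everything written through the wrapper comes out HTML-quoted.
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import org.apache.hadoop.hbase.http.HtmlQuoting;

public class HtmlQuotingStreamSketch {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream raw = new ByteArrayOutputStream();
    OutputStream quoted = HtmlQuoting.quoteOutputStream(raw);
    quoted.write("<b>R&D</b>".getBytes());
    quoted.flush();
    System.out.println(raw.toString());   // &lt;b&gt;R&amp;D&lt;/b&gt;
  }
}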

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
new file mode 100644
index 0000000..b9dde23
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Access to HTTP-related configuration (whether the server runs HTTP, HTTPS, or both).
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class HttpConfig {
+  private Policy policy;
+  public enum Policy {
+    HTTP_ONLY,
+    HTTPS_ONLY,
+    HTTP_AND_HTTPS;
+
+    public static Policy fromString(String value) {
+      if (HTTPS_ONLY.name().equalsIgnoreCase(value)) {
+        return HTTPS_ONLY;
+      } else if (HTTP_AND_HTTPS.name().equalsIgnoreCase(value)) {
+        return HTTP_AND_HTTPS;
+      }
+      return HTTP_ONLY;
+    }
+
+    public boolean isHttpEnabled() {
+      return this == HTTP_ONLY || this == HTTP_AND_HTTPS;
+    }
+
+    public boolean isHttpsEnabled() {
+      return this == HTTPS_ONLY || this == HTTP_AND_HTTPS;
+    }
+  }
+
+  public HttpConfig(final Configuration conf) {
+    boolean sslEnabled = conf.getBoolean(
+      ServerConfigurationKeys.HBASE_SSL_ENABLED_KEY,
+      ServerConfigurationKeys.HBASE_SSL_ENABLED_DEFAULT);
+    policy = sslEnabled ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY;
+    if (sslEnabled) {
+      conf.addResource("ssl-server.xml");
+      conf.addResource("ssl-client.xml");
+    }
+  }
+
+  public void setPolicy(Policy policy) {
+    this.policy = policy;
+  }
+
+  public boolean isSecure() {
+    return policy == Policy.HTTPS_ONLY;
+  }
+
+  public String getSchemePrefix() {
+    return (isSecure()) ? "https://" : "http://";
+  }
+
+  public String getScheme(Policy policy) {
+    return policy == Policy.HTTPS_ONLY ? "https://" : "http://";
+  }
+}
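
HttpConfig derives its policy from the SSL-enabled flag in ServerConfigurationKeys and exposes the scheme prefix used when the server builds links to itself. A brief sketch, assuming the default configuration leaves SSL disabled as the constructor above implies:

// Sketch only: policy selection and scheme prefix.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.HttpConfig;

public class HttpConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();          // SSL flag unset -> HTTP only
    HttpConfig httpConfig = new HttpConfig(conf);
    System.out.println(httpConfig.getSchemePrefix());  // http://
    httpConfig.setPolicy(HttpConfig.Policy.HTTPS_ONLY);
    System.out.println(httpConfig.getSchemePrefix());  // https://
  }
}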

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
new file mode 100644
index 0000000..cfc0640
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.util.HashMap;
+
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogConfigurationException;
+import org.apache.commons.logging.LogFactory;
+import org.apache.log4j.Appender;
+import org.apache.log4j.Logger;
+
+import org.eclipse.jetty.server.RequestLog;
+import org.eclipse.jetty.server.NCSARequestLog;
+
+/**
+ * RequestLog object for use with Http
+ */
+public class HttpRequestLog {
+
+  private static final Log LOG = LogFactory.getLog(HttpRequestLog.class);
+  private static final HashMap<String, String> serverToComponent;
+
+  static {
+    serverToComponent = new HashMap<>();
+    serverToComponent.put("master", "master");
+    serverToComponent.put("region", "regionserver");
+  }
+
+  public static RequestLog getRequestLog(String name) {
+
+    String lookup = serverToComponent.get(name);
+    if (lookup != null) {
+      name = lookup;
+    }
+    String loggerName = "http.requests." + name;
+    String appenderName = name + "requestlog";
+    Log logger = LogFactory.getLog(loggerName);
+
+    if (logger instanceof Log4JLogger) {
+      Log4JLogger httpLog4JLog = (Log4JLogger)logger;
+      Logger httpLogger = httpLog4JLog.getLogger();
+      Appender appender = null;
+
+      try {
+        appender = httpLogger.getAppender(appenderName);
+      } catch (LogConfigurationException e) {
+        LOG.warn("Http request log for " + loggerName
+            + " could not be created");
+        throw e;
+      }
+
+      if (appender == null) {
+        LOG.info("Http request log for " + loggerName
+            + " is not defined");
+        return null;
+      }
+
+      if (appender instanceof HttpRequestLogAppender) {
+        HttpRequestLogAppender requestLogAppender
+          = (HttpRequestLogAppender)appender;
+        NCSARequestLog requestLog = new NCSARequestLog();
+        requestLog.setFilename(requestLogAppender.getFilename());
+        requestLog.setRetainDays(requestLogAppender.getRetainDays());
+        return requestLog;
+      } else {
+        LOG.warn("Jetty request log for " + loggerName
+            + " was of the wrong class");
+        return null;
+      }
+    } else {
+      LOG.warn("Jetty request log can only be enabled using Log4j");
+      return null;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
new file mode 100644
index 0000000..8039b34
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.log4j.spi.LoggingEvent;
+import org.apache.log4j.AppenderSkeleton;
+
+/**
+ * Log4j Appender adapter for HttpRequestLog
+ */
+public class HttpRequestLogAppender extends AppenderSkeleton {
+
+  private String filename;
+  private int retainDays;
+
+  public HttpRequestLogAppender() {
+  }
+
+  public void setRetainDays(int retainDays) {
+    this.retainDays = retainDays;
+  }
+
+  public int getRetainDays() {
+    return retainDays;
+  }
+
+  public void setFilename(String filename) {
+    this.filename = filename;
+  }
+
+  public String getFilename() {
+    return filename;
+  }
+
+  @Override
+  public void append(LoggingEvent event) {
+  }
+
+  @Override
+  public void close() {
+    // Nothing to close; this appender only carries configuration.
+  }
+
+  @Override
+  public boolean requiresLayout() {
+    return false;
+  }
+}
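
HttpRequestLog.getRequestLog() looks up the log4j logger "http.requests.<component>" for an appender named "<component>requestlog" of type HttpRequestLogAppender, and uses it only as a carrier for the file name and retention period of the Jetty NCSARequestLog it creates. The sketch below shows the equivalent programmatic wiring; the file path is hypothetical, and it assumes commons-logging is bound to log4j so that the Log4JLogger branch above is taken.

// Sketch only: register an HttpRequestLogAppender for the master component and
// let HttpRequestLog turn it into a Jetty NCSARequestLog.
import org.apache.hadoop.hbase.http.HttpRequestLog;
import org.apache.hadoop.hbase.http.HttpRequestLogAppender;
import org.apache.log4j.Logger;
import org.eclipse.jetty.server.RequestLog;

public class RequestLogSketch {
  public static void main(String[] args) {
    HttpRequestLogAppender appender = new HttpRequestLogAppender();
    appender.setName("masterrequestlog");                   // <component> + "requestlog"
    appender.setFilename("/tmp/master-http-requests.log");  // hypothetical path
    appender.setRetainDays(3);
    Logger.getLogger("http.requests.master").addAppender(appender);

    RequestLog requestLog = HttpRequestLog.getRequestLog("master");
    System.out.println(requestLog);  // NCSARequestLog when the log4j binding is active
  }
}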