Posted to commits@hbase.apache.org by st...@apache.org on 2014/06/11 22:55:42 UTC

[2/4] HBASE-10336 Remove deprecated usage of Hadoop HttpServer in InfoServer (Eric Charles)
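For readers skimming the patch: the new tests construct the HBase-local org.apache.hadoop.hbase.http.HttpServer through its builder API rather than the deprecated Hadoop HttpServer. A minimal sketch of that pattern, assembled from the builder calls exercised in the tests below (the class name BuilderUsageSketch and the standalone main method are illustrative only, not part of this patch):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.http.HttpServer;

    // Illustrative sketch only; mirrors the builder calls used in the tests in this patch.
    public class BuilderUsageSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setInt(HttpServer.HTTP_MAX_THREADS, 10);      // cap the Jetty thread pool, as in TestHttpServer
        HttpServer server = new HttpServer.Builder()
            .setName("test")                               // webapp name resolved from the test classpath
            .addEndpoint(new URI("http://localhost:0"))    // port 0 = ephemeral
            .setFindPort(true)                             // probe for a free port if the requested one is taken
            .setConf(conf)
            .build();
        server.start();
        System.out.println("HTTP server started at " + server.getConnectorAddress(0));
        server.stop();
      }
    }

The tests themselves drive the same calls through the HttpServerFunctionalTest helpers such as createTestServer() and getServerURL().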

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
new file mode 100644
index 0000000..b92eabc
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -0,0 +1,613 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedSet;
+import java.util.TreeSet;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Executors;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+import javax.servlet.http.HttpServletResponse;
+
+import junit.framework.Assert;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter;
+import org.apache.hadoop.hbase.http.resource.JerseyResource;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.Groups;
+import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+import org.mockito.internal.util.reflection.Whitebox;
+import org.mortbay.jetty.Connector;
+import org.mortbay.util.ajax.JSON;
+
+@Category(SmallTests.class)
+public class TestHttpServer extends HttpServerFunctionalTest {
+  static final Log LOG = LogFactory.getLog(TestHttpServer.class);
+  private static HttpServer server;
+  private static URL baseUrl;
+  private static final int MAX_THREADS = 10;
+  
+  @SuppressWarnings("serial")
+  public static class EchoMapServlet extends HttpServlet {
+    @SuppressWarnings("unchecked")
+    @Override
+    public void doGet(HttpServletRequest request, 
+                      HttpServletResponse response
+                      ) throws ServletException, IOException {
+      PrintWriter out = response.getWriter();
+      Map<String, String[]> params = request.getParameterMap();
+      SortedSet<String> keys = new TreeSet<String>(params.keySet());
+      for(String key: keys) {
+        out.print(key);
+        out.print(':');
+        String[] values = params.get(key);
+        if (values.length > 0) {
+          out.print(values[0]);
+          for(int i=1; i < values.length; ++i) {
+            out.print(',');
+            out.print(values[i]);
+          }
+        }
+        out.print('\n');
+      }
+      out.close();
+    }    
+  }
+
+  @SuppressWarnings("serial")
+  public static class EchoServlet extends HttpServlet {
+    @SuppressWarnings("unchecked")
+    @Override
+    public void doGet(HttpServletRequest request, 
+                      HttpServletResponse response
+                      ) throws ServletException, IOException {
+      PrintWriter out = response.getWriter();
+      SortedSet<String> sortedKeys = new TreeSet<String>();
+      Enumeration<String> keys = request.getParameterNames();
+      while(keys.hasMoreElements()) {
+        sortedKeys.add(keys.nextElement());
+      }
+      for(String key: sortedKeys) {
+        out.print(key);
+        out.print(':');
+        out.print(request.getParameter(key));
+        out.print('\n');
+      }
+      out.close();
+    }    
+  }
+
+  @SuppressWarnings("serial")
+  public static class LongHeaderServlet extends HttpServlet {
+    @Override
+    public void doGet(HttpServletRequest request,
+                      HttpServletResponse response
+    ) throws ServletException, IOException {
+      Assert.assertEquals(63 * 1024, request.getHeader("longheader").length());
+      response.setStatus(HttpServletResponse.SC_OK);
+    }
+  }
+
+  @SuppressWarnings("serial")
+  public static class HtmlContentServlet extends HttpServlet {
+    @Override
+    public void doGet(HttpServletRequest request, 
+                      HttpServletResponse response
+                      ) throws ServletException, IOException {
+      response.setContentType("text/html");
+      PrintWriter out = response.getWriter();
+      out.print("hello world");
+      out.close();
+    }
+  }
+
+  @BeforeClass public static void setup() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setInt(HttpServer.HTTP_MAX_THREADS, 10);
+    server = createTestServer(conf);
+    server.addServlet("echo", "/echo", EchoServlet.class);
+    server.addServlet("echomap", "/echomap", EchoMapServlet.class);
+    server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
+    server.addServlet("longheader", "/longheader", LongHeaderServlet.class);
+    server.addJerseyResourcePackage(
+        JerseyResource.class.getPackage().getName(), "/jersey/*");
+    server.start();
+    baseUrl = getServerURL(server);
+    LOG.info("HTTP server started: "+ baseUrl);
+  }
+  
+  @AfterClass public static void cleanup() throws Exception {
+    server.stop();
+  }
+  
+  /** Test that the maximum number of threads cannot be exceeded. */
+  @Test public void testMaxThreads() throws Exception {
+    int clientThreads = MAX_THREADS * 10;
+    Executor executor = Executors.newFixedThreadPool(clientThreads);
+    // Run many clients to make server reach its maximum number of threads
+    final CountDownLatch ready = new CountDownLatch(clientThreads);
+    final CountDownLatch start = new CountDownLatch(1);
+    for (int i = 0; i < clientThreads; i++) {
+      executor.execute(new Runnable() {
+        @Override
+        public void run() {
+          ready.countDown();
+          try {
+            start.await();
+            assertEquals("a:b\nc:d\n",
+                         readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
+            int serverThreads = server.webServer.getThreadPool().getThreads();
+            assertTrue("More threads are started than expected, Server Threads count: "
+                    + serverThreads, serverThreads <= MAX_THREADS);
+            System.out.println("Number of threads = " + serverThreads +
+                " which is less than or equal to the max = " + MAX_THREADS);
+          } catch (Exception e) {
+            // do nothing
+          }
+        }
+      });
+    }
+    // Start the client threads when they are all ready
+    ready.await();
+    start.countDown();
+  }
+  
+  @Test public void testEcho() throws Exception {
+    assertEquals("a:b\nc:d\n", 
+                 readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
+    assertEquals("a:b\nc&lt;:d\ne:&gt;\n", 
+                 readOutput(new URL(baseUrl, "/echo?a=b&c<=d&e=>")));    
+  }
+  
+  /** Test the echo map servlet that uses getParameterMap. */
+  @Test public void testEchoMap() throws Exception {
+    assertEquals("a:b\nc:d\n", 
+                 readOutput(new URL(baseUrl, "/echomap?a=b&c=d")));
+    assertEquals("a:b,&gt;\nc&lt;:d\n", 
+                 readOutput(new URL(baseUrl, "/echomap?a=b&c<=d&a=>")));
+  }
+
+  /** 
+   *  Test that verifies headers can be up to 64K long. 
+   *  The test adds a 63K header leaving 1K for other headers.
+   *  This is because the header buffer setting is for ALL headers,
+   *  names and values included. */
+  @Test public void testLongHeader() throws Exception {
+    URL url = new URL(baseUrl, "/longheader");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    StringBuilder sb = new StringBuilder();
+    for (int i = 0 ; i < 63 * 1024; i++) {
+      sb.append("a");
+    }
+    conn.setRequestProperty("longheader", sb.toString());
+    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+  }
+
+  @Test
+  @Ignore
+  public void testContentTypes() throws Exception {
+    // Static CSS files should have text/css
+    URL cssUrl = new URL(baseUrl, "/static/test.css");
+    HttpURLConnection conn = (HttpURLConnection)cssUrl.openConnection();
+    conn.connect();
+    assertEquals(200, conn.getResponseCode());
+    assertEquals("text/css", conn.getContentType());
+
+    // Servlets should have text/plain with proper encoding by default
+    URL servletUrl = new URL(baseUrl, "/echo?a=b");
+    conn = (HttpURLConnection)servletUrl.openConnection();
+    conn.connect();
+    assertEquals(200, conn.getResponseCode());
+    assertEquals("text/plain; charset=utf-8", conn.getContentType());
+
+    // We should ignore parameters for mime types - ie a parameter
+    // ending in .css should not change mime type
+    servletUrl = new URL(baseUrl, "/echo?a=b.css");
+    conn = (HttpURLConnection)servletUrl.openConnection();
+    conn.connect();
+    assertEquals(200, conn.getResponseCode());
+    assertEquals("text/plain; charset=utf-8", conn.getContentType());
+
+    // Servlets that specify text/html should get that content type
+    servletUrl = new URL(baseUrl, "/htmlcontent");
+    conn = (HttpURLConnection)servletUrl.openConnection();
+    conn.connect();
+    assertEquals(200, conn.getResponseCode());
+    assertEquals("text/html; charset=utf-8", conn.getContentType());
+
+    // JSPs should default to text/html with utf8
+    servletUrl = new URL(baseUrl, "/testjsp.jsp");
+    conn = (HttpURLConnection)servletUrl.openConnection();
+    conn.connect();
+    assertEquals(200, conn.getResponseCode());
+    assertEquals("text/html; charset=utf-8", conn.getContentType());
+  }
+
+  /**
+   * Dummy filter that mimics an authentication filter. It obtains the user
+   * identity from the request parameter user.name and wraps the request so
+   * that request.getRemoteUser() returns that identity.
+   *
+   */
+  public static class DummyServletFilter implements Filter {
+    @Override
+    public void destroy() { }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain filterChain) throws IOException, ServletException {
+      final String userName = request.getParameter("user.name");
+      ServletRequest requestModified =
+        new HttpServletRequestWrapper((HttpServletRequest) request) {
+        @Override
+        public String getRemoteUser() {
+          return userName;
+        }
+      };
+      filterChain.doFilter(requestModified, response);
+    }
+
+    @Override
+    public void init(FilterConfig arg0) throws ServletException { }
+  }
+
+  /**
+   * FilterInitializer that initializes the DummyServletFilter.
+   *
+   */
+  public static class DummyFilterInitializer extends FilterInitializer {
+    public DummyFilterInitializer() {
+    }
+
+    @Override
+    public void initFilter(FilterContainer container, Configuration conf) {
+      container.addFilter("DummyFilter", DummyServletFilter.class.getName(), null);
+    }
+  }
+
+  /**
+   * Access a URL and get the corresponding HTTP status code. The URL is
+   * accessed as the passed user by sending the user.name request
+   * parameter.
+   *
+   * @param urlstring URL to access
+   * @param userName user to access the URL as
+   * @return the HTTP status code of the response
+   * @throws IOException if the connection fails
+   */
+  static int getHttpStatusCode(String urlstring, String userName)
+      throws IOException {
+    URL url = new URL(urlstring + "?user.name=" + userName);
+    System.out.println("Accessing " + url + " as user " + userName);
+    HttpURLConnection connection = (HttpURLConnection)url.openConnection();
+    connection.connect();
+    return connection.getResponseCode();
+  }
+
+  /**
+   * Custom user->group mapping service.
+   */
+  public static class MyGroupsProvider extends ShellBasedUnixGroupsMapping {
+    static Map<String, List<String>> mapping = new HashMap<String, List<String>>();
+
+    static void clearMapping() {
+      mapping.clear();
+    }
+
+    @Override
+    public List<String> getGroups(String user) throws IOException {
+      return mapping.get(user);
+    }
+  }
+
+  /**
+   * Verify access to the /logs, /stacks, /conf, /logLevel and /metrics
+   * servlets when authentication filters are set but authorization is not
+   * enabled.
+   * @throws Exception 
+   */
+  @Test
+  @Ignore
+  public void testDisabledAuthorizationOfDefaultServlets() throws Exception {
+
+    Configuration conf = new Configuration();
+
+    // Authorization is disabled by default
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        DummyFilterInitializer.class.getName());
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        MyGroupsProvider.class.getName());
+    Groups.getUserToGroupsMappingService(conf);
+    MyGroupsProvider.clearMapping();
+    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
+    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
+
+    HttpServer myServer = new HttpServer.Builder().setName("test")
+        .addEndpoint(new URI("http://localhost:0")).setFindPort(true).build();
+    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
+    myServer.start();
+    String serverURL = "http://" + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
+    for (String servlet : new String[] { "conf", "logs", "stacks",
+        "logLevel", "metrics" }) {
+      for (String user : new String[] { "userA", "userB" }) {
+        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
+            + servlet, user));
+      }
+    }
+    myServer.stop();
+  }
+
+  /**
+   * Verify administrator access to the /logs, /stacks, /conf, /logLevel and
+   * /metrics servlets.
+   * 
+   * @throws Exception
+   */
+  @Test
+  @Ignore
+  public void testAuthorizationOfDefaultServlets() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
+        true);
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
+        true);
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        DummyFilterInitializer.class.getName());
+
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        MyGroupsProvider.class.getName());
+    Groups.getUserToGroupsMappingService(conf);
+    MyGroupsProvider.clearMapping();
+    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
+    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
+    MyGroupsProvider.mapping.put("userC", Arrays.asList("groupC"));
+    MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
+    MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
+
+    HttpServer myServer = new HttpServer.Builder().setName("test")
+        .addEndpoint(new URI("http://localhost:0")).setFindPort(true).setConf(conf)
+        .setACL(new AccessControlList("userA,userB groupC,groupD")).build();
+    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
+    myServer.start();
+
+    String serverURL = "http://"
+        + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
+    for (String servlet : new String[] { "conf", "logs", "stacks",
+        "logLevel", "metrics" }) {
+      for (String user : new String[] { "userA", "userB", "userC", "userD" }) {
+        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
+            + servlet, user));
+      }
+      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(
+          serverURL + servlet, "userE"));
+    }
+    myServer.stop();
+  }
+  
+  @Test
+  public void testRequestQuoterWithNull() throws Exception {
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    Mockito.doReturn(null).when(request).getParameterValues("dummy");
+    RequestQuoter requestQuoter = new RequestQuoter(request);
+    String[] parameterValues = requestQuoter.getParameterValues("dummy");
+    Assert.assertEquals("It should return null "
+        + "when there are no values for the parameter", null, parameterValues);
+  }
+
+  @Test
+  public void testRequestQuoterWithNotNull() throws Exception {
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    String[] values = new String[] { "abc", "def" };
+    Mockito.doReturn(values).when(request).getParameterValues("dummy");
+    RequestQuoter requestQuoter = new RequestQuoter(request);
+    String[] parameterValues = requestQuoter.getParameterValues("dummy");
+    Assert.assertTrue("It should return Parameter Values", Arrays.equals(
+        values, parameterValues));
+  }
+
+  @SuppressWarnings("unchecked")
+  private static Map<String, Object> parse(String jsonString) {
+    return (Map<String, Object>)JSON.parse(jsonString);
+  }
+
+  @Test public void testJersey() throws Exception {
+    LOG.info("BEGIN testJersey()");
+    final String js = readOutput(new URL(baseUrl, "/jersey/foo?op=bar"));
+    final Map<String, Object> m = parse(js);
+    LOG.info("m=" + m);
+    assertEquals("foo", m.get(JerseyResource.PATH));
+    assertEquals("bar", m.get(JerseyResource.OP));
+    LOG.info("END testJersey()");
+  }
+
+  @Test
+  public void testHasAdministratorAccess() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false);
+    ServletContext context = Mockito.mock(ServletContext.class);
+    Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
+    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(null);
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    Mockito.when(request.getRemoteUser()).thenReturn(null);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+
+    //authorization OFF
+    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
+
+    //authorization ON & user NULL
+    response = Mockito.mock(HttpServletResponse.class);
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
+    Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response));
+    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
+
+    //authorization ON & user NOT NULL & ACLs NULL
+    response = Mockito.mock(HttpServletResponse.class);
+    Mockito.when(request.getRemoteUser()).thenReturn("foo");
+    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
+
+    //authorization ON & user NOT NULL & ACLs NOT NULL & user not in ACLs
+    response = Mockito.mock(HttpServletResponse.class);
+    AccessControlList acls = Mockito.mock(AccessControlList.class);
+    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
+    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+    Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response));
+    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
+
+    //authorization ON & user NOT NULL & ACLs NOT NULL & user in ACLs
+    response = Mockito.mock(HttpServletResponse.class);
+    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(true);
+    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
+
+  }
+
+  @Test
+  public void testRequiresAuthorizationAccess() throws Exception {
+    Configuration conf = new Configuration();
+    ServletContext context = Mockito.mock(ServletContext.class);
+    Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+
+    //requires admin access to instrumentation, FALSE by default
+    Assert.assertTrue(HttpServer.isInstrumentationAccessAllowed(context, request, response));
+
+    //requires admin access to instrumentation, TRUE
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN, true);
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
+    AccessControlList acls = Mockito.mock(AccessControlList.class);
+    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
+    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+    Assert.assertFalse(HttpServer.isInstrumentationAccessAllowed(context, request, response));
+  }
+
+  @Test public void testBindAddress() throws Exception {
+    checkBindAddress("localhost", 0, false).stop();
+    // hang onto this one for a bit more testing
+    HttpServer myServer = checkBindAddress("localhost", 0, false);
+    HttpServer myServer2 = null;
+    try { 
+      int port = myServer.getConnectorAddress(0).getPort();
+      // it's already in use, true = expect a higher port
+      myServer2 = checkBindAddress("localhost", port, true);
+      // try to reuse the port
+      port = myServer2.getConnectorAddress(0).getPort();
+      myServer2.stop();
+      assertNull(myServer2.getConnectorAddress(0)); // not bound
+      myServer2.openListeners();
+      assertEquals(port, myServer2.getConnectorAddress(0).getPort()); // expect same port
+    } finally {
+      myServer.stop();
+      if (myServer2 != null) {
+        myServer2.stop();
+      }
+    }
+  }
+  
+  private HttpServer checkBindAddress(String host, int port, boolean findPort)
+      throws Exception {
+    HttpServer server = createServer(host, port);
+    try {
+      // not bound, ephemeral should return requested port (0 for ephemeral)
+      List<?> listeners = (List<?>) Whitebox.getInternalState(server,
+          "listeners");
+      Connector listener = (Connector) Whitebox.getInternalState(
+          listeners.get(0), "listener");
+
+      assertEquals(port, listener.getPort());
+      // verify hostname is what was given
+      server.openListeners();
+      assertEquals(host, server.getConnectorAddress(0).getHostName());
+
+      int boundPort = server.getConnectorAddress(0).getPort();
+      if (port == 0) {
+        assertTrue(boundPort != 0); // ephemeral should now return bound port
+      } else if (findPort) {
+        assertTrue(boundPort > port);
+        // allow a little wiggle room to prevent random test failures if
+        // some consecutive ports are already in use
+        assertTrue(boundPort - port < 8);
+      }
+    } catch (Exception e) {
+      server.stop();
+      throw e;
+    }
+    return server;
+  }
+
+  @Test
+  public void testNoCacheHeader() throws Exception {
+    URL url = new URL(baseUrl, "/echo?a=b&c=d");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+    assertEquals("no-cache", conn.getHeaderField("Cache-Control"));
+    assertEquals("no-cache", conn.getHeaderField("Pragma"));
+    assertNotNull(conn.getHeaderField("Expires"));
+    assertNotNull(conn.getHeaderField("Date"));
+    assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
+  }
+
+  /**
+   * HttpServer.Builder should proceed if an external connector is available.
+   */
+  @Test
+  public void testHttpServerBuilderWithExternalConnector() throws Exception {
+    Connector c = mock(Connector.class);
+    doReturn("localhost").when(c).getHost();
+    HttpServer s = new HttpServer.Builder().setName("test").setConnector(c)
+        .build();
+    s.stop();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
new file mode 100644
index 0000000..2d139e1
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.http.HttpRequestLogAppender;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.log4j.Logger;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
+
+  /**
+   * Check that a server is alive by probing the {@link HttpServer#isAlive()} method
+   * and the text of its toString() description
+   * @param server server
+   */
+  private void assertAlive(HttpServer server) {
+    assertTrue("Server is not alive", server.isAlive());
+    assertToStringContains(server, HttpServer.STATE_DESCRIPTION_ALIVE);
+  }
+
+  private void assertNotLive(HttpServer server) {
+    assertTrue("Server should not be live", !server.isAlive());
+    assertToStringContains(server, HttpServer.STATE_DESCRIPTION_NOT_LIVE);
+  }
+
+  /**
+   * Test that a newly created server is not alive until started
+   *
+   * @throws Throwable on failure
+   */
+  @Test public void testCreatedServerIsNotAlive() throws Throwable {
+    HttpServer server = createTestServer();
+    assertNotLive(server);
+  }
+
+  @Test public void testStopUnstartedServer() throws Throwable {
+    HttpServer server = createTestServer();
+    stop(server);
+  }
+
+  /**
+   * Test that the server is alive once started
+   *
+   * @throws Throwable on failure
+   */
+  @Test
+  public void testStartedServerIsAlive() throws Throwable {
+    HttpServer server = null;
+    server = createTestServer();
+    assertNotLive(server);
+    server.start();
+    assertAlive(server);
+    stop(server);
+  }
+
+  /**
+   * Test that the server starts correctly with request logging enabled
+   *
+   * @throws Throwable on failure
+   */
+  @Test
+  public void testStartedServerWithRequestLog() throws Throwable {
+    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
+    requestLogAppender.setName("httprequestlog");
+    requestLogAppender.setFilename(System.getProperty("test.build.data", "/tmp/")
+        + "jetty-name-yyyy_mm_dd.log");
+    Logger.getLogger(HttpServer.class.getName() + ".test").addAppender(requestLogAppender);
+    HttpServer server = null;
+    server = createTestServer();
+    assertNotLive(server);
+    server.start();
+    assertAlive(server);
+    stop(server);
+    Logger.getLogger(HttpServer.class.getName() + ".test").removeAppender(requestLogAppender);
+  }
+
+  /**
+   * Assert that the result of {@link HttpServer#toString()} contains the specific text
+   * @param server server to examine
+   * @param text text to search for
+   */
+  private void assertToStringContains(HttpServer server, String text) {
+    String description = server.toString();
+    assertTrue("Did not find \"" + text + "\" in \"" + description + "\"",
+               description.contains(text));
+  }
+
+  /**
+   * Test that the server is not alive once stopped
+   *
+   * @throws Throwable on failure
+   */
+  @Test public void testStoppedServerIsNotAlive() throws Throwable {
+    HttpServer server = createAndStartTestServer();
+    assertAlive(server);
+    stop(server);
+    assertNotLive(server);
+  }
+
+  /**
+   * Test that stopping an already-stopped server is allowed
+   *
+   * @throws Throwable on failure
+   */
+  @Test public void testStoppingTwiceServerIsAllowed() throws Throwable {
+    HttpServer server = createAndStartTestServer();
+    assertAlive(server);
+    stop(server);
+    assertNotLive(server);
+    stop(server);
+    assertNotLive(server);
+  }
+
+  /**
+   * Test that webapp context attributes are cleared once the server is stopped
+   * 
+   * @throws Throwable
+   *           on failure
+   */
+  @Test
+  public void testWepAppContextAfterServerStop() throws Throwable {
+    HttpServer server = null;
+    String key = "test.attribute.key";
+    String value = "test.attribute.value";
+    server = createTestServer();
+    assertNotLive(server);
+    server.start();
+    server.setAttribute(key, value);
+    assertAlive(server);
+    assertEquals(value, server.getAttribute(key));
+    stop(server);
+    assertNull("Server context should have cleared", server.getAttribute(key));
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
new file mode 100644
index 0000000..ce6da2b
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.http.HttpServer;
+
+import java.io.FileNotFoundException;
+
+/**
+ * Test webapp loading
+ */
+@Category(SmallTests.class)
+public class TestHttpServerWebapps extends HttpServerFunctionalTest {
+  private static final Log log = LogFactory.getLog(TestHttpServerWebapps.class);
+
+  /**
+   * Test that the test server is loadable on the classpath
+   * @throws Throwable if something went wrong
+   */
+  @Test
+  public void testValidServerResource() throws Throwable {
+    HttpServer server = null;
+    try {
+      server = createServer("test");
+    } finally {
+      stop(server);
+    }
+  }
+
+  /**
+   * Test that an invalid webapp triggers an exception
+   * @throws Throwable if something went wrong
+   */
+  @Test
+  public void testMissingServerResource() throws Throwable {
+    try {
+      HttpServer server = createServer("NoSuchWebapp");
+      //should not have got here.
+      //close the server
+      String serverDescription = server.toString();
+      stop(server);
+      fail("Expected an exception, got " + serverDescription);
+    } catch (FileNotFoundException expected) {
+      log.debug("Expected exception " + expected, expected);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
new file mode 100644
index 0000000..9fade20
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Set;
+import java.util.TreeSet;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.http.FilterContainer;
+import org.apache.hadoop.hbase.http.FilterInitializer;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.net.NetUtils;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestPathFilter extends HttpServerFunctionalTest {
+  static final Log LOG = LogFactory.getLog(HttpServer.class);
+  static final Set<String> RECORDS = new TreeSet<String>(); 
+
+  /** A very simple filter that records accessed URIs. */
+  static public class RecordingFilter implements Filter {
+    private FilterConfig filterConfig = null;
+
+    @Override
+    public void init(FilterConfig filterConfig) {
+      this.filterConfig = filterConfig;
+    }
+
+    @Override
+    public void destroy() {
+      this.filterConfig = null;
+    }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain chain) throws IOException, ServletException {
+      if (filterConfig == null)
+         return;
+
+      String uri = ((HttpServletRequest)request).getRequestURI();
+      LOG.info("filtering " + uri);
+      RECORDS.add(uri);
+      chain.doFilter(request, response);
+    }
+
+    /** Configuration for RecordingFilter */
+    static public class Initializer extends FilterInitializer {
+      public Initializer() {}
+
+      @Override
+      public void initFilter(FilterContainer container, Configuration conf) {
+        container.addFilter("recording", RecordingFilter.class.getName(), null);
+      }
+    }
+  }
+  
+  
+  /** Access a URL, ignoring IOExceptions such as those thrown when the page does not exist. */
+  static void access(String urlstring) throws IOException {
+    LOG.warn("access " + urlstring);
+    URL url = new URL(urlstring);
+    
+    URLConnection connection = url.openConnection();
+    connection.connect();
+    
+    try {
+      BufferedReader in = new BufferedReader(new InputStreamReader(
+          connection.getInputStream()));
+      try {
+        for(; in.readLine() != null; );
+      } finally {
+        in.close();
+      }
+    } catch(IOException ioe) {
+      LOG.warn("urlstring=" + urlstring, ioe);
+    }
+  }
+
+  @Test
+  public void testPathSpecFilters() throws Exception {
+    Configuration conf = new Configuration();
+    
+    //start an http server with RecordingFilter
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        RecordingFilter.Initializer.class.getName());
+    String[] pathSpecs = { "/path", "/path/*" };
+    HttpServer http = createTestServer(conf, pathSpecs);
+    http.start();
+
+    final String baseURL = "/path";
+    final String baseSlashURL = "/path/";
+    final String addedURL = "/path/nodes";
+    final String addedSlashURL = "/path/nodes/";
+    final String longURL = "/path/nodes/foo/job";
+    final String rootURL = "/";
+    final String allURL = "/*";
+
+    final String[] filteredUrls = {baseURL, baseSlashURL, addedURL, 
+        addedSlashURL, longURL};
+    final String[] notFilteredUrls = {rootURL, allURL};
+
+    // access the urls and verify our path specs got added to the
+    // filters
+    final String prefix = "http://"
+        + NetUtils.getHostPortString(http.getConnectorAddress(0));
+    try {
+      for(int i = 0; i < filteredUrls.length; i++) {
+        access(prefix + filteredUrls[i]);
+      }
+      for(int i = 0; i < notFilteredUrls.length; i++) {
+        access(prefix + notFilteredUrls[i]);
+      }
+    } finally {
+      http.stop();
+    }
+
+    LOG.info("RECORDS = " + RECORDS);
+    
+    //verify records
+    for(int i = 0; i < filteredUrls.length; i++) {
+      assertTrue(RECORDS.remove(filteredUrls[i]));
+    }
+    assertTrue(RECORDS.isEmpty());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
new file mode 100644
index 0000000..8244ba9
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URL;
+
+import javax.net.ssl.HttpsURLConnection;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * This test case issues SSL certificates, configures the HttpServer to serve
+ * HTTPS using the created certificates, and calls an echo servlet using the
+ * corresponding HTTPS URL.
+ */
+@Category(SmallTests.class)
+public class TestSSLHttpServer extends HttpServerFunctionalTest {
+  private static final String BASEDIR = System.getProperty("test.build.dir",
+      "target/test-dir") + "/" + TestSSLHttpServer.class.getSimpleName();
+
+  private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class);
+  private static Configuration conf;
+  private static HttpServer server;
+  private static URL baseUrl;
+  private static String keystoresDir;
+  private static String sslConfDir;
+  private static SSLFactory clientSslFactory;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    conf = new Configuration();
+    conf.setInt(HttpServer.HTTP_MAX_THREADS, 10);
+
+    File base = new File(BASEDIR);
+    FileUtil.fullyDelete(base);
+    base.mkdirs();
+    keystoresDir = new File(BASEDIR).getAbsolutePath();
+    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
+
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
+    Configuration sslConf = new Configuration(false);
+    sslConf.addResource("ssl-server.xml");
+    sslConf.addResource("ssl-client.xml");
+
+    clientSslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, sslConf);
+    clientSslFactory.init();
+
+    server = new HttpServer.Builder()
+        .setName("test")
+        .addEndpoint(new URI("https://localhost"))
+        .setConf(conf)
+        .keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
+        .keyStore(sslConf.get("ssl.server.keystore.location"),
+            sslConf.get("ssl.server.keystore.password"),
+            sslConf.get("ssl.server.keystore.type", "jks"))
+        .trustStore(sslConf.get("ssl.server.truststore.location"),
+            sslConf.get("ssl.server.truststore.password"),
+            sslConf.get("ssl.server.truststore.type", "jks")).build();
+    server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
+    server.start();
+    baseUrl = new URL("https://"
+        + NetUtils.getHostPortString(server.getConnectorAddress(0)));
+    LOG.info("HTTP server started: " + baseUrl);
+  }
+
+  @AfterClass
+  public static void cleanup() throws Exception {
+    server.stop();
+    FileUtil.fullyDelete(new File(BASEDIR));
+    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
+    clientSslFactory.destroy();
+  }
+
+  @Test
+  public void testEcho() throws Exception {
+    assertEquals("a:b\nc:d\n", readOut(new URL(baseUrl, "/echo?a=b&c=d")));
+    assertEquals("a:b\nc&lt;:d\ne:&gt;\n", readOut(new URL(baseUrl,
+        "/echo?a=b&c<=d&e=>")));
+  }
+
+  private static String readOut(URL url) throws Exception {
+    HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
+    conn.setSSLSocketFactory(clientSslFactory.createSSLSocketFactory());
+    InputStream in = conn.getInputStream();
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    IOUtils.copyBytes(in, out, 1024);
+    return out.toString();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
new file mode 100644
index 0000000..9864827
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
@@ -0,0 +1,204 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Random;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.GenericTestUtils;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.net.NetUtils;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestServletFilter extends HttpServerFunctionalTest {
+  static final Log LOG = LogFactory.getLog(HttpServer.class);
+  static volatile String uri = null; 
+
+  /** A very simple filter which records the filtered URI. */
+  static public class SimpleFilter implements Filter {
+    private FilterConfig filterConfig = null;
+
+    @Override
+    public void init(FilterConfig filterConfig) throws ServletException {
+      this.filterConfig = filterConfig;
+    }
+
+    @Override
+    public void destroy() {
+      this.filterConfig = null;
+    }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain chain) throws IOException, ServletException {
+      if (filterConfig == null)
+         return;
+
+      uri = ((HttpServletRequest)request).getRequestURI();
+      LOG.info("filtering " + uri);
+      chain.doFilter(request, response);
+    }
+
+    /** Configuration for the filter */
+    static public class Initializer extends FilterInitializer {
+      public Initializer() {}
+
+      @Override
+      public void initFilter(FilterContainer container, Configuration conf) {
+        container.addFilter("simple", SimpleFilter.class.getName(), null);
+      }
+    }
+  }
+  
+  
+  /** Access a URL, ignoring IOExceptions such as those thrown when the page does not exist. */
+  static void access(String urlstring) throws IOException {
+    LOG.warn("access " + urlstring);
+    URL url = new URL(urlstring);
+    URLConnection connection = url.openConnection();
+    connection.connect();
+    
+    try {
+      BufferedReader in = new BufferedReader(new InputStreamReader(
+          connection.getInputStream()));
+      try {
+        for(; in.readLine() != null; );
+      } finally {
+        in.close();
+      }
+    } catch(IOException ioe) {
+      LOG.warn("urlstring=" + urlstring, ioe);
+    }
+  }
+
+  @Test
+  public void testServletFilter() throws Exception {
+    Configuration conf = new Configuration();
+    
+    //start an http server with SimpleFilter
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        SimpleFilter.Initializer.class.getName());
+    HttpServer http = createTestServer(conf);
+    http.start();
+
+    final String fsckURL = "/fsck";
+    final String stacksURL = "/stacks";
+    final String ajspURL = "/a.jsp";
+    final String logURL = "/logs/a.log";
+    final String hadooplogoURL = "/static/hadoop-logo.jpg";
+    
+    final String[] urls = {fsckURL, stacksURL, ajspURL, logURL, hadooplogoURL};
+    final Random ran = new Random();
+    final int[] sequence = new int[50];
+
+    //generate a random sequence of url indices to access
+    for(int i = 0; i < sequence.length; i++) {
+      sequence[i] = ran.nextInt(urls.length);
+    }
+
+    //access the urls as the sequence
+    final String prefix = "http://"
+        + NetUtils.getHostPortString(http.getConnectorAddress(0));
+    try {
+      for(int i = 0; i < sequence.length; i++) {
+        access(prefix + urls[sequence[i]]);
+
+        //make sure everything except fsck gets filtered
+        if (sequence[i] == 0) {
+          assertEquals(null, uri);
+        } else {
+          assertEquals(urls[sequence[i]], uri);
+          uri = null;
+        }
+      }
+    } finally {
+      http.stop();
+    }
+  }
+  
+  static public class ErrorFilter extends SimpleFilter {
+    @Override
+    public void init(FilterConfig arg0) throws ServletException {
+      throw new ServletException("Throwing the exception from Filter init");
+    }
+
+    /** Configuration for the filter */
+    static public class Initializer extends FilterInitializer {
+      public Initializer() {
+      }
+
+      @Override
+      public void initFilter(FilterContainer container, Configuration conf) {
+        container.addFilter("simple", ErrorFilter.class.getName(), null);
+      }
+    }
+  }
+
+  @Test
+  public void testServletFilterWhenInitThrowsException() throws Exception {
+    Configuration conf = new Configuration();
+    // start a http server with ErrorFilter
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        ErrorFilter.Initializer.class.getName());
+    HttpServer http = createTestServer(conf);
+    try {
+      http.start();
+      fail("expecting exception");
+    } catch (IOException e) {
+      assertTrue( e.getMessage().contains("Problem in starting http server. Server handlers failed"));
+    }
+  }
+  
+  /**
+   * Similar to the above test case, except that it uses a different API to add the
+   * filter. Regression test for HADOOP-8786.
+   */
+  @Test
+  public void testContextSpecificServletFilterWhenInitThrowsException()
+      throws Exception {
+    Configuration conf = new Configuration();
+    HttpServer http = createTestServer(conf);
+    HttpServer.defineFilter(http.webAppContext,
+        "ErrorFilter", ErrorFilter.class.getName(),
+        null, null);
+    try {
+      http.start();
+      fail("expecting exception");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains("Unable to initialize WebAppContext", e);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
new file mode 100644
index 0000000..f805cd6
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.conf;
+
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Map;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.http.conf.ConfServlet;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mortbay.util.ajax.JSON;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+/**
+ * Basic test case verifying that the ConfServlet can write configuration
+ * to its output in XML and JSON formats.
+ */
+@Category(SmallTests.class)
+public class TestConfServlet extends TestCase {
+  private static final String TEST_KEY = "testconfservlet.key";
+  private static final String TEST_VAL = "testval";
+
+  private Configuration getTestConf() {
+    Configuration testConf = new Configuration();
+    testConf.set(TEST_KEY, TEST_VAL);
+    return testConf;
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testWriteJson() throws Exception {
+    StringWriter sw = new StringWriter();
+    ConfServlet.writeResponse(getTestConf(), sw, "json");
+    String json = sw.toString();
+    boolean foundSetting = false;
+    Object parsed = JSON.parse(json);
+    Object[] properties = ((Map<String, Object[]>)parsed).get("properties");
+    for (Object o : properties) {
+      Map<String, Object> propertyInfo = (Map<String, Object>)o;
+      String key = (String)propertyInfo.get("key");
+      String val = (String)propertyInfo.get("value");
+      String resource = (String)propertyInfo.get("resource");
+      System.err.println("k: " + key + " v: " + val + " r: " + resource);
+      if (TEST_KEY.equals(key) && TEST_VAL.equals(val)
+          && "programatically".equals(resource)) {
+        foundSetting = true;
+      }
+    }
+    assertTrue(foundSetting);
+  }
+
+  @Test
+  public void testWriteXml() throws Exception {
+    StringWriter sw = new StringWriter();
+    ConfServlet.writeResponse(getTestConf(), sw, "xml");
+    String xml = sw.toString();
+
+    DocumentBuilderFactory docBuilderFactory 
+      = DocumentBuilderFactory.newInstance();
+    DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
+    Document doc = builder.parse(new InputSource(new StringReader(xml)));
+    NodeList nameNodes = doc.getElementsByTagName("name");
+    boolean foundSetting = false;
+    for (int i = 0; i < nameNodes.getLength(); i++) {
+      Node nameNode = nameNodes.item(i);
+      String key = nameNode.getTextContent();
+      System.err.println("xml key: " + key);
+      if (TEST_KEY.equals(key)) {
+        foundSetting = true;
+        Element propertyElem = (Element)nameNode.getParentNode();
+        String val = propertyElem.getElementsByTagName("value").item(0).getTextContent();
+        assertEquals(TEST_VAL, val);
+      }
+    }
+    assertTrue(foundSetting);
+  }
+
+  @Test
+  public void testBadFormat() throws Exception {
+    StringWriter sw = new StringWriter();
+    try {
+      ConfServlet.writeResponse(getTestConf(), sw, "not a format");
+      fail("writeResponse with bad format didn't throw!");
+    } catch (ConfServlet.BadFormatException bfe) {
+      // expected
+    }
+    assertEquals("", sw.toString());
+  }
+}
\ No newline at end of file
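
For reference, the JSON that testWriteJson parses has roughly this shape (the key and value
shown are the test's own TEST_KEY/TEST_VAL; exact whitespace differs):

    {"properties": [
      {"key": "testconfservlet.key", "value": "testval", "resource": "programatically"},
      ...
    ]}

The misspelled "programatically" is the resource label Hadoop's Configuration uses for values
set in code, so the test matches it verbatim rather than the correct spelling.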

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
new file mode 100644
index 0000000..b95feba
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.http.jmx;
+
+import java.net.URL;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.hbase.http.HttpServerFunctionalTest;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestJMXJsonServlet extends HttpServerFunctionalTest {
+  private static final Log LOG = LogFactory.getLog(TestJMXJsonServlet.class);
+  private static HttpServer server;
+  private static URL baseUrl;
+
+  @BeforeClass public static void setup() throws Exception {
+    server = createTestServer();
+    server.start();
+    baseUrl = getServerURL(server);
+  }
+  
+  @AfterClass public static void cleanup() throws Exception {
+    server.stop();
+  }
+  
+  public static void assertReFind(String re, String value) {
+    Pattern p = Pattern.compile(re);
+    Matcher m = p.matcher(value);
+    assertTrue("'"+p+"' does not match "+value, m.find());
+  }
+  
+  @Test public void testQuery() throws Exception {
+    String result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Runtime"));
+    LOG.info("/jmx?qry=java.lang:type=Runtime RESULT: "+result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Runtime\"", result);
+    assertReFind("\"modelerType\"", result);
+    
+    result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory"));
+    LOG.info("/jmx?qry=java.lang:type=Memory RESULT: "+result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
+    assertReFind("\"modelerType\"", result);
+    
+    result = readOutput(new URL(baseUrl, "/jmx"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
+    
+    // test to get an attribute of an MBean
+    result = readOutput(new URL(baseUrl, 
+        "/jmx?get=java.lang:type=Memory::HeapMemoryUsage"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
+    assertReFind("\"committed\"\\s*:", result);
+    
+    // negative test to get an attribute of an MBean
+    result = readOutput(new URL(baseUrl, 
+        "/jmx?get=java.lang:type=Memory::"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("\"ERROR\"", result);
+
+    // test to get JSONP result
+    result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory&callback=mycallback1"));
+    LOG.info("/jmx?qry=java.lang:type=Memory&callback=mycallback RESULT: "+result);
+    assertReFind("^mycallback1\\(\\{", result);
+    assertReFind("\\}\\);$", result);
+
+    // negative test to get an attribute of an MBean as JSONP
+    result = readOutput(new URL(baseUrl,
+        "/jmx?get=java.lang:type=Memory::&callback=mycallback2"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("^mycallback2\\(\\{", result);
+    assertReFind("\"ERROR\"", result);
+    assertReFind("\\}\\);$", result);
+
+    // test to get an attribute of an MBean as JSONP
+    result = readOutput(new URL(baseUrl,
+        "/jmx?get=java.lang:type=Memory::HeapMemoryUsage&callback=mycallback3"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("^mycallback3\\(\\{", result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
+    assertReFind("\"committed\"\\s*:", result);
+    assertReFind("\\}\\);$", result);
+
+  }
+}
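
The query shapes exercised by testQuery, collected for quick reference (all taken directly
from the request URLs above):

    /jmx                                               dump all registered MBeans as JSON
    /jmx?qry=java.lang:type=Memory                     restrict the dump to MBeans matching the ObjectName
    /jmx?get=java.lang:type=Memory::HeapMemoryUsage    fetch a single attribute (ObjectName::attribute)
    /jmx?...&callback=mycallback1                      wrap the JSON in a JSONP callback; failures surface as "ERROR"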

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
new file mode 100644
index 0000000..998c852
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.http.ServerConfigurationKeys;
+import org.apache.hadoop.hbase.http.lib.StaticUserWebFilter.StaticUserFilter;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
+
+@Category(SmallTests.class)
+public class TestStaticUserWebFilter {
+  private FilterConfig mockConfig(String username) {
+    FilterConfig mock = Mockito.mock(FilterConfig.class);
+    Mockito.doReturn(username).when(mock).getInitParameter(
+            ServerConfigurationKeys.HBASE_HTTP_STATIC_USER);
+    return mock;
+  }
+  
+  @Test
+  public void testFilter() throws Exception {
+    FilterConfig config = mockConfig("myuser");
+    StaticUserFilter suf = new StaticUserFilter();
+    suf.init(config);
+    
+    ArgumentCaptor<HttpServletRequestWrapper> wrapperArg =
+      ArgumentCaptor.forClass(HttpServletRequestWrapper.class);
+
+    FilterChain chain = mock(FilterChain.class);
+    
+    suf.doFilter(mock(HttpServletRequest.class), mock(ServletResponse.class),
+        chain);
+        
+    Mockito.verify(chain).doFilter(wrapperArg.capture(), Mockito.<ServletResponse>anyObject());
+    
+    HttpServletRequestWrapper wrapper = wrapperArg.getValue();
+    assertEquals("myuser", wrapper.getUserPrincipal().getName());
+    assertEquals("myuser", wrapper.getRemoteUser());
+    
+    suf.destroy();
+  }
+  
+  @Test
+  public void testOldStyleConfiguration() {
+    Configuration conf = new Configuration();
+    conf.set("dfs.web.ugi", "joe,group1,group2");
+    assertEquals("joe", StaticUserWebFilter.getUsernameFromConf(conf));
+  }
+
+  @Test
+  public void testConfiguration() {
+    Configuration conf = new Configuration();
+    conf.set(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, "dr.stack");
+    assertEquals("dr.stack", StaticUserWebFilter.getUsernameFromConf(conf));
+  }
+
+}
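
A minimal sketch of resolving the static web user outside the filter, using only the pieces the
tests above touch; the "hbase" value is illustrative:

    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, "hbase"); // illustrative value
    String user = StaticUserWebFilter.getUsernameFromConf(conf);
    // the old-style dfs.web.ugi key is also understood, per testOldStyleConfiguration above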

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
new file mode 100644
index 0000000..3ca23d0
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
@@ -0,0 +1,85 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.hadoop.hbase.http.log;
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.net.URI;
+import java.net.URL;
+import java.net.URLConnection;
+
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+public class TestLogLevel extends TestCase {
+  static final PrintStream out = System.out;
+
+  public void testDynamicLogLevel() throws Exception {
+    String logName = TestLogLevel.class.getName();
+    Log testlog = LogFactory.getLog(logName);
+
+    //only test Log4JLogger
+    if (testlog instanceof Log4JLogger) {
+      Logger log = ((Log4JLogger)testlog).getLogger();
+      log.debug("log.debug1");
+      log.info("log.info1");
+      log.error("log.error1");
+      assertTrue(!Level.ERROR.equals(log.getEffectiveLevel()));
+
+      HttpServer server = new HttpServer.Builder().setName("..")
+          .addEndpoint(new URI("http://localhost:0")).setFindPort(true)
+          .build();
+      
+      server.start();
+      String authority = NetUtils.getHostPortString(server
+          .getConnectorAddress(0));
+
+      //servlet
+      URL url = new URL("http://" + authority + "/logLevel?log=" + logName
+          + "&level=" + Level.ERROR);
+      out.println("*** Connecting to " + url);
+      URLConnection connection = url.openConnection();
+      connection.connect();
+
+      BufferedReader in = new BufferedReader(new InputStreamReader(
+          connection.getInputStream()));
+      for(String line; (line = in.readLine()) != null; out.println(line));
+      in.close();
+
+      log.debug("log.debug2");
+      log.info("log.info2");
+      log.error("log.error2");
+      assertTrue(Level.ERROR.equals(log.getEffectiveLevel()));
+
+      //command line
+      String[] args = {"-setlevel", authority, logName, Level.DEBUG.toString()};
+      LogLevel.main(args);
+      log.debug("log.debug3");
+      log.info("log.info3");
+      log.error("log.error3");
+      assertTrue(Level.DEBUG.equals(log.getEffectiveLevel()));
+    }
+    else {
+      out.println(testlog.getClass() + " not tested.");
+    }
+  }
+}
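
The two ways of changing a level that the test drives, side by side (authority is whatever
host:port the embedded HttpServer bound to):

    // via the servlet
    URL url = new URL("http://" + authority + "/logLevel?log=" + logName + "&level=" + Level.ERROR);
    // via the command-line tool
    LogLevel.main(new String[] {"-setlevel", authority, logName, Level.DEBUG.toString()});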

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
new file mode 100644
index 0000000..e38ae0f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.resource;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.TreeMap;
+
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.mortbay.util.ajax.JSON;
+
+/**
+ * A simple Jersey resource class used by TestHttpServer.
+ * The servlet simply puts the path and the op parameter in a map
+ * and returns the map in JSON format in the response.
+ */
+@Path("")
+public class JerseyResource {
+  static final Log LOG = LogFactory.getLog(JerseyResource.class);
+
+  public static final String PATH = "path";
+  public static final String OP = "op";
+
+  @GET
+  @Path("{" + PATH + ":.*}")
+  @Produces({MediaType.APPLICATION_JSON})
+  public Response get(
+      @PathParam(PATH) @DefaultValue("UNKNOWN_" + PATH) final String path,
+      @QueryParam(OP) @DefaultValue("UNKNOWN_" + OP) final String op
+      ) throws IOException {
+    LOG.info("get: " + PATH + "=" + path + ", " + OP + "=" + op);
+
+    final Map<String, Object> m = new TreeMap<String, Object>();
+    m.put(PATH, path);
+    m.put(OP, op);
+    final String js = JSON.toString(m);
+    return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+  }
+}
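
As a concrete illustration, a request such as GET <base>/foo?op=bar against this resource would
come back as roughly the following (the exact path value depends on where the resource is
mounted; the keys appear in sorted order because a TreeMap is used):

    {"op":"bar","path":"foo"}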

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
new file mode 100644
index 0000000..248b820
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
@@ -0,0 +1,365 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.http.ssl;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.Writer;
+import java.math.BigInteger;
+import java.net.URL;
+import java.security.GeneralSecurityException;
+import java.security.Key;
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.KeyStore;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivateKey;
+import java.security.SecureRandom;
+import java.security.cert.Certificate;
+import java.security.cert.X509Certificate;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
+import org.apache.hadoop.security.ssl.SSLFactory;
+
+import sun.security.x509.AlgorithmId;
+import sun.security.x509.CertificateAlgorithmId;
+import sun.security.x509.CertificateIssuerName;
+import sun.security.x509.CertificateSerialNumber;
+import sun.security.x509.CertificateSubjectName;
+import sun.security.x509.CertificateValidity;
+import sun.security.x509.CertificateVersion;
+import sun.security.x509.CertificateX509Key;
+import sun.security.x509.X500Name;
+import sun.security.x509.X509CertImpl;
+import sun.security.x509.X509CertInfo;
+
+public class KeyStoreTestUtil {
+
+  public static String getClasspathDir(Class klass) throws Exception {
+    String file = klass.getName();
+    file = file.replace('.', '/') + ".class";
+    URL url = Thread.currentThread().getContextClassLoader().getResource(file);
+    String baseDir = url.toURI().getPath();
+    baseDir = baseDir.substring(0, baseDir.length() - file.length() - 1);
+    return baseDir;
+  }
+
+  /**
+   * Create a self-signed X.509 Certificate.
+   * From http://bfo.com/blog/2011/03/08/odds_and_ends_creating_a_new_x_509_certificate.html.
+   *
+   * @param dn the X.509 Distinguished Name, e.g. "CN=Test, L=London, C=GB"
+   * @param pair the KeyPair
+   * @param days how many days from now the Certificate is valid for
+   * @param algorithm the signing algorithm, e.g. "SHA1withRSA"
+   * @return the self-signed certificate
+   * @throws IOException thrown if an I/O error occurred.
+   * @throws GeneralSecurityException thrown if a security error occurred.
+   */
+  public static X509Certificate generateCertificate(String dn, KeyPair pair,
+                                                    int days, String algorithm)
+    throws GeneralSecurityException, IOException {
+    PrivateKey privkey = pair.getPrivate();
+    X509CertInfo info = new X509CertInfo();
+    Date from = new Date();
+    Date to = new Date(from.getTime() + days * 86400000L);
+    CertificateValidity interval = new CertificateValidity(from, to);
+    BigInteger sn = new BigInteger(64, new SecureRandom());
+    X500Name owner = new X500Name(dn);
+
+    info.set(X509CertInfo.VALIDITY, interval);
+    info.set(X509CertInfo.SERIAL_NUMBER, new CertificateSerialNumber(sn));
+    info.set(X509CertInfo.SUBJECT, new CertificateSubjectName(owner));
+    info.set(X509CertInfo.ISSUER, new CertificateIssuerName(owner));
+    info.set(X509CertInfo.KEY, new CertificateX509Key(pair.getPublic()));
+    info.set(X509CertInfo.VERSION, new CertificateVersion(CertificateVersion.V3));
+    AlgorithmId algo = new AlgorithmId(AlgorithmId.md5WithRSAEncryption_oid);
+    info.set(X509CertInfo.ALGORITHM_ID, new CertificateAlgorithmId(algo));
+
+    // Sign the cert to identify the algorithm that's used.
+    X509CertImpl cert = new X509CertImpl(info);
+    cert.sign(privkey, algorithm);
+
+    // Update the algorithm and re-sign.
+    algo = (AlgorithmId) cert.get(X509CertImpl.SIG_ALG);
+    info.set(CertificateAlgorithmId.NAME + "." + CertificateAlgorithmId.ALGORITHM, algo);
+    cert = new X509CertImpl(info);
+    cert.sign(privkey, algorithm);
+    return cert;
+  }
+
+  public static KeyPair generateKeyPair(String algorithm)
+    throws NoSuchAlgorithmException {
+    KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
+    keyGen.initialize(1024);
+    return keyGen.genKeyPair();
+  }
+
+  private static KeyStore createEmptyKeyStore()
+    throws GeneralSecurityException, IOException {
+    KeyStore ks = KeyStore.getInstance("JKS");
+    ks.load(null, null); // initialize
+    return ks;
+  }
+
+  private static void saveKeyStore(KeyStore ks, String filename,
+                                   String password)
+    throws GeneralSecurityException, IOException {
+    FileOutputStream out = new FileOutputStream(filename);
+    try {
+      ks.store(out, password.toCharArray());
+    } finally {
+      out.close();
+    }
+  }
+
+  public static void createKeyStore(String filename,
+                                    String password, String alias,
+                                    Key privateKey, Certificate cert)
+    throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    ks.setKeyEntry(alias, privateKey, password.toCharArray(),
+                   new Certificate[]{cert});
+    saveKeyStore(ks, filename, password);
+  }
+
+  /**
+   * Creates a keystore with a single key and saves it to a file.
+   * 
+   * @param filename String file to save
+   * @param password String store password to set on keystore
+   * @param keyPassword String key password to set on key
+   * @param alias String alias to use for the key
+   * @param privateKey Key to save in keystore
+   * @param cert Certificate to use as certificate chain associated to key
+   * @throws GeneralSecurityException for any error with the security APIs
+   * @throws IOException if there is an I/O error saving the file
+   */
+  public static void createKeyStore(String filename,
+                                    String password, String keyPassword, String alias,
+                                    Key privateKey, Certificate cert)
+    throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    ks.setKeyEntry(alias, privateKey, keyPassword.toCharArray(),
+                   new Certificate[]{cert});
+    saveKeyStore(ks, filename, password);
+  }
+
+  public static void createTrustStore(String filename,
+                                      String password, String alias,
+                                      Certificate cert)
+    throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    ks.setCertificateEntry(alias, cert);
+    saveKeyStore(ks, filename, password);
+  }
+
+  public static <T extends Certificate> void createTrustStore(
+    String filename, String password, Map<String, T> certs)
+    throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    for (Map.Entry<String, T> cert : certs.entrySet()) {
+      ks.setCertificateEntry(cert.getKey(), cert.getValue());
+    }
+    saveKeyStore(ks, filename, password);
+  }
+
+  public static void cleanupSSLConfig(String keystoresDir, String sslConfDir)
+    throws Exception {
+    File f = new File(keystoresDir + "/clientKS.jks");
+    f.delete();
+    f = new File(keystoresDir + "/serverKS.jks");
+    f.delete();
+    f = new File(keystoresDir + "/trustKS.jks");
+    f.delete();
+    f = new File(sslConfDir + "/ssl-client.xml");
+    f.delete();
+    f = new File(sslConfDir +  "/ssl-server.xml");
+    f.delete();
+  }
+
+  /**
+   * Performs complete setup of SSL configuration in preparation for testing an
+   * SSLFactory.  This includes keys, certs, keystores, truststores, the server
+   * SSL configuration file, the client SSL configuration file, and the master
+   * configuration file read by the SSLFactory.
+   * 
+   * @param keystoresDir String directory to save keystores
+   * @param sslConfDir String directory to save SSL configuration files
+   * @param conf Configuration master configuration to be used by an SSLFactory,
+   *   which will be mutated by this method
+   * @param useClientCert boolean true to make the client present a cert in the
+   *   SSL handshake
+   */
+  public static void setupSSLConfig(String keystoresDir, String sslConfDir,
+                                    Configuration conf, boolean useClientCert)
+    throws Exception {
+    String clientKS = keystoresDir + "/clientKS.jks";
+    String clientPassword = "clientP";
+    String serverKS = keystoresDir + "/serverKS.jks";
+    String serverPassword = "serverP";
+    String trustKS = keystoresDir + "/trustKS.jks";
+    String trustPassword = "trustP";
+
+    File sslClientConfFile = new File(sslConfDir + "/ssl-client.xml");
+    File sslServerConfFile = new File(sslConfDir + "/ssl-server.xml");
+
+    Map<String, X509Certificate> certs = new HashMap<String, X509Certificate>();
+
+    if (useClientCert) {
+      KeyPair cKP = KeyStoreTestUtil.generateKeyPair("RSA");
+      X509Certificate cCert =
+        KeyStoreTestUtil.generateCertificate("CN=localhost, O=client", cKP, 30,
+                                             "SHA1withRSA");
+      KeyStoreTestUtil.createKeyStore(clientKS, clientPassword, "client",
+                                      cKP.getPrivate(), cCert);
+      certs.put("client", cCert);
+    }
+
+    KeyPair sKP = KeyStoreTestUtil.generateKeyPair("RSA");
+    X509Certificate sCert =
+      KeyStoreTestUtil.generateCertificate("CN=localhost, O=server", sKP, 30,
+                                           "SHA1withRSA");
+    KeyStoreTestUtil.createKeyStore(serverKS, serverPassword, "server",
+                                    sKP.getPrivate(), sCert);
+    certs.put("server", sCert);
+
+    KeyStoreTestUtil.createTrustStore(trustKS, trustPassword, certs);
+
+    Configuration clientSSLConf = createClientSSLConfig(clientKS, clientPassword,
+      clientPassword, trustKS);
+    Configuration serverSSLConf = createServerSSLConfig(serverKS, serverPassword,
+      serverPassword, trustKS);
+
+    saveConfig(sslClientConfFile, clientSSLConf);
+    saveConfig(sslServerConfFile, serverSSLConf);
+
+    conf.set(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
+    conf.set(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfFile.getName());
+    conf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile.getName());
+    conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, useClientCert);
+  }
+
+  /**
+   * Creates SSL configuration for a client.
+   * 
+   * @param clientKS String client keystore file
+   * @param password String store password, or null to avoid setting store
+   *   password
+   * @param keyPassword String key password, or null to avoid setting key
+   *   password
+   * @param trustKS String truststore file
+   * @return Configuration for client SSL
+   */
+  public static Configuration createClientSSLConfig(String clientKS,
+      String password, String keyPassword, String trustKS) {
+    Configuration clientSSLConf = createSSLConfig(SSLFactory.Mode.CLIENT,
+      clientKS, password, keyPassword, trustKS);
+    return clientSSLConf;
+  }
+
+  /**
+   * Creates SSL configuration for a server.
+   * 
+   * @param serverKS String server keystore file
+   * @param password String store password, or null to avoid setting store
+   *   password
+   * @param keyPassword String key password, or null to avoid setting key
+   *   password
+   * @param trustKS String truststore file
+   * @return Configuration for server SSL
+   */
+  public static Configuration createServerSSLConfig(String serverKS,
+      String password, String keyPassword, String trustKS) throws IOException {
+    Configuration serverSSLConf = createSSLConfig(SSLFactory.Mode.SERVER,
+      serverKS, password, keyPassword, trustKS);
+    return serverSSLConf;
+  }
+
+  /**
+   * Creates SSL configuration.
+   * 
+   * @param mode SSLFactory.Mode mode to configure
+   * @param keystore String keystore file
+   * @param password String store password, or null to avoid setting store
+   *   password
+   * @param keyPassword String key password, or null to avoid setting key
+   *   password
+   * @param trustKS String truststore file
+   * @return Configuration for SSL
+   */
+  private static Configuration createSSLConfig(SSLFactory.Mode mode,
+      String keystore, String password, String keyPassword, String trustKS) {
+    String trustPassword = "trustP";
+
+    Configuration sslConf = new Configuration(false);
+    if (keystore != null) {
+      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY), keystore);
+    }
+    if (password != null) {
+      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY), password);
+    }
+    if (keyPassword != null) {
+      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
+        keyPassword);
+    }
+    if (trustKS != null) {
+      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY), trustKS);
+    }
+    if (trustPassword != null) {
+      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY),
+        trustPassword);
+    }
+    sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+      FileBasedKeyStoresFactory.SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY), "1000");
+
+    return sslConf;
+  }
+
+  /**
+   * Saves configuration to a file.
+   * 
+   * @param file File to save
+   * @param conf Configuration contents to write to file
+   * @throws IOException if there is an I/O error saving the file
+   */
+  public static void saveConfig(File file, Configuration conf)
+      throws IOException {
+    Writer writer = new FileWriter(file);
+    try {
+      conf.writeXml(writer);
+    } finally {
+      writer.close();
+    }
+  }
+}
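
A minimal sketch of how a test might drive this utility end to end; the directory choice and the
TestSSLExample class name are illustrative only, and imports/scaffolding are omitted:

    Configuration conf = new Configuration();
    String keystoresDir =
        new File(System.getProperty("test.build.dir", "target/test-dir")).getAbsolutePath();
    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLExample.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false); // no client cert
    try {
      // ... exercise an SSLFactory / HTTPS endpoint configured from conf ...
    } finally {
      KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
    }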

http://git-wip-us.apache.org/repos/asf/hbase/blob/26096f36/hbase-server/src/test/resources/webapps/static/test.css
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/resources/webapps/static/test.css b/hbase-server/src/test/resources/webapps/static/test.css
new file mode 100644
index 0000000..ae43828
--- /dev/null
+++ b/hbase-server/src/test/resources/webapps/static/test.css
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Test CSS file for content type handling - empty, since we only check
+ * the returned content type!
+ */