Posted to commits@hbase.apache.org by bu...@apache.org on 2017/11/01 22:13:57 UTC

[12/15] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
deleted file mode 100644
index e23eecd..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.log;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.regex.Pattern;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Jdk14Logger;
-import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.util.ServletUtil;
-
-/**
- * Change log level at runtime.
- */
-@InterfaceStability.Evolving
-public class LogLevel {
-  public static final String USAGES = "\nUsage: General options are:\n"
-      + "\t[-getlevel <host:httpPort> <name>]\n"
-      + "\t[-setlevel <host:httpPort> <name> <level>]\n";
-
-  /**
-   * A command line implementation
-   */
-  public static void main(String[] args) {
-    if (args.length == 3 && "-getlevel".equals(args[0])) {
-      process("http://" + args[1] + "/logLevel?log=" + args[2]);
-      return;
-    }
-    else if (args.length == 4 && "-setlevel".equals(args[0])) {
-      process("http://" + args[1] + "/logLevel?log=" + args[2]
-              + "&level=" + args[3]);
-      return;
-    }
-
-    System.err.println(USAGES);
-    System.exit(-1);
-  }
-
-  private static void process(String urlstring) {
-    try {
-      URL url = new URL(urlstring);
-      System.out.println("Connecting to " + url);
-      URLConnection connection = url.openConnection();
-      connection.connect();
-      try (InputStreamReader streamReader = new InputStreamReader(connection.getInputStream());
-           BufferedReader bufferedReader = new BufferedReader(streamReader)) {
-        for(String line; (line = bufferedReader.readLine()) != null; ) {
-          if (line.startsWith(MARKER)) {
-            System.out.println(TAG.matcher(line).replaceAll(""));
-          }
-        }
-      }
-    } catch (IOException ioe) {
-      System.err.println("" + ioe);
-    }
-  }
-
-  static final String MARKER = "<!-- OUTPUT -->";
-  static final Pattern TAG = Pattern.compile("<[^>]*>");
-
-  /**
-   * A servlet implementation
-   */
-  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-  @InterfaceStability.Unstable
-  public static class Servlet extends HttpServlet {
-    private static final long serialVersionUID = 1L;
-
-    @Override
-    public void doGet(HttpServletRequest request, HttpServletResponse response
-        ) throws ServletException, IOException {
-
-      // Do the authorization
-      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
-          response)) {
-        return;
-      }
-
-      PrintWriter out = ServletUtil.initHTML(response, "Log Level");
-      String logName = ServletUtil.getParameter(request, "log");
-      String level = ServletUtil.getParameter(request, "level");
-
-      if (logName != null) {
-        out.println("<br /><hr /><h3>Results</h3>");
-        out.println(MARKER
-            + "Submitted Log Name: <b>" + logName + "</b><br />");
-
-        Log log = LogFactory.getLog(logName);
-        out.println(MARKER
-            + "Log Class: <b>" + log.getClass().getName() +"</b><br />");
-        if (level != null) {
-          out.println(MARKER + "Submitted Level: <b>" + level + "</b><br />");
-        }
-
-        if (log instanceof Log4JLogger) {
-          process(((Log4JLogger)log).getLogger(), level, out);
-        }
-        else if (log instanceof Jdk14Logger) {
-          process(((Jdk14Logger)log).getLogger(), level, out);
-        }
-        else {
-          out.println("Sorry, " + log.getClass() + " not supported.<br />");
-        }
-      }
-
-      out.println(FORMS);
-      out.println(ServletUtil.HTML_TAIL);
-    }
-
-    static final String FORMS = "\n<br /><hr /><h3>Get / Set</h3>"
-        + "\n<form>Log: <input type='text' size='50' name='log' /> "
-        + "<input type='submit' value='Get Log Level' />"
-        + "</form>"
-        + "\n<form>Log: <input type='text' size='50' name='log' /> "
-        + "Level: <input type='text' name='level' /> "
-        + "<input type='submit' value='Set Log Level' />"
-        + "</form>";
-
-    private static void process(org.apache.log4j.Logger log, String level,
-        PrintWriter out) throws IOException {
-      if (level != null) {
-        if (!level.equals(org.apache.log4j.Level.toLevel(level).toString())) {
-          out.println(MARKER + "Bad level : <b>" + level + "</b><br />");
-        } else {
-          log.setLevel(org.apache.log4j.Level.toLevel(level));
-          out.println(MARKER + "Setting Level to " + level + " ...<br />");
-        }
-      }
-      out.println(MARKER
-          + "Effective level: <b>" + log.getEffectiveLevel() + "</b><br />");
-    }
-
-    private static void process(java.util.logging.Logger log, String level,
-        PrintWriter out) throws IOException {
-      if (level != null) {
-        log.setLevel(java.util.logging.Level.parse(level));
-        out.println(MARKER + "Setting Level to " + level + " ...<br />");
-      }
-
-      java.util.logging.Level lev;
-      for(; (lev = log.getLevel()) == null; log = log.getParent());
-      out.println(MARKER + "Effective level: <b>" + lev + "</b><br />");
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/package-info.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/package-info.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/package-info.java
deleted file mode 100644
index f55e24b..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/package-info.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * <p>
- * Copied from hadoop source code.<br>
- * See https://issues.apache.org/jira/browse/HADOOP-10232 to know why.
- * </p>
- */
-@InterfaceStability.Unstable
-package org.apache.hadoop.hbase.http;
-
-import org.apache.yetus.audience.InterfaceStability;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
deleted file mode 100644
index 69972a2..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
+++ /dev/null
@@ -1,272 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.authorize.AccessControlList;
-import org.junit.Assert;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.http.HttpServer.Builder;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.ServerSocket;
-import java.net.URI;
-import java.net.URL;
-import java.net.MalformedURLException;
-
-/**
- * This is a base class for functional tests of the {@link HttpServer}.
- * The methods are static for other classes to import statically.
- */
-public class HttpServerFunctionalTest extends Assert {
-  /** JVM property for the webapp test dir : {@value} */
-  public static final String TEST_BUILD_WEBAPPS = "test.build.webapps";
-  /** expected location of the test.build.webapps dir: {@value} */
-  private static final String BUILD_WEBAPPS_DIR = "src/main/resources/hbase-webapps";
-
-  /** name of the test webapp: {@value} */
-  private static final String TEST = "test";
-
-  /**
-   * Create but do not start the test webapp server. The test webapp dir is
-   * prepared/checked in advance.
-   *
-   * @return the server instance
-   *
-   * @throws IOException if a problem occurs
-   * @throws AssertionError if a condition was not met
-   */
-  public static HttpServer createTestServer() throws IOException {
-    prepareTestWebapp();
-    return createServer(TEST);
-  }
-
-  /**
-   * Create but do not start the test webapp server. The test webapp dir is
-   * prepared/checked in advance.
-   * @param conf the server configuration to use
-   * @return the server instance
-   *
-   * @throws IOException if a problem occurs
-   * @throws AssertionError if a condition was not met
-   */
-  public static HttpServer createTestServer(Configuration conf)
-      throws IOException {
-    prepareTestWebapp();
-    return createServer(TEST, conf);
-  }
-
-  public static HttpServer createTestServer(Configuration conf, AccessControlList adminsAcl)
-      throws IOException {
-    prepareTestWebapp();
-    return createServer(TEST, conf, adminsAcl);
-  }
-
-  /**
-   * Create but do not start the test webapp server. The test webapp dir is
-   * prepared/checked in advance.
-   * @param conf the server configuration to use
-   * @return the server instance
-   *
-   * @throws IOException if a problem occurs
-   * @throws AssertionError if a condition was not met
-   */
-  public static HttpServer createTestServer(Configuration conf,
-      String[] pathSpecs) throws IOException {
-    prepareTestWebapp();
-    return createServer(TEST, conf, pathSpecs);
-  }
-
-  public static HttpServer createTestServerWithSecurity(Configuration conf) throws IOException {
-    prepareTestWebapp();
-    return localServerBuilder(TEST).setFindPort(true).setConf(conf).setSecurityEnabled(true)
-        // InfoServer normally sets these for us
-        .setUsernameConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY)
-        .setKeytabConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY)
-        .build();
-  }
-
-  /**
-   * Prepare the test webapp by checking that the directory named by the test
-   * properties exists; fail if the directory is missing.
-   * @throws AssertionError if a condition was not met
-   */
-  protected static void prepareTestWebapp() {
-    String webapps = System.getProperty(TEST_BUILD_WEBAPPS, BUILD_WEBAPPS_DIR);
-    File testWebappDir = new File(webapps +
-        File.separatorChar + TEST);
-    try {
-      if (!testWebappDir.exists()) {
-        fail("Test webapp dir " + testWebappDir.getCanonicalPath() + " missing");
-      }
-    } catch (IOException e) {
-      // ignored: getCanonicalPath() is the only call here that can throw
-    }
-  }
-
-  /**
-   * Create an HttpServer instance on the given address for the given webapp
-   * @param host to bind
-   * @param port to bind
-   * @return the server
-   * @throws IOException if it could not be created
-   */
-  public static HttpServer createServer(String host, int port)
-      throws IOException {
-    prepareTestWebapp();
-    return new HttpServer.Builder().setName(TEST)
-        .addEndpoint(URI.create("http://" + host + ":" + port))
-        .setFindPort(true).build();
-  }
-
-  /**
-   * Create an HttpServer instance for the given webapp
-   * @param webapp the webapp to work with
-   * @return the server
-   * @throws IOException if it could not be created
-   */
-  public static HttpServer createServer(String webapp) throws IOException {
-    return localServerBuilder(webapp).setFindPort(true).build();
-  }
-  /**
-   * Create an HttpServer instance for the given webapp
-   * @param webapp the webapp to work with
-   * @param conf the configuration to use for the server
-   * @return the server
-   * @throws IOException if it could not be created
-   */
-  public static HttpServer createServer(String webapp, Configuration conf)
-      throws IOException {
-    return localServerBuilder(webapp).setFindPort(true).setConf(conf).build();
-  }
-
-  public static HttpServer createServer(String webapp, Configuration conf, AccessControlList adminsAcl)
-      throws IOException {
-    return localServerBuilder(webapp).setFindPort(true).setConf(conf).setACL(adminsAcl).build();
-  }
-
-  private static Builder localServerBuilder(String webapp) {
-    return new HttpServer.Builder().setName(webapp).addEndpoint(
-        URI.create("http://localhost:0"));
-  }
-
-  /**
-   * Create an HttpServer instance for the given webapp
-   * @param webapp the webapp to work with
-   * @param conf the configuration to use for the server
-   * @param pathSpecs the paths specifications the server will service
-   * @return the server
-   * @throws IOException if it could not be created
-   */
-  public static HttpServer createServer(String webapp, Configuration conf,
-      String[] pathSpecs) throws IOException {
-    return localServerBuilder(webapp).setFindPort(true).setConf(conf).setPathSpec(pathSpecs).build();
-  }
-
-  /**
-   * Create and start a server with the test webapp
-   *
-   * @return the newly started server
-   *
-   * @throws IOException on any failure
-   * @throws AssertionError if a condition was not met
-   */
-  public static HttpServer createAndStartTestServer() throws IOException {
-    HttpServer server = createTestServer();
-    server.start();
-    return server;
-  }
-
-  /**
-   * If the server is non null, stop it
-   * @param server to stop
-   * @throws Exception on any failure
-   */
-  public static void stop(HttpServer server) throws Exception {
-    if (server != null) {
-      server.stop();
-    }
-  }
-
-  /**
-   * Pass in a server, return a URL bound to localhost and its port
-   * @param server server
-   * @return a URL bound to the base of the server
-   * @throws MalformedURLException if the URL cannot be created.
-   */
-  public static URL getServerURL(HttpServer server)
-      throws MalformedURLException {
-    assertNotNull("No server", server);
-    return new URL("http://"
-        + NetUtils.getHostPortString(server.getConnectorAddress(0)));
-  }
-
-  /**
-   * Read in the content from a URL
-   * @param url URL To read
-   * @return the text from the output
-   * @throws IOException if something went wrong
-   */
-  protected static String readOutput(URL url) throws IOException {
-    StringBuilder out = new StringBuilder();
-    InputStream in = url.openConnection().getInputStream();
-    byte[] buffer = new byte[64 * 1024];
-    int len = in.read(buffer);
-    while (len > 0) {
-      out.append(new String(buffer, 0, len));
-      len = in.read(buffer);
-    }
-    return out.toString();
-  }
-
-  /**
-   * Recursively deletes a {@link File}.
-   */
-  protected static void deleteRecursively(File d) {
-    if (d.isDirectory()) {
-      for (String name : d.list()) {
-        File child = new File(d, name);
-        if (child.isFile()) {
-          child.delete();
-        } else {
-          deleteRecursively(child);
-        }
-      }
-    }
-    d.delete();
-  }
-
-  /**
-   * Picks a free port on the host by binding a ServerSocket to port 0.
-   */
-  protected static int getFreePort() throws IOException {
-    ServerSocket s = new ServerSocket(0);
-    try {
-      s.setReuseAddress(true);
-      int port = s.getLocalPort();
-      return port;
-    } finally {
-      if (null != s) {
-        s.close();
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
deleted file mode 100644
index 729dd06..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.Set;
-import java.util.TreeSet;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestGlobalFilter extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-  static final Set<String> RECORDS = new TreeSet<>();
-
-  /** A very simple filter that records accessed URIs. */
-  static public class RecordingFilter implements Filter {
-    private FilterConfig filterConfig = null;
-
-    @Override
-    public void init(FilterConfig filterConfig) {
-      this.filterConfig = filterConfig;
-    }
-
-    @Override
-    public void destroy() {
-      this.filterConfig = null;
-    }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain chain) throws IOException, ServletException {
-      if (filterConfig == null)
-         return;
-
-      String uri = ((HttpServletRequest)request).getRequestURI();
-      LOG.info("filtering " + uri);
-      RECORDS.add(uri);
-      chain.doFilter(request, response);
-    }
-
-    /** Configuration for RecordingFilter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {}
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addGlobalFilter("recording", RecordingFilter.class.getName(), null);
-      }
-    }
-  }
-
-
-  /** Access a URL, ignoring IOExceptions such as the page not existing. */
-  static void access(String urlstring) throws IOException {
-    LOG.warn("access " + urlstring);
-    URL url = new URL(urlstring);
-    URLConnection connection = url.openConnection();
-    connection.connect();
-
-    try {
-      BufferedReader in = new BufferedReader(new InputStreamReader(
-          connection.getInputStream()));
-      try {
-        for(; in.readLine() != null; );
-      } finally {
-        in.close();
-      }
-    } catch(IOException ioe) {
-      LOG.warn("urlstring=" + urlstring, ioe);
-    }
-  }
-
-  @Test
-  public void testServletFilter() throws Exception {
-    Configuration conf = new Configuration();
-
-    // start an HTTP server with RecordingFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        RecordingFilter.Initializer.class.getName());
-    HttpServer http = createTestServer(conf);
-    http.start();
-
-    final String fsckURL = "/fsck";
-    final String stacksURL = "/stacks";
-    final String ajspURL = "/a.jsp";
-    final String listPathsURL = "/listPaths";
-    final String dataURL = "/data";
-    final String streamFile = "/streamFile";
-    final String rootURL = "/";
-    final String allURL = "/*";
-    final String outURL = "/static/a.out";
-    final String logURL = "/logs/a.log";
-
-    final String[] urls = {fsckURL, stacksURL, ajspURL, listPathsURL,
-        dataURL, streamFile, rootURL, allURL, outURL, logURL};
-
-    //access the urls
-    final String prefix = "http://"
-        + NetUtils.getHostPortString(http.getConnectorAddress(0));
-    try {
-      for(int i = 0; i < urls.length; i++) {
-        access(prefix + urls[i]);
-      }
-    } finally {
-      http.stop();
-    }
-
-    LOG.info("RECORDS = " + RECORDS);
-
-    //verify records
-    for(int i = 0; i < urls.length; i++) {
-      assertTrue(RECORDS.remove(urls[i]));
-    }
-    assertTrue(RECORDS.isEmpty());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
deleted file mode 100644
index 5bc026c..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import static org.junit.Assert.*;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.Mockito;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHtmlQuoting {
-
-  @Test public void testNeedsQuoting() throws Exception {
-    assertTrue(HtmlQuoting.needsQuoting("abcde>"));
-    assertTrue(HtmlQuoting.needsQuoting("<abcde"));
-    assertTrue(HtmlQuoting.needsQuoting("abc'de"));
-    assertTrue(HtmlQuoting.needsQuoting("abcde\""));
-    assertTrue(HtmlQuoting.needsQuoting("&"));
-    assertFalse(HtmlQuoting.needsQuoting(""));
-    assertFalse(HtmlQuoting.needsQuoting("ab\ncdef"));
-    assertFalse(HtmlQuoting.needsQuoting(null));
-  }
-
-  @Test public void testQuoting() throws Exception {
-    assertEquals("ab&lt;cd", HtmlQuoting.quoteHtmlChars("ab<cd"));
-    assertEquals("ab&gt;", HtmlQuoting.quoteHtmlChars("ab>"));
-    assertEquals("&amp;&amp;&amp;", HtmlQuoting.quoteHtmlChars("&&&"));
-    assertEquals(" &apos;\n", HtmlQuoting.quoteHtmlChars(" '\n"));
-    assertEquals("&quot;", HtmlQuoting.quoteHtmlChars("\""));
-    assertEquals(null, HtmlQuoting.quoteHtmlChars(null));
-  }
-
-  private void runRoundTrip(String str) throws Exception {
-    assertEquals(str,
-                 HtmlQuoting.unquoteHtmlChars(HtmlQuoting.quoteHtmlChars(str)));
-  }
-
-  @Test public void testRoundtrip() throws Exception {
-    runRoundTrip("");
-    runRoundTrip("<>&'\"");
-    runRoundTrip("ab>cd<ef&ghi'\"");
-    runRoundTrip("A string\n with no quotable chars in it!");
-    runRoundTrip(null);
-    StringBuilder buffer = new StringBuilder();
-    for(char ch=0; ch < 127; ++ch) {
-      buffer.append(ch);
-    }
-    runRoundTrip(buffer.toString());
-  }
-
-
-  @Test
-  public void testRequestQuoting() throws Exception {
-    HttpServletRequest mockReq = Mockito.mock(HttpServletRequest.class);
-    HttpServer.QuotingInputFilter.RequestQuoter quoter =
-      new HttpServer.QuotingInputFilter.RequestQuoter(mockReq);
-
-    Mockito.doReturn("a<b").when(mockReq).getParameter("x");
-    assertEquals("Test simple param quoting",
-        "a&lt;b", quoter.getParameter("x"));
-
-    Mockito.doReturn(null).when(mockReq).getParameter("x");
-    assertEquals("Test that missing parameters dont cause NPE",
-        null, quoter.getParameter("x"));
-
-    Mockito.doReturn(new String[]{"a<b", "b"}).when(mockReq).getParameterValues("x");
-    assertArrayEquals("Test escaping of an array",
-        new String[]{"a&lt;b", "b"}, quoter.getParameterValues("x"));
-
-    Mockito.doReturn(null).when(mockReq).getParameterValues("x");
-    assertArrayEquals("Test that missing parameters dont cause NPE for array",
-        null, quoter.getParameterValues("x"));
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
deleted file mode 100644
index b8d21d1..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Logger;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import org.eclipse.jetty.server.RequestLog;
-import org.eclipse.jetty.server.NCSARequestLog;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpRequestLog {
-
-  @Test
-  public void testAppenderUndefined() {
-    RequestLog requestLog = HttpRequestLog.getRequestLog("test");
-    assertNull("RequestLog should be null", requestLog);
-  }
-
-  @Test
-  public void testAppenderDefined() {
-    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
-    requestLogAppender.setName("testrequestlog");
-    Logger.getLogger("http.requests.test").addAppender(requestLogAppender);
-    RequestLog requestLog = HttpRequestLog.getRequestLog("test");
-    Logger.getLogger("http.requests.test").removeAppender(requestLogAppender);
-    assertNotNull("RequestLog should not be null", requestLog);
-    assertEquals("Class mismatch", NCSARequestLog.class, requestLog.getClass());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
deleted file mode 100644
index a17b9e9..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import static org.junit.Assert.assertEquals;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpRequestLogAppender {
-
-  @Test
-  public void testParameterPropagation() {
-
-    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
-    requestLogAppender.setFilename("jetty-namenode-yyyy_mm_dd.log");
-    requestLogAppender.setRetainDays(17);
-    assertEquals("Filename mismatch", "jetty-namenode-yyyy_mm_dd.log",
-        requestLogAppender.getFilename());
-    assertEquals("Retain days mismatch", 17,
-        requestLogAppender.getRetainDays());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
deleted file mode 100644
index 2eb6a21..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ /dev/null
@@ -1,621 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.HttpURLConnection;
-import java.net.URI;
-import java.net.URL;
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedSet;
-import java.util.TreeSet;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.Executor;
-import java.util.concurrent.Executors;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter;
-import org.apache.hadoop.hbase.http.resource.JerseyResource;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.Groups;
-import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AccessControlList;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.util.ajax.JSON;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.Mockito;
-import org.mockito.internal.util.reflection.Whitebox;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpServer extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(TestHttpServer.class);
-  private static HttpServer server;
-  private static URL baseUrl;
-  // jetty 9.4.x needs this many threads to start, even in the small.
-  static final int MAX_THREADS = 16;
-
-  @SuppressWarnings("serial")
-  public static class EchoMapServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request,
-                      HttpServletResponse response
-                      ) throws ServletException, IOException {
-      PrintWriter out = response.getWriter();
-      Map<String, String[]> params = request.getParameterMap();
-      SortedSet<String> keys = new TreeSet<>(params.keySet());
-      for(String key: keys) {
-        out.print(key);
-        out.print(':');
-        String[] values = params.get(key);
-        if (values.length > 0) {
-          out.print(values[0]);
-          for(int i=1; i < values.length; ++i) {
-            out.print(',');
-            out.print(values[i]);
-          }
-        }
-        out.print('\n');
-      }
-      out.close();
-    }
-  }
-
-  @SuppressWarnings("serial")
-  public static class EchoServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request,
-                      HttpServletResponse response
-                      ) throws ServletException, IOException {
-      PrintWriter out = response.getWriter();
-      SortedSet<String> sortedKeys = new TreeSet<>();
-      Enumeration<String> keys = request.getParameterNames();
-      while(keys.hasMoreElements()) {
-        sortedKeys.add(keys.nextElement());
-      }
-      for(String key: sortedKeys) {
-        out.print(key);
-        out.print(':');
-        out.print(request.getParameter(key));
-        out.print('\n');
-      }
-      out.close();
-    }
-  }
-
-  @SuppressWarnings("serial")
-  public static class LongHeaderServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request,
-                      HttpServletResponse response
-    ) throws ServletException, IOException {
-      Assert.assertEquals(63 * 1024, request.getHeader("longheader").length());
-      response.setStatus(HttpServletResponse.SC_OK);
-    }
-  }
-
-  @SuppressWarnings("serial")
-  public static class HtmlContentServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request,
-                      HttpServletResponse response
-                      ) throws ServletException, IOException {
-      response.setContentType("text/html");
-      PrintWriter out = response.getWriter();
-      out.print("hello world");
-      out.close();
-    }
-  }
-
-  @BeforeClass public static void setup() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, MAX_THREADS);
-    server = createTestServer(conf);
-    server.addServlet("echo", "/echo", EchoServlet.class);
-    server.addServlet("echomap", "/echomap", EchoMapServlet.class);
-    server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
-    server.addServlet("longheader", "/longheader", LongHeaderServlet.class);
-    server.addJerseyResourcePackage(
-        JerseyResource.class.getPackage().getName(), "/jersey/*");
-    server.start();
-    baseUrl = getServerURL(server);
-    LOG.info("HTTP server started: "+ baseUrl);
-  }
-
-  @AfterClass public static void cleanup() throws Exception {
-    server.stop();
-  }
-
-  /** Test that the maximum number of threads cannot be exceeded. */
-  @Test public void testMaxThreads() throws Exception {
-    int clientThreads = MAX_THREADS * 10;
-    Executor executor = Executors.newFixedThreadPool(clientThreads);
-    // Run many clients to make server reach its maximum number of threads
-    final CountDownLatch ready = new CountDownLatch(clientThreads);
-    final CountDownLatch start = new CountDownLatch(1);
-    for (int i = 0; i < clientThreads; i++) {
-      executor.execute(new Runnable() {
-        @Override
-        public void run() {
-          ready.countDown();
-          try {
-            start.await();
-            assertEquals("a:b\nc:d\n",
-                         readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
-            int serverThreads = server.webServer.getThreadPool().getThreads();
-            assertTrue("More threads are started than expected, Server Threads count: "
-                    + serverThreads, serverThreads <= MAX_THREADS);
-            System.out.println("Number of threads = " + serverThreads +
-                " which is less or equal than the max = " + MAX_THREADS);
-          } catch (Exception e) {
-            // do nothing
-          }
-        }
-      });
-    }
-    // Start the client threads when they are all ready
-    ready.await();
-    start.countDown();
-  }
-
-  @Test public void testEcho() throws Exception {
-    assertEquals("a:b\nc:d\n",
-                 readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
-    assertEquals("a:b\nc&lt;:d\ne:&gt;\n",
-                 readOutput(new URL(baseUrl, "/echo?a=b&c<=d&e=>")));
-  }
-
-  /** Test the echo map servlet that uses getParameterMap. */
-  @Test public void testEchoMap() throws Exception {
-    assertEquals("a:b\nc:d\n",
-                 readOutput(new URL(baseUrl, "/echomap?a=b&c=d")));
-    assertEquals("a:b,&gt;\nc&lt;:d\n",
-                 readOutput(new URL(baseUrl, "/echomap?a=b&c<=d&a=>")));
-  }
-
-  /**
-   *  Test that verifies headers can be up to 64K long.
-   *  The test adds a 63K header leaving 1K for other headers.
-   *  This is because the header buffer setting is for ALL headers,
-   *  names and values included. */
-  @Test public void testLongHeader() throws Exception {
-    URL url = new URL(baseUrl, "/longheader");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    StringBuilder sb = new StringBuilder();
-    for (int i = 0 ; i < 63 * 1024; i++) {
-      sb.append("a");
-    }
-    conn.setRequestProperty("longheader", sb.toString());
-    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-  }
-
-  @Test
-  public void testContentTypes() throws Exception {
-    // Static CSS files should have text/css
-    URL cssUrl = new URL(baseUrl, "/static/test.css");
-    HttpURLConnection conn = (HttpURLConnection)cssUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/css", conn.getContentType());
-
-    // Servlets should have text/plain with proper encoding by default
-    URL servletUrl = new URL(baseUrl, "/echo?a=b");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/plain;charset=utf-8", conn.getContentType());
-
-    // We should ignore parameters for mime types - ie a parameter
-    // ending in .css should not change mime type
-    servletUrl = new URL(baseUrl, "/echo?a=b.css");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/plain;charset=utf-8", conn.getContentType());
-
-    // Servlets that specify text/html should get that content type
-    servletUrl = new URL(baseUrl, "/htmlcontent");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/html;charset=utf-8", conn.getContentType());
-
-    // JSPs should default to text/html with utf8
-    // JSPs do not work from unit tests
-    // servletUrl = new URL(baseUrl, "/testjsp.jsp");
-    // conn = (HttpURLConnection)servletUrl.openConnection();
-    // conn.connect();
-    // assertEquals(200, conn.getResponseCode());
-    // assertEquals("text/html; charset=utf-8", conn.getContentType());
-  }
-
-  /**
-   * Dummy filter that mimics an authentication filter. Obtains the user identity
-   * from the request parameter user.name. Wraps around the request so that
-   * request.getRemoteUser() returns the user identity.
-   *
-   */
-  public static class DummyServletFilter implements Filter {
-    @Override
-    public void destroy() { }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain filterChain) throws IOException, ServletException {
-      final String userName = request.getParameter("user.name");
-      ServletRequest requestModified =
-        new HttpServletRequestWrapper((HttpServletRequest) request) {
-        @Override
-        public String getRemoteUser() {
-          return userName;
-        }
-      };
-      filterChain.doFilter(requestModified, response);
-    }
-
-    @Override
-    public void init(FilterConfig arg0) throws ServletException { }
-  }
-
-  /**
-   * FilterInitializer that initializes the DummyFilter.
-   *
-   */
-  public static class DummyFilterInitializer extends FilterInitializer {
-    public DummyFilterInitializer() {
-    }
-
-    @Override
-    public void initFilter(FilterContainer container, Configuration conf) {
-      container.addFilter("DummyFilter", DummyServletFilter.class.getName(), null);
-    }
-  }
-
-  /**
-   * Access a URL and get the corresponding HTTP status code. The URL
-   * will be accessed as the passed user, by sending the user.name request
-   * parameter.
-   *
-   * @param urlstring the URL to access
-   * @param userName the user to access the URL as
-   * @return the HTTP status code of the response
-   * @throws IOException if the connection fails
-   */
-  static int getHttpStatusCode(String urlstring, String userName)
-      throws IOException {
-    URL url = new URL(urlstring + "?user.name=" + userName);
-    System.out.println("Accessing " + url + " as user " + userName);
-    HttpURLConnection connection = (HttpURLConnection)url.openConnection();
-    connection.connect();
-    return connection.getResponseCode();
-  }
-
-  /**
-   * Custom user->group mapping service.
-   */
-  public static class MyGroupsProvider extends ShellBasedUnixGroupsMapping {
-    static Map<String, List<String>> mapping = new HashMap<>();
-
-    static void clearMapping() {
-      mapping.clear();
-    }
-
-    @Override
-    public List<String> getGroups(String user) throws IOException {
-      return mapping.get(user);
-    }
-  }
-
-  /**
-   * Verify the access for /logs, /stacks, /conf, /logLevel and /metrics
-   * servlets, when authentication filters are set, but authorization is not
-   * enabled.
-   * @throws Exception
-   */
-  @Test
-  @Ignore
-  public void testDisabledAuthorizationOfDefaultServlets() throws Exception {
-
-    Configuration conf = new Configuration();
-
-    // Authorization is disabled by default
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        DummyFilterInitializer.class.getName());
-    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
-        MyGroupsProvider.class.getName());
-    Groups.getUserToGroupsMappingService(conf);
-    MyGroupsProvider.clearMapping();
-    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
-    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
-
-    HttpServer myServer = new HttpServer.Builder().setName("test")
-        .addEndpoint(new URI("http://localhost:0")).setFindPort(true).build();
-    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
-    myServer.start();
-    String serverURL = "http://" + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
-    for (String servlet : new String[] { "conf", "logs", "stacks",
-        "logLevel", "metrics" }) {
-      for (String user : new String[] { "userA", "userB" }) {
-        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
-            + servlet, user));
-      }
-    }
-    myServer.stop();
-  }
-
-  /**
-   * Verify the administrator access for /logs, /stacks, /conf, /logLevel and
-   * /metrics servlets.
-   *
-   * @throws Exception
-   */
-  @Test
-  @Ignore
-  public void testAuthorizationOfDefaultServlets() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
-        true);
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
-        true);
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        DummyFilterInitializer.class.getName());
-
-    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
-        MyGroupsProvider.class.getName());
-    Groups.getUserToGroupsMappingService(conf);
-    MyGroupsProvider.clearMapping();
-    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
-    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
-    MyGroupsProvider.mapping.put("userC", Arrays.asList("groupC"));
-    MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
-    MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
-
-    HttpServer myServer = new HttpServer.Builder().setName("test")
-        .addEndpoint(new URI("http://localhost:0")).setFindPort(true).setConf(conf)
-        .setACL(new AccessControlList("userA,userB groupC,groupD")).build();
-    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
-    myServer.start();
-
-    String serverURL = "http://"
-        + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
-    for (String servlet : new String[] { "conf", "logs", "stacks",
-        "logLevel", "metrics" }) {
-      for (String user : new String[] { "userA", "userB", "userC", "userD" }) {
-        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
-            + servlet, user));
-      }
-      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(
-          serverURL + servlet, "userE"));
-    }
-    myServer.stop();
-  }
-
-  @Test
-  public void testRequestQuoterWithNull() throws Exception {
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    Mockito.doReturn(null).when(request).getParameterValues("dummy");
-    RequestQuoter requestQuoter = new RequestQuoter(request);
-    String[] parameterValues = requestQuoter.getParameterValues("dummy");
-    Assert.assertEquals("It should return null "
-        + "when there are no values for the parameter", null, parameterValues);
-  }
-
-  @Test
-  public void testRequestQuoterWithNotNull() throws Exception {
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    String[] values = new String[] { "abc", "def" };
-    Mockito.doReturn(values).when(request).getParameterValues("dummy");
-    RequestQuoter requestQuoter = new RequestQuoter(request);
-    String[] parameterValues = requestQuoter.getParameterValues("dummy");
-    Assert.assertTrue("It should return Parameter Values", Arrays.equals(
-        values, parameterValues));
-  }
-
-  @SuppressWarnings("unchecked")
-  private static Map<String, Object> parse(String jsonString) {
-    return (Map<String, Object>)JSON.parse(jsonString);
-  }
-
-  @Test public void testJersey() throws Exception {
-    LOG.info("BEGIN testJersey()");
-    final String js = readOutput(new URL(baseUrl, "/jersey/foo?op=bar"));
-    final Map<String, Object> m = parse(js);
-    LOG.info("m=" + m);
-    assertEquals("foo", m.get(JerseyResource.PATH));
-    assertEquals("bar", m.get(JerseyResource.OP));
-    LOG.info("END testJersey()");
-  }
-
-  @Test
-  public void testHasAdministratorAccess() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false);
-    ServletContext context = Mockito.mock(ServletContext.class);
-    Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(null);
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    Mockito.when(request.getRemoteUser()).thenReturn(null);
-    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
-
-    //authorization OFF
-    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
-
-    //authorization ON & user NULL
-    response = Mockito.mock(HttpServletResponse.class);
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
-    Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response));
-    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
-
-    //authorization ON & user NOT NULL & ACLs NULL
-    response = Mockito.mock(HttpServletResponse.class);
-    Mockito.when(request.getRemoteUser()).thenReturn("foo");
-    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
-
-    //authorization ON & user NOT NULL & ACLs NOT NULL & user not in ACLs
-    response = Mockito.mock(HttpServletResponse.class);
-    AccessControlList acls = Mockito.mock(AccessControlList.class);
-    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
-    Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response));
-    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
-
-    //authorization ON & user NOT NULL & ACLs NOT NULL & user in ACLs
-    response = Mockito.mock(HttpServletResponse.class);
-    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(true);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
-    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
-
-  }
-
-  @Test
-  public void testRequiresAuthorizationAccess() throws Exception {
-    Configuration conf = new Configuration();
-    ServletContext context = Mockito.mock(ServletContext.class);
-    Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
-
-    //requires admin access to instrumentation, FALSE by default
-    Assert.assertTrue(HttpServer.isInstrumentationAccessAllowed(context, request, response));
-
-    //requires admin access to instrumentation, TRUE
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN, true);
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
-    AccessControlList acls = Mockito.mock(AccessControlList.class);
-    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
-    Assert.assertFalse(HttpServer.isInstrumentationAccessAllowed(context, request, response));
-  }
-
-  @Test public void testBindAddress() throws Exception {
-    checkBindAddress("localhost", 0, false).stop();
-    // hang onto this one for a bit more testing
-    HttpServer myServer = checkBindAddress("localhost", 0, false);
-    HttpServer myServer2 = null;
-    try {
-      int port = myServer.getConnectorAddress(0).getPort();
-      // it's already in use, true = expect a higher port
-      myServer2 = checkBindAddress("localhost", port, true);
-      // try to reuse the port
-      port = myServer2.getConnectorAddress(0).getPort();
-      myServer2.stop();
-      assertNull(myServer2.getConnectorAddress(0)); // not bound
-      myServer2.openListeners();
-      assertEquals(port, myServer2.getConnectorAddress(0).getPort()); // expect same port
-    } finally {
-      myServer.stop();
-      if (myServer2 != null) {
-        myServer2.stop();
-      }
-    }
-  }
-
-  private HttpServer checkBindAddress(String host, int port, boolean findPort)
-      throws Exception {
-    HttpServer server = createServer(host, port);
-    try {
-      // not bound, ephemeral should return requested port (0 for ephemeral)
-      List<?> listeners = (List<?>) Whitebox.getInternalState(server,
-          "listeners");
-      ServerConnector listener = (ServerConnector) Whitebox.getInternalState(
-          listeners.get(0), "listener");
-
-      assertEquals(port, listener.getPort());
-      // verify hostname is what was given
-      server.openListeners();
-      assertEquals(host, server.getConnectorAddress(0).getHostName());
-
-      int boundPort = server.getConnectorAddress(0).getPort();
-      if (port == 0) {
-        assertTrue(boundPort != 0); // ephemeral should now return bound port
-      } else if (findPort) {
-        assertTrue(boundPort > port);
-        // allow a little wiggle room to prevent random test failures if
-        // some consecutive ports are already in use
-        assertTrue(boundPort - port < 8);
-      }
-    } catch (Exception e) {
-      server.stop();
-      throw e;
-    }
-    return server;
-  }
-
-  @Test
-  public void testXFrameHeaderSameOrigin() throws Exception {
-    Configuration conf = new Configuration();
-    conf.set("hbase.http.filter.xframeoptions.mode", "SAMEORIGIN");
-
-    HttpServer myServer = new HttpServer.Builder().setName("test")
-            .addEndpoint(new URI("http://localhost:0"))
-            .setFindPort(true).setConf(conf).build();
-    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
-    myServer.addServlet("echo", "/echo", EchoServlet.class);
-    myServer.start();
-
-    String serverURL = "http://"
-            + NetUtils.getHostPortString(myServer.getConnectorAddress(0));
-    URL url = new URL(new URL(serverURL), "/echo?a=b&c=d");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    assertEquals("SAMEORIGIN", conn.getHeaderField("X-Frame-Options"));
-    myServer.stop();
-  }
-
-  @Test
-  public void testNoCacheHeader() throws Exception {
-    URL url = new URL(baseUrl, "/echo?a=b&c=d");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    assertEquals("no-cache", conn.getHeaderField("Cache-Control"));
-    assertEquals("no-cache", conn.getHeaderField("Pragma"));
-    assertNotNull(conn.getHeaderField("Expires"));
-    assertNotNull(conn.getHeaderField("Date"));
-    assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
-    assertEquals("DENY", conn.getHeaderField("X-Frame-Options"));
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
deleted file mode 100644
index d0f2825..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
-
-  /**
-   * Check that a server is alive by probing the {@link HttpServer#isAlive()} method
-   * and the text of its toString() description
-   * @param server server
-   */
-  private void assertAlive(HttpServer server) {
-    assertTrue("Server is not alive", server.isAlive());
-    assertToStringContains(server, HttpServer.STATE_DESCRIPTION_ALIVE);
-  }
-
-  private void assertNotLive(HttpServer server) {
-    assertTrue("Server should not be live", !server.isAlive());
-    assertToStringContains(server, HttpServer.STATE_DESCRIPTION_NOT_LIVE);
-  }
-
-  /**
-   * Test that a newly created server is not alive until it is started
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testCreatedServerIsNotAlive() throws Throwable {
-    HttpServer server = createTestServer();
-    assertNotLive(server);
-  }
-
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStopUnstartedServer() throws Throwable {
-    HttpServer server = createTestServer();
-    stop(server);
-  }
-
-  /**
-   * Test that the server is alive once started
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStartedServerIsAlive() throws Throwable {
-    HttpServer server = null;
-    server = createTestServer();
-    assertNotLive(server);
-    server.start();
-    assertAlive(server);
-    stop(server);
-  }
-
-  /**
-   * Assert that the result of {@link HttpServer#toString()} contains the specific text
-   * @param server server to examine
-   * @param text text to search for
-   */
-  private void assertToStringContains(HttpServer server, String text) {
-    String description = server.toString();
-    assertTrue("Did not find \"" + text + "\" in \"" + description + "\"",
-               description.contains(text));
-  }
-
-  /**
-   * Test that the server is not alive once stopped
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStoppedServerIsNotAlive() throws Throwable {
-    HttpServer server = createAndStartTestServer();
-    assertAlive(server);
-    stop(server);
-    assertNotLive(server);
-  }
-
-  /**
-   * Test that stopping the server twice is allowed and leaves it not alive
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStoppingTwiceServerIsAllowed() throws Throwable {
-    HttpServer server = createAndStartTestServer();
-    assertAlive(server);
-    stop(server);
-    assertNotLive(server);
-    stop(server);
-    assertNotLive(server);
-  }
-
-  /**
-   * Test that webapp context attributes are cleared once the server is stopped
-   *
-   * @throws Throwable
-   *           on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testWepAppContextAfterServerStop() throws Throwable {
-    HttpServer server = null;
-    String key = "test.attribute.key";
-    String value = "test.attribute.value";
-    server = createTestServer();
-    assertNotLive(server);
-    server.start();
-    server.setAttribute(key, value);
-    assertAlive(server);
-    assertEquals(value, server.getAttribute(key));
-    stop(server);
-    assertNull("Server context should have cleared", server.getAttribute(key));
-  }
-}
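
The reverted TestHttpServerLifecycle above boils down to a create/start/stop sequence against HttpServer. The following stand-alone sketch of that sequence is not part of the reverted sources; it assumes only the HttpServer.Builder calls (setName, addEndpoint, setFindPort, setConf, build) and the start()/stop()/isAlive() methods that appear elsewhere in this diff, and the class name and endpoint used here are illustrative.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.HttpServer;

public class HttpServerLifecycleSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Bind to an ephemeral port so the sketch does not collide with a running server.
    HttpServer server = new HttpServer.Builder()
        .setName("lifecycle-sketch")
        .addEndpoint(new URI("http://localhost:0"))
        .setFindPort(true)
        .setConf(conf)
        .build();

    System.out.println("created: alive=" + server.isAlive());  // expected: false
    server.start();
    System.out.println("started: alive=" + server.isAlive());  // expected: true
    server.stop();
    System.out.println("stopped: alive=" + server.isAlive());  // expected: false
    server.stop();  // per testStoppingTwiceServerIsAllowed, a second stop() should be harmless
  }
}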

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
deleted file mode 100644
index db394a8..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
-
-import java.io.FileNotFoundException;
-
-/**
- * Test webapp loading
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpServerWebapps extends HttpServerFunctionalTest {
-  private static final Log log = LogFactory.getLog(TestHttpServerWebapps.class);
-
-  /**
-   * Test that the test server is loadable on the classpath
-   * @throws Throwable if something went wrong
-   */
-  @Test
-  public void testValidServerResource() throws Throwable {
-    HttpServer server = null;
-    try {
-      server = createServer("test");
-    } finally {
-      stop(server);
-    }
-  }
-
-  /**
-   * Test that an invalid webapp triggers an exception
-   * @throws Throwable if something went wrong
-   */
-  @Test
-  public void testMissingServerResource() throws Throwable {
-    try {
-      HttpServer server = createServer("NoSuchWebapp");
-      //should not have got here.
-      //close the server
-      String serverDescription = server.toString();
-      stop(server);
-      fail("Expected an exception, got " + serverDescription);
-    } catch (FileNotFoundException expected) {
-      log.debug("Expected exception " + expected, expected);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
deleted file mode 100644
index 5eff2b4..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.Set;
-import java.util.TreeSet;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestPathFilter extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-  static final Set<String> RECORDS = new TreeSet<>();
-
-  /** A very simple filter that records accessed URIs. */
-  static public class RecordingFilter implements Filter {
-    private FilterConfig filterConfig = null;
-
-    @Override
-    public void init(FilterConfig filterConfig) {
-      this.filterConfig = filterConfig;
-    }
-
-    @Override
-    public void destroy() {
-      this.filterConfig = null;
-    }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain chain) throws IOException, ServletException {
-      if (filterConfig == null)
-         return;
-
-      String uri = ((HttpServletRequest)request).getRequestURI();
-      LOG.info("filtering " + uri);
-      RECORDS.add(uri);
-      chain.doFilter(request, response);
-    }
-
-    /** Configuration for RecordingFilter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {}
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addFilter("recording", RecordingFilter.class.getName(), null);
-      }
-    }
-  }
-
-
-  /** Access a URL, ignoring IOExceptions such as those thrown when the page does not exist. */
-  static void access(String urlstring) throws IOException {
-    LOG.warn("access " + urlstring);
-    URL url = new URL(urlstring);
-
-    URLConnection connection = url.openConnection();
-    connection.connect();
-
-    try {
-      BufferedReader in = new BufferedReader(new InputStreamReader(
-          connection.getInputStream()));
-      try {
-        for(; in.readLine() != null; );
-      } finally {
-        in.close();
-      }
-    } catch(IOException ioe) {
-      LOG.warn("urlstring=" + urlstring, ioe);
-    }
-  }
-
-  @Test
-  public void testPathSpecFilters() throws Exception {
-    Configuration conf = new Configuration();
-
-    //start an http server with RecordingFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        RecordingFilter.Initializer.class.getName());
-    String[] pathSpecs = { "/path", "/path/*" };
-    HttpServer http = createTestServer(conf, pathSpecs);
-    http.start();
-
-    final String baseURL = "/path";
-    final String baseSlashURL = "/path/";
-    final String addedURL = "/path/nodes";
-    final String addedSlashURL = "/path/nodes/";
-    final String longURL = "/path/nodes/foo/job";
-    final String rootURL = "/";
-    final String allURL = "/*";
-
-    final String[] filteredUrls = {baseURL, baseSlashURL, addedURL,
-        addedSlashURL, longURL};
-    final String[] notFilteredUrls = {rootURL, allURL};
-
-    // access the urls and verify our path specs got added to the
-    // filters
-    final String prefix = "http://"
-        + NetUtils.getHostPortString(http.getConnectorAddress(0));
-    try {
-      for(int i = 0; i < filteredUrls.length; i++) {
-        access(prefix + filteredUrls[i]);
-      }
-      for(int i = 0; i < notFilteredUrls.length; i++) {
-        access(prefix + notFilteredUrls[i]);
-      }
-    } finally {
-      http.stop();
-    }
-
-    LOG.info("RECORDS = " + RECORDS);
-
-    //verify records
-    for(int i = 0; i < filteredUrls.length; i++) {
-      assertTrue(RECORDS.remove(filteredUrls[i]));
-    }
-    assertTrue(RECORDS.isEmpty());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
deleted file mode 100644
index b599350..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URL;
-
-import javax.net.ssl.HttpsURLConnection;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.ssl.SSLFactory;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-/**
- * This testcase issues SSL certificates, configures the HttpServer to serve
- * HTTPS using the created certificates, and calls an echo servlet using the
- * corresponding HTTPS URL.
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestSSLHttpServer extends HttpServerFunctionalTest {
-  private static final String BASEDIR = System.getProperty("test.build.dir",
-      "target/test-dir") + "/" + TestSSLHttpServer.class.getSimpleName();
-
-  private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class);
-  private static Configuration conf;
-  private static HttpServer server;
-  private static URL baseUrl;
-  private static String keystoresDir;
-  private static String sslConfDir;
-  private static SSLFactory clientSslFactory;
-
-  @BeforeClass
-  public static void setup() throws Exception {
-    conf = new Configuration();
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
-
-    File base = new File(BASEDIR);
-    FileUtil.fullyDelete(base);
-    base.mkdirs();
-    keystoresDir = new File(BASEDIR).getAbsolutePath();
-    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
-
-    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
-    Configuration sslConf = new Configuration(false);
-    sslConf.addResource("ssl-server.xml");
-    sslConf.addResource("ssl-client.xml");
-
-    clientSslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, sslConf);
-    clientSslFactory.init();
-
-    server = new HttpServer.Builder()
-        .setName("test")
-        .addEndpoint(new URI("https://localhost"))
-        .setConf(conf)
-        .keyPassword(HBaseConfiguration.getPassword(sslConf, "ssl.server.keystore.keypassword",
-            null))
-        .keyStore(sslConf.get("ssl.server.keystore.location"),
-            HBaseConfiguration.getPassword(sslConf, "ssl.server.keystore.password", null),
-            sslConf.get("ssl.server.keystore.type", "jks"))
-        .trustStore(sslConf.get("ssl.server.truststore.location"),
-            HBaseConfiguration.getPassword(sslConf, "ssl.server.truststore.password", null),
-            sslConf.get("ssl.server.truststore.type", "jks")).build();
-    server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
-    server.start();
-    baseUrl = new URL("https://"
-        + NetUtils.getHostPortString(server.getConnectorAddress(0)));
-    LOG.info("HTTP server started: " + baseUrl);
-  }
-
-  @AfterClass
-  public static void cleanup() throws Exception {
-    server.stop();
-    FileUtil.fullyDelete(new File(BASEDIR));
-    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
-    clientSslFactory.destroy();
-  }
-
-  @Test
-  public void testEcho() throws Exception {
-    assertEquals("a:b\nc:d\n", readOut(new URL(baseUrl, "/echo?a=b&c=d")));
-    assertEquals("a:b\nc&lt;:d\ne:&gt;\n", readOut(new URL(baseUrl,
-        "/echo?a=b&c<=d&e=>")));
-  }
-
-  private static String readOut(URL url) throws Exception {
-    HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
-    conn.setSSLSocketFactory(clientSslFactory.createSSLSocketFactory());
-    InputStream in = conn.getInputStream();
-    ByteArrayOutputStream out = new ByteArrayOutputStream();
-    IOUtils.copyBytes(in, out, 1024);
-    return out.toString();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f01df45f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
deleted file mode 100644
index 32bc03e..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.Random;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.util.StringUtils;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestServletFilter extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-  static volatile String uri = null;
-
-  /** A very simple filter that records the URI it filters. */
-  static public class SimpleFilter implements Filter {
-    private FilterConfig filterConfig = null;
-
-    @Override
-    public void init(FilterConfig filterConfig) throws ServletException {
-      this.filterConfig = filterConfig;
-    }
-
-    @Override
-    public void destroy() {
-      this.filterConfig = null;
-    }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain chain) throws IOException, ServletException {
-      if (filterConfig == null)
-         return;
-
-      uri = ((HttpServletRequest)request).getRequestURI();
-      LOG.info("filtering " + uri);
-      chain.doFilter(request, response);
-    }
-
-    /** Configuration for the filter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {}
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addFilter("simple", SimpleFilter.class.getName(), null);
-      }
-    }
-  }
-
-  public static void assertExceptionContains(String string, Throwable t) {
-    String msg = t.getMessage();
-    Assert.assertTrue(
-        "Expected to find '" + string + "' but got unexpected exception:"
-        + StringUtils.stringifyException(t), msg.contains(string));
-  }
-
-  /** Access a URL, ignoring IOExceptions such as those thrown when the page does not exist. */
-  static void access(String urlstring) throws IOException {
-    LOG.warn("access " + urlstring);
-    URL url = new URL(urlstring);
-    URLConnection connection = url.openConnection();
-    connection.connect();
-
-    try {
-      BufferedReader in = new BufferedReader(new InputStreamReader(
-          connection.getInputStream()));
-      try {
-        for(; in.readLine() != null; );
-      } finally {
-        in.close();
-      }
-    } catch(IOException ioe) {
-      LOG.warn("urlstring=" + urlstring, ioe);
-    }
-  }
-
-  @Test
-  @Ignore
-  //From stack
-  // It's a 'foreign' test, one that came in from Hadoop when we copy/pasted the http code.
-  // It's second class. Could comment it out if it is the only failing test (as per @nkeywal – sort of).
-  public void testServletFilter() throws Exception {
-    Configuration conf = new Configuration();
-
-    //start an http server with SimpleFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        SimpleFilter.Initializer.class.getName());
-    HttpServer http = createTestServer(conf);
-    http.start();
-
-    final String fsckURL = "/fsck";
-    final String stacksURL = "/stacks";
-    final String ajspURL = "/a.jsp";
-    final String logURL = "/logs/a.log";
-    final String hadooplogoURL = "/static/hadoop-logo.jpg";
-
-    final String[] urls = {fsckURL, stacksURL, ajspURL, logURL, hadooplogoURL};
-    final Random ran = new Random();
-    final int[] sequence = new int[50];
-
-    //generate a random access sequence
-    for(int i = 0; i < sequence.length; i++) {
-      sequence[i] = ran.nextInt(urls.length);
-    }
-
-    //access the urls in the generated sequence
-    final String prefix = "http://"
-        + NetUtils.getHostPortString(http.getConnectorAddress(0));
-    try {
-      for(int i = 0; i < sequence.length; i++) {
-        access(prefix + urls[sequence[i]]);
-
-        //make sure everything except fsck gets filtered
-        if (sequence[i] == 0) {
-          assertEquals(null, uri);
-        } else {
-          assertEquals(urls[sequence[i]], uri);
-          uri = null;
-        }
-      }
-    } finally {
-      http.stop();
-    }
-  }
-
-  static public class ErrorFilter extends SimpleFilter {
-    @Override
-    public void init(FilterConfig arg0) throws ServletException {
-      throw new ServletException("Throwing the exception from Filter init");
-    }
-
-    /** Configuration for the filter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {
-      }
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addFilter("simple", ErrorFilter.class.getName(), null);
-      }
-    }
-  }
-
-  @Test
-  public void testServletFilterWhenInitThrowsException() throws Exception {
-    Configuration conf = new Configuration();
-    // start a http server with ErrorFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        ErrorFilter.Initializer.class.getName());
-    HttpServer http = createTestServer(conf);
-    try {
-      http.start();
-      fail("expecting exception");
-    } catch (IOException e) {
-      assertExceptionContains("Problem starting http server", e);
-    }
-  }
-
-  /**
-   * Similar to the above test case, except that it uses a different API to add the
-   * filter. Regression test for HADOOP-8786.
-   */
-  @Test
-  public void testContextSpecificServletFilterWhenInitThrowsException()
-      throws Exception {
-    Configuration conf = new Configuration();
-    HttpServer http = createTestServer(conf);
-    HttpServer.defineFilter(http.webAppContext,
-        "ErrorFilter", ErrorFilter.class.getName(),
-        null, null);
-    try {
-      http.start();
-      fail("expecting exception");
-    } catch (IOException e) {
-      assertExceptionContains("Unable to initialize WebAppContext", e);
-    }
-  }
-
-}