Posted to commits@hbase.apache.org by bu...@apache.org on 2017/11/02 00:16:17 UTC

[12/13] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

This reverts commit 3969b853b272c9d898024b3e86308c964e6fe6d0.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/851f239f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/851f239f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/851f239f

Branch: refs/heads/HBASE-19124
Commit: 851f239f1e8288e099b76cc69e54c14f84041857
Parents: 6ea4288
Author: Sean Busbey <bu...@apache.org>
Authored: Wed Nov 1 09:19:06 2017 -0500
Committer: Sean Busbey <bu...@apache.org>
Committed: Wed Nov 1 19:14:30 2017 -0500

----------------------------------------------------------------------
 hbase-common/pom.xml                            |    4 -
 .../org/apache/hadoop/hbase/util/JSONBean.java  |  351 -----
 .../hadoop/hbase/util/JSONMetricUtil.java       |  214 ---
 hbase-endpoint/pom.xml                          |    6 -
 hbase-http/pom.xml                              |  515 -------
 .../hbase/http/AdminAuthorizedServlet.java      |   49 -
 .../http/ClickjackingPreventionFilter.java      |   55 -
 .../hadoop/hbase/http/FilterContainer.java      |   41 -
 .../hadoop/hbase/http/FilterInitializer.java    |   32 -
 .../apache/hadoop/hbase/http/HtmlQuoting.java   |  215 ---
 .../apache/hadoop/hbase/http/HttpConfig.java    |   80 -
 .../hadoop/hbase/http/HttpRequestLog.java       |   93 --
 .../hbase/http/HttpRequestLogAppender.java      |   63 -
 .../apache/hadoop/hbase/http/HttpServer.java    | 1387 ------------------
 .../hadoop/hbase/http/HttpServerUtil.java       |   52 -
 .../apache/hadoop/hbase/http/InfoServer.java    |  112 --
 .../apache/hadoop/hbase/http/NoCacheFilter.java |   56 -
 .../hbase/http/ServerConfigurationKeys.java     |   47 -
 .../hadoop/hbase/http/conf/ConfServlet.java     |  107 --
 .../hadoop/hbase/http/jmx/JMXJsonServlet.java   |  240 ---
 .../hadoop/hbase/http/jmx/package-info.java     |   26 -
 .../hbase/http/lib/StaticUserWebFilter.java     |  155 --
 .../hadoop/hbase/http/lib/package-info.java     |   38 -
 .../apache/hadoop/hbase/http/log/LogLevel.java  |  175 ---
 .../apache/hadoop/hbase/http/package-info.java  |   27 -
 .../hbase/http/HttpServerFunctionalTest.java    |  272 ----
 .../hadoop/hbase/http/TestGlobalFilter.java     |  151 --
 .../hadoop/hbase/http/TestHtmlQuoting.java      |   94 --
 .../hadoop/hbase/http/TestHttpRequestLog.java   |   52 -
 .../hbase/http/TestHttpRequestLogAppender.java  |   41 -
 .../hadoop/hbase/http/TestHttpServer.java       |  617 --------
 .../hbase/http/TestHttpServerLifecycle.java     |  135 --
 .../hbase/http/TestHttpServerWebapps.java       |   68 -
 .../hadoop/hbase/http/TestPathFilter.java       |  155 --
 .../hadoop/hbase/http/TestSSLHttpServer.java    |  124 --
 .../hadoop/hbase/http/TestServletFilter.java    |  217 ---
 .../hadoop/hbase/http/TestSpnegoHttpServer.java |  258 ----
 .../hadoop/hbase/http/conf/TestConfServlet.java |  116 --
 .../hbase/http/jmx/TestJMXJsonServlet.java      |  134 --
 .../hbase/http/lib/TestStaticUserWebFilter.java |   86 --
 .../hadoop/hbase/http/log/TestLogLevel.java     |   92 --
 .../hbase/http/resource/JerseyResource.java     |   64 -
 .../hadoop/hbase/http/ssl/KeyStoreTestUtil.java |  342 -----
 hbase-http/src/test/resources/log4j.properties  |   68 -
 .../src/test/resources/webapps/static/test.css  |   21 -
 .../src/test/resources/webapps/test/testjsp.jsp |   21 -
 .../apache/hadoop/hbase/rest/RESTServer.java    |    4 +-
 .../hbase/rest/HBaseRESTTestingUtility.java     |    4 +-
 hbase-server/pom.xml                            |   45 +-
 .../hbase/http/AdminAuthorizedServlet.java      |   49 +
 .../http/ClickjackingPreventionFilter.java      |   55 +
 .../hadoop/hbase/http/FilterContainer.java      |   41 +
 .../hadoop/hbase/http/FilterInitializer.java    |   32 +
 .../apache/hadoop/hbase/http/HtmlQuoting.java   |  215 +++
 .../apache/hadoop/hbase/http/HttpConfig.java    |   80 +
 .../hadoop/hbase/http/HttpRequestLog.java       |   93 ++
 .../hbase/http/HttpRequestLogAppender.java      |   63 +
 .../apache/hadoop/hbase/http/HttpServer.java    | 1387 ++++++++++++++++++
 .../apache/hadoop/hbase/http/InfoServer.java    |  112 ++
 .../apache/hadoop/hbase/http/NoCacheFilter.java |   56 +
 .../hbase/http/ServerConfigurationKeys.java     |   47 +
 .../hadoop/hbase/http/conf/ConfServlet.java     |  107 ++
 .../hadoop/hbase/http/jmx/JMXJsonServlet.java   |  240 +++
 .../hadoop/hbase/http/jmx/package-info.java     |   26 +
 .../hbase/http/lib/StaticUserWebFilter.java     |  155 ++
 .../hadoop/hbase/http/lib/package-info.java     |   38 +
 .../apache/hadoop/hbase/http/log/LogLevel.java  |  175 +++
 .../apache/hadoop/hbase/http/package-info.java  |   27 +
 .../regionserver/DumpRegionServerMetrics.java   |   60 -
 .../hbase/regionserver/HRegionServer.java       |    3 +-
 .../hadoop/hbase/util/HttpServerUtil.java       |   52 +
 .../org/apache/hadoop/hbase/util/JSONBean.java  |  387 +++++
 .../hadoop/hbase/util/JSONMetricUtil.java       |  214 +++
 .../apache/hadoop/hbase/GenericTestUtils.java   |   74 +-
 .../hbase/http/HttpServerFunctionalTest.java    |  272 ++++
 .../hadoop/hbase/http/TestGlobalFilter.java     |  151 ++
 .../hadoop/hbase/http/TestHtmlQuoting.java      |   94 ++
 .../hadoop/hbase/http/TestHttpRequestLog.java   |   52 +
 .../hbase/http/TestHttpRequestLogAppender.java  |   41 +
 .../hadoop/hbase/http/TestHttpServer.java       |  617 ++++++++
 .../hbase/http/TestHttpServerLifecycle.java     |  135 ++
 .../hbase/http/TestHttpServerWebapps.java       |   68 +
 .../hadoop/hbase/http/TestPathFilter.java       |  155 ++
 .../hadoop/hbase/http/TestSSLHttpServer.java    |  124 ++
 .../hadoop/hbase/http/TestServletFilter.java    |  210 +++
 .../hadoop/hbase/http/TestSpnegoHttpServer.java |  258 ++++
 .../hadoop/hbase/http/conf/TestConfServlet.java |  116 ++
 .../hbase/http/jmx/TestJMXJsonServlet.java      |  134 ++
 .../hbase/http/lib/TestStaticUserWebFilter.java |   86 ++
 .../hadoop/hbase/http/log/TestLogLevel.java     |   92 ++
 .../hbase/http/resource/JerseyResource.java     |   64 +
 .../hadoop/hbase/http/ssl/KeyStoreTestUtil.java |  342 +++++
 hbase-shaded/hbase-shaded-mapreduce/pom.xml     |    4 -
 pom.xml                                         |   23 -
 94 files changed, 6745 insertions(+), 7402 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-common/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index b732bbe..93a09b3 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -264,10 +264,6 @@
       <artifactId>findbugs-annotations</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
-    </dependency>
-    <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
deleted file mode 100644
index 0571a08..0000000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
+++ /dev/null
@@ -1,351 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.util;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.lang.management.ManagementFactory;
-import java.lang.reflect.Array;
-import java.util.Iterator;
-import java.util.Set;
-
-import javax.management.AttributeNotFoundException;
-import javax.management.InstanceNotFoundException;
-import javax.management.IntrospectionException;
-import javax.management.MBeanAttributeInfo;
-import javax.management.MBeanException;
-import javax.management.MBeanInfo;
-import javax.management.MBeanServer;
-import javax.management.MalformedObjectNameException;
-import javax.management.ObjectName;
-import javax.management.ReflectionException;
-import javax.management.RuntimeErrorException;
-import javax.management.RuntimeMBeanException;
-import javax.management.openmbean.CompositeData;
-import javax.management.openmbean.CompositeType;
-import javax.management.openmbean.TabularData;
-
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.core.JsonGenerator;
-import com.fasterxml.jackson.core.JsonGenerationException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * Utility for doing JSON and MBeans.
- */
-public class JSONBean {
-  private static final Log LOG = LogFactory.getLog(JSONBean.class);
-  private final JsonFactory jsonFactory;
-
-  public JSONBean() {
-    this.jsonFactory = new JsonFactory();
-  }
-
-  /**
-   * Use dumping out mbeans as JSON.
-   */
-  public interface Writer extends Closeable {
-    void write(final String key, final String value) throws JsonGenerationException, IOException;
-    int write(final MBeanServer mBeanServer, ObjectName qry, String attribute,
-        final boolean description) throws IOException;
-    void flush() throws IOException;
-  }
-
-  public Writer open(final PrintWriter writer) throws IOException {
-    final JsonGenerator jg = jsonFactory.createJsonGenerator(writer);
-    jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
-    jg.useDefaultPrettyPrinter();
-    jg.writeStartObject();
-    return new Writer() {
-      @Override
-      public void flush() throws IOException {
-        jg.flush();
-      }
-
-      @Override
-      public void close() throws IOException {
-        jg.close();
-      }
-
-      @Override
-      public void write(String key, String value) throws JsonGenerationException, IOException {
-        jg.writeStringField(key, value);
-      }
-
-      @Override
-      public int write(MBeanServer mBeanServer, ObjectName qry, String attribute,
-          boolean description)
-      throws IOException {
-        return JSONBean.write(jg, mBeanServer, qry, attribute, description);
-      }
-    };
-  }
-
-  /**
-   * @return Return non-zero if failed to find bean. 0
-   */
-  private static int write(final JsonGenerator jg,
-      final MBeanServer mBeanServer, ObjectName qry, String attribute,
-      final boolean description)
-  throws IOException {
-    LOG.trace("Listing beans for "+qry);
-    Set<ObjectName> names = null;
-    names = mBeanServer.queryNames(qry, null);
-    jg.writeArrayFieldStart("beans");
-    Iterator<ObjectName> it = names.iterator();
-    while (it.hasNext()) {
-      ObjectName oname = it.next();
-      MBeanInfo minfo;
-      String code = "";
-      String descriptionStr = null;
-      Object attributeinfo = null;
-      try {
-        minfo = mBeanServer.getMBeanInfo(oname);
-        code = minfo.getClassName();
-        if (description) descriptionStr = minfo.getDescription();
-        String prs = "";
-        try {
-          if ("org.apache.commons.modeler.BaseModelMBean".equals(code)) {
-            prs = "modelerType";
-            code = (String) mBeanServer.getAttribute(oname, prs);
-          }
-          if (attribute != null) {
-            prs = attribute;
-            attributeinfo = mBeanServer.getAttribute(oname, prs);
-          }
-        } catch (RuntimeMBeanException e) {
-         // UnsupportedOperationExceptions happen in the normal course of business,
-         // so no need to log them as errors all the time.
-         if (e.getCause() instanceof UnsupportedOperationException) {
-           if (LOG.isTraceEnabled()) {
-             LOG.trace("Getting attribute " + prs + " of " + oname + " threw " + e);
-           }
-         } else {
-           LOG.error("Getting attribute " + prs + " of " + oname + " threw an exception", e);
-         }
-         return 0;
-        } catch (AttributeNotFoundException e) {
-          // If the modelerType attribute was not found, the class name is used
-          // instead.
-          LOG.error("getting attribute " + prs + " of " + oname
-              + " threw an exception", e);
-        } catch (MBeanException e) {
-          // The code inside the attribute getter threw an exception so log it,
-          // and fall back on the class name
-          LOG.error("getting attribute " + prs + " of " + oname
-              + " threw an exception", e);
-        } catch (RuntimeException e) {
-          // For some reason even with an MBeanException available to them
-          // Runtime exceptionscan still find their way through, so treat them
-          // the same as MBeanException
-          LOG.error("getting attribute " + prs + " of " + oname
-              + " threw an exception", e);
-        } catch (ReflectionException e) {
-          // This happens when the code inside the JMX bean (setter?? from the
-          // java docs) threw an exception, so log it and fall back on the
-          // class name
-          LOG.error("getting attribute " + prs + " of " + oname
-              + " threw an exception", e);
-        }
-      } catch (InstanceNotFoundException e) {
-        //Ignored for some reason the bean was not found so don't output it
-        continue;
-      } catch (IntrospectionException e) {
-        // This is an internal error, something odd happened with reflection so
-        // log it and don't output the bean.
-        LOG.error("Problem while trying to process JMX query: " + qry
-            + " with MBean " + oname, e);
-        continue;
-      } catch (ReflectionException e) {
-        // This happens when the code inside the JMX bean threw an exception, so
-        // log it and don't output the bean.
-        LOG.error("Problem while trying to process JMX query: " + qry
-            + " with MBean " + oname, e);
-        continue;
-      }
-
-      jg.writeStartObject();
-      jg.writeStringField("name", oname.toString());
-      if (description && descriptionStr != null && descriptionStr.length() > 0) {
-        jg.writeStringField("description", descriptionStr);
-      }
-      jg.writeStringField("modelerType", code);
-      if (attribute != null && attributeinfo == null) {
-        jg.writeStringField("result", "ERROR");
-        jg.writeStringField("message", "No attribute with name " + attribute + " was found.");
-        jg.writeEndObject();
-        jg.writeEndArray();
-        jg.close();
-        return -1;
-      }
-
-      if (attribute != null) {
-        writeAttribute(jg, attribute, descriptionStr, attributeinfo);
-      } else {
-        MBeanAttributeInfo[] attrs = minfo.getAttributes();
-        for (int i = 0; i < attrs.length; i++) {
-          writeAttribute(jg, mBeanServer, oname, description, attrs[i]);
-        }
-      }
-      jg.writeEndObject();
-    }
-    jg.writeEndArray();
-    return 0;
-  }
-
-  private static void writeAttribute(final JsonGenerator jg,
-      final MBeanServer mBeanServer, ObjectName oname,
-      final boolean description, final MBeanAttributeInfo attr)
-  throws IOException {
-    if (!attr.isReadable()) {
-      return;
-    }
-    String attName = attr.getName();
-    if ("modelerType".equals(attName)) {
-      return;
-    }
-    if (attName.indexOf("=") >= 0 || attName.indexOf(":") >= 0 || attName.indexOf(" ") >= 0) {
-      return;
-    }
-    String descriptionStr = description? attr.getDescription(): null;
-    Object value = null;
-    try {
-      value = mBeanServer.getAttribute(oname, attName);
-    } catch (RuntimeMBeanException e) {
-      // UnsupportedOperationExceptions happen in the normal course of business,
-      // so no need to log them as errors all the time.
-      if (e.getCause() instanceof UnsupportedOperationException) {
-        if (LOG.isTraceEnabled()) {
-          LOG.trace("Getting attribute " + attName + " of " + oname + " threw " + e);
-        }
-      } else {
-        LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
-      }
-      return;
-    } catch (RuntimeErrorException e) {
-      // RuntimeErrorException happens when an unexpected failure occurs in getAttribute
-      // for example https://issues.apache.org/jira/browse/DAEMON-120
-      LOG.debug("getting attribute "+attName+" of "+oname+" threw an exception", e);
-      return;
-    } catch (AttributeNotFoundException e) {
-      //Ignored the attribute was not found, which should never happen because the bean
-      //just told us that it has this attribute, but if this happens just don't output
-      //the attribute.
-      return;
-    } catch (MBeanException e) {
-      //The code inside the attribute getter threw an exception so log it, and
-      // skip outputting the attribute
-      LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
-      return;
-    } catch (RuntimeException e) {
-      //For some reason even with an MBeanException available to them Runtime exceptions
-      //can still find their way through, so treat them the same as MBeanException
-      LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
-      return;
-    } catch (ReflectionException e) {
-      //This happens when the code inside the JMX bean (setter?? from the java docs)
-      //threw an exception, so log it and skip outputting the attribute
-      LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
-      return;
-    } catch (InstanceNotFoundException e) {
-      //Ignored the mbean itself was not found, which should never happen because we
-      //just accessed it (perhaps something unregistered in-between) but if this
-      //happens just don't output the attribute.
-      return;
-    }
-
-    writeAttribute(jg, attName, descriptionStr, value);
-  }
-
-  private static void writeAttribute(JsonGenerator jg, String attName, final String descriptionStr,
-      Object value)
-  throws IOException {
-    boolean description = false;
-    if (descriptionStr != null && descriptionStr.length() > 0 && !attName.equals(descriptionStr)) {
-      description = true;
-      jg.writeFieldName(attName);
-      jg.writeStartObject();
-      jg.writeFieldName("description");
-      jg.writeString(descriptionStr);
-      jg.writeFieldName("value");
-      writeObject(jg, description, value);
-      jg.writeEndObject();
-    } else {
-      jg.writeFieldName(attName);
-      writeObject(jg, description, value);
-    }
-  }
-
-  private static void writeObject(final JsonGenerator jg, final boolean description, Object value)
-  throws IOException {
-    if(value == null) {
-      jg.writeNull();
-    } else {
-      Class<?> c = value.getClass();
-      if (c.isArray()) {
-        jg.writeStartArray();
-        int len = Array.getLength(value);
-        for (int j = 0; j < len; j++) {
-          Object item = Array.get(value, j);
-          writeObject(jg, description, item);
-        }
-        jg.writeEndArray();
-      } else if(value instanceof Number) {
-        Number n = (Number)value;
-        jg.writeNumber(n.toString());
-      } else if(value instanceof Boolean) {
-        Boolean b = (Boolean)value;
-        jg.writeBoolean(b);
-      } else if(value instanceof CompositeData) {
-        CompositeData cds = (CompositeData)value;
-        CompositeType comp = cds.getCompositeType();
-        Set<String> keys = comp.keySet();
-        jg.writeStartObject();
-        for (String key: keys) {
-          writeAttribute(jg, key, null, cds.get(key));
-        }
-        jg.writeEndObject();
-      } else if(value instanceof TabularData) {
-        TabularData tds = (TabularData)value;
-        jg.writeStartArray();
-        for(Object entry : tds.values()) {
-          writeObject(jg, description, entry);
-        }
-        jg.writeEndArray();
-      } else {
-        jg.writeString(value.toString());
-      }
-    }
-  }
-
-  /**
-   * Dump out all registered mbeans as json on System.out.
-   * @throws IOException
-   * @throws MalformedObjectNameException
-   */
-  public static void dumpAllBeans() throws IOException, MalformedObjectNameException {
-    try (PrintWriter writer = new PrintWriter(System.out)) {
-      JSONBean dumper = new JSONBean();
-      try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) {
-        MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
-        jsonBeanWriter.write(mbeanServer, new ObjectName("*:*"), null, false);
-      }
-    }
-  }
-}
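
The JSONBean utility removed from hbase-common above (and restored under hbase-server later in this diff) is a thin wrapper around Jackson's JsonGenerator for dumping MBeans as JSON. A hedged usage sketch follows; the query string, class name, and output target are illustrative assumptions, while the open/write/flush calls match the API exactly as it appears in the deleted file.

// Usage sketch only -- not part of this commit.
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.hadoop.hbase.util.JSONBean;

public class JSONBeanUsageSketch {
  public static void main(String[] args) throws Exception {
    StringWriter out = new StringWriter();
    JSONBean dumper = new JSONBean();
    try (PrintWriter pw = new PrintWriter(out);
         JSONBean.Writer json = dumper.open(pw)) {
      MBeanServer server = ManagementFactory.getPlatformMBeanServer();
      // Dump only the Memory MXBean, all attributes, no descriptions
      // (illustrative query; dumpAllBeans() uses "*:*" instead).
      json.write(server, new ObjectName("java.lang:type=Memory"), null, false);
      json.flush();
    }
    System.out.println(out);
  }
}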

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
deleted file mode 100644
index d10610e..0000000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * */
-package org.apache.hadoop.hbase.util;
-
-import java.beans.IntrospectionException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.lang.management.GarbageCollectorMXBean;
-import java.lang.management.ManagementFactory;
-import java.lang.management.MemoryPoolMXBean;
-import java.lang.management.RuntimeMXBean;
-import java.util.Hashtable;
-import java.util.List;
-import java.util.Set;
-
-import javax.management.InstanceNotFoundException;
-import javax.management.MBeanAttributeInfo;
-import javax.management.MBeanInfo;
-import javax.management.MBeanServer;
-import javax.management.MalformedObjectNameException;
-import javax.management.ObjectName;
-import javax.management.ReflectionException;
-import javax.management.openmbean.CompositeData;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.codehaus.jettison.json.JSONException;
-
-public final class JSONMetricUtil {
-
-  private static final Log LOG = LogFactory.getLog(JSONMetricUtil.class);
-
-  private static MBeanServer mbServer = ManagementFactory.getPlatformMBeanServer();
-  //MBeans ObjectName domain names
-  public static final String JAVA_LANG_DOMAIN = "java.lang";
-  public static final String JAVA_NIO_DOMAIN = "java.nio";
-  public static final String SUN_MGMT_DOMAIN = "com.sun.management";
-  public static final String HADOOP_DOMAIN = "Hadoop";
-
-  //MBeans ObjectName properties key names
-  public static final String TYPE_KEY = "type";
-  public static final String NAME_KEY = "name";
-  public static final String SERVICE_KEY = "service";
-  public static final String SUBSYSTEM_KEY = "sub";
-
-/**
- * Utility for getting metric values. Collection of static methods intended for
- * easier access to metric values.
- */
-  private JSONMetricUtil() {
-    // Not to be called
-  }
-
-  public static MBeanAttributeInfo[] getMBeanAttributeInfo(ObjectName bean)
-      throws IntrospectionException, InstanceNotFoundException, ReflectionException,
-      IntrospectionException, javax.management.IntrospectionException {
-    MBeanInfo mbinfo = mbServer.getMBeanInfo(bean);
-    return mbinfo.getAttributes();
-  }
-
-  public static Object getValueFromMBean(ObjectName bean, String attribute) {
-    Object value = null;
-    try {
-      value = mbServer.getAttribute(bean, attribute);
-    }
-    catch(Exception e) {
-      LOG.error("Unable to get value from MBean= "+ bean.toString() +
-        "for attribute=" + attribute + " " + e.getMessage());
-    }
-    return value;
-  }
-
-  /**
-   * Returns a subset of mbeans defined by qry.
-   * Modeled after DumpRegionServerMetrics#dumpMetrics.
-   * Example: String qry= "java.lang:type=Memory"
-   * @throws MalformedObjectNameException if json have bad format
-   * @throws IOException /
-   * @return String representation of json array.
-   */
-  public static String dumpBeanToString(String qry) throws MalformedObjectNameException,
-  IOException {
-    StringWriter sw = new StringWriter(1024 * 100); // Guess this size
-    try (PrintWriter writer = new PrintWriter(sw)) {
-      JSONBean dumper = new JSONBean();
-      try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) {
-        MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
-        jsonBeanWriter.write(mbeanServer,
-          new ObjectName(qry), null, false);
-      }
-    }
-    sw.close();
-    return sw.toString();
-  }
-
-  public static JsonNode mappStringToJsonNode(String jsonString)
-      throws JsonProcessingException, IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    JsonNode node = mapper.readTree(jsonString);
-    return node;
-  }
-
-
-  public static JsonNode searchJson(JsonNode tree, String searchKey)
-      throws JsonProcessingException, IOException {
-    if (tree == null) {
-      return null;
-    }
-    if(tree.has(searchKey)) {
-      return tree.get(searchKey);
-    }
-    if(tree.isContainerNode()) {
-      for(JsonNode branch: tree) {
-        JsonNode branchResult = searchJson(branch, searchKey);
-        if (branchResult != null && !branchResult.isMissingNode()) {
-          return branchResult;
-        }
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Method for building hashtable used for constructing ObjectName.
-   * Mapping is done with arrays indices
-   * @param keys Hashtable keys
-   * @param values Hashtable values
-   * @return Hashtable or null if arrays are empty * or have different number of elements
-   */
-  public static Hashtable<String, String> buldKeyValueTable(String[] keys, String[] values) {
-    if (keys.length != values.length) {
-      LOG.error("keys and values arrays must be same size");
-      return null;
-    }
-    if (keys.length == 0 || values.length == 0) {
-      LOG.error("keys and values arrays can not be empty;");
-      return null;
-    }
-    Hashtable<String, String> table = new Hashtable<String, String>();
-    for(int i = 0; i < keys.length; i++) {
-      table.put(keys[i], values[i]);
-    }
-    return table;
-  }
-
-  public static ObjectName buildObjectName(String pattern) throws MalformedObjectNameException {
-    return new ObjectName(pattern);
-  }
-
-  public static ObjectName buildObjectName(String domain, Hashtable<String, String> keyValueTable)
-      throws MalformedObjectNameException {
-    return new ObjectName(domain, keyValueTable);
-  }
-
-  public static Set<ObjectName> getRegistredMBeans(ObjectName name, MBeanServer mbs) {
-    return mbs.queryNames(name, null);
-  }
-
-  public static String getProcessPID() {
-    return ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
-  }
-
-  public static String getCommmand() throws MalformedObjectNameException,
-  IOException, JSONException {
-    RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
-    return runtimeBean.getSystemProperties().get("sun.java.command");
-  }
-
-  public static List<GarbageCollectorMXBean> getGcCollectorBeans() {
-    List<GarbageCollectorMXBean> gcBeans = ManagementFactory.getGarbageCollectorMXBeans();
-    return gcBeans;
-  }
-
-  public static long getLastGcDuration(ObjectName gcCollector) {
-    long lastGcDuration = 0;
-    Object lastGcInfo = getValueFromMBean(gcCollector, "LastGcInfo");
-    if (lastGcInfo != null && lastGcInfo instanceof CompositeData) {
-      CompositeData cds = (CompositeData)lastGcInfo;
-      lastGcDuration = (long) cds.get("duration");
-    }
-    return lastGcDuration;
-  }
-
-  public static List<MemoryPoolMXBean> getMemoryPools() {
-    List<MemoryPoolMXBean> mPools = ManagementFactory.getMemoryPoolMXBeans();
-    return mPools;
-  }
-
-  public static float calcPercentage(long a, long b) {
-    if (a == 0 || b == 0) {
-      return 0;
-    }
-    return ((float)a / (float)b) *100;
-  }
-}
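
JSONMetricUtil, removed here and likewise restored under hbase-server further down, layers a few convenience helpers on top of JSONBean. A hedged sketch of how they chain together, assuming only the static methods visible in the deleted file (the "HeapMemoryUsage" search key is an illustrative choice):

// Usage sketch only -- not part of this commit.
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.hadoop.hbase.util.JSONMetricUtil;

public class JSONMetricUtilUsageSketch {
  public static void main(String[] args) throws Exception {
    // Serialize the java.lang Memory MBean to a JSON string...
    String json = JSONMetricUtil.dumpBeanToString("java.lang:type=Memory");
    // ...parse it back into a tree and pull out one nested attribute.
    JsonNode tree = JSONMetricUtil.mappStringToJsonNode(json);
    JsonNode heap = JSONMetricUtil.searchJson(tree, "HeapMemoryUsage");
    System.out.println(heap);
  }
}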

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-endpoint/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index 101bfdb..2686702 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -174,12 +174,6 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-http</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
     <!-- The coprocessor.Export needs mapreduce.Import and mapreduce.Export to run the unit tests -->
     <!-- see org.apache.hadoop.hbase.coprocessor.TestImportExport -->
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml
deleted file mode 100644
index 9bd6fcd..0000000
--- a/hbase-http/pom.xml
+++ /dev/null
@@ -1,515 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <artifactId>hbase-build-configuration</artifactId>
-    <groupId>org.apache.hbase</groupId>
-    <version>3.0.0-SNAPSHOT</version>
-    <relativePath>../hbase-build-configuration</relativePath>
-  </parent>
-  <artifactId>hbase-http</artifactId>
-  <name>Apache HBase - HTTP</name>
-  <description>HTTP functionality for HBase Servers</description>
-
-  <build>
-    <!-- Makes sure the resources get added before they are processed
-      by placing this first -->
-    <testResources>
-      <!-- Our test artifact has different license info than our source/bin ones -->
-      <testResource>
-        <directory>src/test/resources/META-INF/</directory>
-        <targetPath>META-INF/</targetPath>
-        <includes>
-          <include>NOTICE</include>
-        </includes>
-        <filtering>true</filtering>
-      </testResource>
-      <testResource>
-        <directory>src/test/resources</directory>
-        <includes>
-          <include>**/**</include>
-        </includes>
-      </testResource>
-    </testResources>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-      <!-- licensing info from our bundled works -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-remote-resources-plugin</artifactId>
-        <version>1.5</version>
-        <executions>
-          <execution>
-            <id>default</id>
-            <configuration>
-              <attachToTest>false</attachToTest>
-              <properties>
-                <copyright-end-year>${build.year}</copyright-end-year>
-                <debug-print-included-work-info>${license.debug.print.included}</debug-print-included-work-info>
-                <bundled-dependencies>${license.bundles.dependencies}</bundled-dependencies>
-                <bundled-jquery>${license.bundles.jquery}</bundled-jquery>
-                <bundled-logo>${license.bundles.logo}</bundled-logo>
-                <bundled-bootstrap>${license.bundles.bootstrap}</bundled-bootstrap>
-              </properties>
-              <resourceBundles>
-                <resourceBundle>${project.groupId}:hbase-resource-bundle:${project.version}</resourceBundle>
-              </resourceBundles>
-              <supplementalModelArtifacts>
-                <supplementalModelArtifact>${project.groupId}:hbase-resource-bundle:${project.version}</supplementalModelArtifact>
-              </supplementalModelArtifacts>
-              <supplementalModels>
-                <supplementalModel>supplemental-models.xml</supplementalModel>
-              </supplementalModels>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <!-- Run with -Dmaven.test.skip.exec=true to build -tests.jar without running
-        tests (this is needed for upstream projects whose tests need this jar simply for
-        compilation) -->
-      <plugin>
-        <!--Make it so assembly:single does nothing in here-->
-        <artifactId>maven-assembly-plugin</artifactId>
-        <configuration>
-          <skipAssembly>true</skipAssembly>
-        </configuration>
-      </plugin>
-      <!-- Make a jar and put the sources in the jar -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>jar</goal>
-              <goal>test-jar</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <!-- Run findbugs -->
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>findbugs-maven-plugin</artifactId>
-      </plugin>
-      <!-- Testing plugins -->
-      <plugin>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <properties>
-            <property>
-              <name>listener</name>
-              <value>org.apache.hadoop.hbase.ResourceCheckerJUnitListener</value>
-            </property>
-          </properties>
-          <systemPropertyVariables>
-            <test.build.webapps>target/test-classes/webapps</test.build.webapps>
-          </systemPropertyVariables>
-        </configuration>
-      </plugin>
-    </plugins>
-    <!-- General Resources -->
-    <pluginManagement>
-       <plugins>
-         <!--This plugin's configuration is used to store Eclipse m2e settings
-             only. It has no influence on the Maven build itself and needs to
-             be kept in plugin management, not in the actual plugins. -->
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-antrun-plugin</artifactId>
-                    <versionRange>[1.6,)</versionRange>
-                    <goals>
-                      <goal>run</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <execute>
-                      <runOnIncremental>false</runOnIncremental>
-                      <runOnConfiguration>true</runOnConfiguration>
-                    </execute>
-                  </action>
-                </pluginExecution>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-dependency-plugin</artifactId>
-                    <versionRange>[2.8,)</versionRange>
-                    <goals>
-                      <goal>build-classpath</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore></ignore>
-                  </action>
-                </pluginExecution>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-compiler-plugin</artifactId>
-                    <versionRange>[3.2,)</versionRange>
-                    <goals>
-                      <goal>compile</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore></ignore>
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-    </pluginManagement>
-  </build>
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hbase.thirdparty</groupId>
-      <artifactId>hbase-shaded-miscellaneous</artifactId>
-    </dependency>
-    <!-- Intra-project dependencies -->
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-common</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-common</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-annotations</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <!-- resource bundle only needed at build time -->
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-resource-bundle</artifactId>
-      <version>${project.version}</version>
-      <optional>true</optional>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-server</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-servlet</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-util</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-util-ajax</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-webapp</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-http</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-security</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.glassfish.jersey.core</groupId>
-      <artifactId>jersey-server</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.glassfish.jersey.containers</groupId>
-      <artifactId>jersey-container-servlet-core</artifactId>
-    </dependency>
-    <!-- General dependencies -->
-    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>javax.servlet</groupId>
-      <artifactId>javax.servlet-api</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>javax.ws.rs</groupId>
-      <artifactId>javax.ws.rs-api</artifactId>
-    </dependency>
-
-    <!-- Test dependencies -->
-    <dependency>
-      <groupId>org.apache.kerby</groupId>
-      <artifactId>kerb-simplekdc</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.bouncycastle</groupId>
-      <artifactId>bcprov-jdk16</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.kerby</groupId>
-      <artifactId>kerb-core</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpclient</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpcore</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-core</artifactId>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-  <profiles>
-    <!-- Needs to make the profile in apache parent pom -->
-    <profile>
-      <id>apache-release</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-resources-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>license-javadocs</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>copy-resources</goal>
-                </goals>
-                <configuration>
-                  <outputDirectory>${project.build.directory}/apidocs</outputDirectory>
-                  <resources>
-                    <resource>
-                      <directory>src/main/javadoc/META-INF/</directory>
-                      <targetPath>META-INF/</targetPath>
-                      <includes>
-                        <include>LICENSE</include>
-                        <include>NOTICE</include>
-                      </includes>
-                      <filtering>true</filtering>
-                    </resource>
-                  </resources>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <!-- Skip the tests in this module -->
-    <profile>
-      <id>skipServerTests</id>
-      <activation>
-        <property>
-          <name>skipServerTests</name>
-        </property>
-      </activation>
-      <properties>
-        <surefire.skipFirstPart>true</surefire.skipFirstPart>
-        <surefire.skipSecondPart>true</surefire.skipSecondPart>
-      </properties>
-    </profile>
-    <!-- Special builds -->
-    <profile>
-      <id>native</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>make</id>
-                <phase>compile</phase>
-                <goals><goal>run</goal></goals>
-                <configuration>
-                  <target>
-                    <mkdir dir="${project.build.directory}/native"/>
-                    <exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
-                      <arg line="${basedir}/src/main/native -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
-                    </exec>
-                    <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
-                      <arg line="VERBOSE=1"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <!-- Profiles for building against different hadoop versions -->
-    <!-- There are a lot of common dependencies used here, should investigate
-    if we can combine these profiles somehow -->
-
-    <!-- profile for building against Hadoop 2.x.  This is the default.  -->
-    <profile>
-      <id>hadoop-2.0</id>
-      <activation>
-        <property>
-            <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
-            <!--h2--><name>!hadoop.profile</name>
-        </property>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-          </exclusions>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-auth</artifactId>
-        </dependency>
-      </dependencies>
-      <build>
-        <plugins>
-          <plugin>
-            <artifactId>maven-dependency-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>create-mrapp-generated-classpath</id>
-                <phase>generate-test-resources</phase>
-                <goals>
-                  <goal>build-classpath</goal>
-                </goals>
-                <configuration>
-                  <!-- needed to run the unit test for DS to generate
-                  the required classpath that is required in the env
-                  of the launch container in the mini mr/yarn cluster
-                  -->
-                  <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <!--
-      profile for building against Hadoop 3.0.x. Activate using:
-       mvn -Dhadoop.profile=3.0
-    -->
-    <profile>
-      <id>hadoop-3.0</id>
-      <activation>
-        <property>
-          <name>hadoop.profile</name>
-          <value>3.0</value>
-        </property>
-      </activation>
-      <properties>
-        <hadoop.version>${hadoop-three.version}</hadoop.version>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-          </exclusions>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-      <build>
-        <plugins>
-          <plugin>
-            <artifactId>maven-dependency-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>create-mrapp-generated-classpath</id>
-                <phase>generate-test-resources</phase>
-                <goals>
-                  <goal>build-classpath</goal>
-                </goals>
-                <configuration>
-                  <!-- needed to run the unit test for DS to generate
-                  the required classpath that is required in the env
-                  of the launch container in the mini mr/yarn cluster
-                  -->
-                  <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-  </profiles>
-</project>

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
deleted file mode 100644
index bd8570e..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.eclipse.jetty.servlet.DefaultServlet;
-
-/**
- * General servlet which is admin-authorized.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class AdminAuthorizedServlet extends DefaultServlet {
-
-  private static final long serialVersionUID = 1L;
-
-  @Override
-  protected void doGet(HttpServletRequest request, HttpServletResponse response)
-    throws ServletException, IOException {
-    // Do the authorization
-    if (HttpServer.hasAdministratorAccess(getServletContext(), request,
-        response)) {
-      // Authorization is done. Just call super.
-      super.doGet(request, response);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
deleted file mode 100644
index 9944d29..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-public class ClickjackingPreventionFilter implements Filter {
-
-    private FilterConfig filterConfig;
-
-    @Override
-    public void init(FilterConfig filterConfig) throws ServletException {
-        this.filterConfig = filterConfig;
-    }
-
-    @Override
-    public void doFilter(ServletRequest req, ServletResponse res,
-                         FilterChain chain)
-            throws IOException, ServletException {
-        HttpServletResponse httpRes = (HttpServletResponse) res;
-        httpRes.addHeader("X-Frame-Options", filterConfig.getInitParameter("xframeoptions"));
-        chain.doFilter(req, res);
-    }
-
-    @Override
-    public void destroy() {
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
deleted file mode 100644
index 7a79acc..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.util.Map;
-
-/**
- * A container interface to add javax.servlet.Filter.
- */
-public interface FilterContainer {
-  /**
-   * Add a filter to the container.
-   * @param name Filter name
-   * @param classname Filter class name
-   * @param parameters a map from parameter names to initial values
-   */
-  void addFilter(String name, String classname, Map<String, String> parameters);
-  /**
-   * Add a global filter to the container - This global filter will be
-   * applied to all available web contexts.
-   * @param name filter name
-   * @param classname filter class name
-   * @param parameters a map from parameter names to initial values
-   */
-  void addGlobalFilter(String name, String classname, Map<String, String> parameters);
-}
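
As a quick illustration of the contract above, a throwaway FilterContainer implementation
(hypothetical, useful mainly in tests) might simply record what it is asked to add:

import java.util.HashMap;
import java.util.Map;

public class RecordingFilterContainer implements FilterContainer {
  private final Map<String, String> filters = new HashMap<>();

  @Override
  public void addFilter(String name, String classname, Map<String, String> parameters) {
    filters.put(name, classname);
  }

  @Override
  public void addGlobalFilter(String name, String classname, Map<String, String> parameters) {
    filters.put(name, classname);
  }

  public Map<String, String> getRegisteredFilters() {
    return filters;
  }
}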

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
deleted file mode 100644
index d317343..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * Initialize a javax.servlet.Filter.
- */
-public abstract class FilterInitializer {
-  /**
-   * Initialize a Filter to a FilterContainer.
-   * @param container The filter container
-   * @param conf Configuration for run-time parameters
-   */
-  public abstract void initFilter(FilterContainer container, Configuration conf);
-}
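
A hedged sketch of a FilterInitializer subclass that wires the clickjacking filter in
through a FilterContainer; the configuration key and default shown here are assumptions,
not necessarily the keys the HBase HttpServer actually reads:

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;

public class ClickjackingFilterInitializer extends FilterInitializer {
  @Override
  public void initFilter(FilterContainer container, Configuration conf) {
    Map<String, String> params = new HashMap<>();
    // Hypothetical key; a real initializer would read whatever key the server defines.
    params.put("xframeoptions",
        conf.get("hbase.http.filter.xframeoptions.mode", "DENY"));
    container.addGlobalFilter("clickjackingprevention",
        ClickjackingPreventionFilter.class.getName(), params);
  }
}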

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
deleted file mode 100644
index 7f4bb83..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-
-/**
- * This class is responsible for quoting HTML characters.
- */
-public class HtmlQuoting {
-  private static final byte[] ampBytes = "&amp;".getBytes();
-  private static final byte[] aposBytes = "&apos;".getBytes();
-  private static final byte[] gtBytes = "&gt;".getBytes();
-  private static final byte[] ltBytes = "&lt;".getBytes();
-  private static final byte[] quotBytes = "&quot;".getBytes();
-
-  /**
-   * Does the given string need to be quoted?
-   * @param data the string to check
-   * @param off the starting position
-   * @param len the number of bytes to check
-   * @return true if the string contains any of the active HTML characters
-   */
-  public static boolean needsQuoting(byte[] data, int off, int len) {
-    if (off+len > data.length) {
-        throw new IllegalStateException("off+len=" + (off + len) + " should be lower"
-                + " than data length=" + data.length);
-    }
-    for(int i=off; i< off+len; ++i) {
-      switch(data[i]) {
-      case '&':
-      case '<':
-      case '>':
-      case '\'':
-      case '"':
-        return true;
-      default:
-        break;
-      }
-    }
-    return false;
-  }
-
-  /**
-   * Does the given string need to be quoted?
-   * @param str the string to check
-   * @return true if the string contains any of the active HTML characters
-   */
-  public static boolean needsQuoting(String str) {
-    if (str == null) {
-      return false;
-    }
-    byte[] bytes = str.getBytes();
-    return needsQuoting(bytes, 0, bytes.length);
-  }
-
-  /**
-   * Quote all of the active HTML characters in the given string as they
-   * are added to the buffer.
-   * @param output the stream to write the output to
-   * @param buffer the byte array to take the characters from
-   * @param off the index of the first byte to quote
-   * @param len the number of bytes to quote
-   */
-  public static void quoteHtmlChars(OutputStream output, byte[] buffer,
-                                    int off, int len) throws IOException {
-    for(int i=off; i < off+len; i++) {
-      switch (buffer[i]) {
-      case '&': output.write(ampBytes); break;
-      case '<': output.write(ltBytes); break;
-      case '>': output.write(gtBytes); break;
-      case '\'': output.write(aposBytes); break;
-      case '"': output.write(quotBytes); break;
-      default: output.write(buffer, i, 1);
-      }
-    }
-  }
-
-  /**
-   * Quote the given item to make it html-safe.
-   * @param item the string to quote
-   * @return the quoted string
-   */
-  public static String quoteHtmlChars(String item) {
-    if (item == null) {
-      return null;
-    }
-    byte[] bytes = item.getBytes();
-    if (needsQuoting(bytes, 0, bytes.length)) {
-      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-      try {
-        quoteHtmlChars(buffer, bytes, 0, bytes.length);
-      } catch (IOException ioe) {
-        // Won't happen: ByteArrayOutputStream does not throw IOException
-      }
-      return buffer.toString();
-    } else {
-      return item;
-    }
-  }
-
-  /**
-   * Return an output stream that quotes all of the output.
-   * @param out the stream to write the quoted output to
-   * @return a new stream that the application should write to
-   * @throws IOException if the underlying output fails
-   */
-  public static OutputStream quoteOutputStream(final OutputStream out
-                                               ) throws IOException {
-    return new OutputStream() {
-      private byte[] data = new byte[1];
-      @Override
-      public void write(byte[] data, int off, int len) throws IOException {
-        quoteHtmlChars(out, data, off, len);
-      }
-
-      @Override
-      public void write(int b) throws IOException {
-        data[0] = (byte) b;
-        quoteHtmlChars(out, data, 0, 1);
-      }
-
-      @Override
-      public void flush() throws IOException {
-        out.flush();
-      }
-
-      @Override
-      public void close() throws IOException {
-        out.close();
-      }
-    };
-  }
-
-  /**
-   * Remove HTML quoting from a string.
-   * @param item the string to unquote
-   * @return the unquoted string
-   */
-  public static String unquoteHtmlChars(String item) {
-    if (item == null) {
-      return null;
-    }
-    int next = item.indexOf('&');
-    // nothing was quoted
-    if (next == -1) {
-      return item;
-    }
-    int len = item.length();
-    int posn = 0;
-    StringBuilder buffer = new StringBuilder();
-    while (next != -1) {
-      buffer.append(item.substring(posn, next));
-      if (item.startsWith("&amp;", next)) {
-        buffer.append('&');
-        next += 5;
-      } else if (item.startsWith("&apos;", next)) {
-        buffer.append('\'');
-        next += 6;
-      } else if (item.startsWith("&gt;", next)) {
-        buffer.append('>');
-        next += 4;
-      } else if (item.startsWith("&lt;", next)) {
-        buffer.append('<');
-        next += 4;
-      } else if (item.startsWith("&quot;", next)) {
-        buffer.append('"');
-        next += 6;
-      } else {
-        int end = item.indexOf(';', next)+1;
-        if (end == 0) {
-          end = len;
-        }
-        throw new IllegalArgumentException("Bad HTML quoting for " +
-                                           item.substring(next,end));
-      }
-      posn = next;
-      next = item.indexOf('&', posn);
-    }
-    buffer.append(item.substring(posn, len));
-    return buffer.toString();
-  }
-
-  public static void main(String[] args) throws Exception {
-    if (args.length == 0) {
-        throw new IllegalArgumentException("Please provide some arguments");
-    }
-    for(String arg:args) {
-      System.out.println("Original: " + arg);
-      String quoted = quoteHtmlChars(arg);
-      System.out.println("Quoted: "+ quoted);
-      String unquoted = unquoteHtmlChars(quoted);
-      System.out.println("Unquoted: " + unquoted);
-      System.out.println();
-    }
-  }
-
-}
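
A short usage sketch of the quoting helpers above, showing a quote/unquote round trip:

public class HtmlQuotingExample {
  public static void main(String[] args) {
    String raw = "<b>Fred's \"region\" & friends</b>";
    String quoted = HtmlQuoting.quoteHtmlChars(raw);
    // quoted now reads: &lt;b&gt;Fred&apos;s &quot;region&quot; &amp; friends&lt;/b&gt;
    String roundTripped = HtmlQuoting.unquoteHtmlChars(quoted);
    System.out.println(raw.equals(roundTripped)); // true
  }
}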

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
deleted file mode 100644
index b9dde23..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * Static helpers for access to HTTP-related configuration.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class HttpConfig {
-  private Policy policy;
-  public enum Policy {
-    HTTP_ONLY,
-    HTTPS_ONLY,
-    HTTP_AND_HTTPS;
-
-    public static Policy fromString(String value) {
-      if (HTTPS_ONLY.name().equalsIgnoreCase(value)) {
-        return HTTPS_ONLY;
-      } else if (HTTP_AND_HTTPS.name().equalsIgnoreCase(value)) {
-        return HTTP_AND_HTTPS;
-      }
-      return HTTP_ONLY;
-    }
-
-    public boolean isHttpEnabled() {
-      return this == HTTP_ONLY || this == HTTP_AND_HTTPS;
-    }
-
-    public boolean isHttpsEnabled() {
-      return this == HTTPS_ONLY || this == HTTP_AND_HTTPS;
-    }
-  }
-
-  public HttpConfig(final Configuration conf) {
-    boolean sslEnabled = conf.getBoolean(
-      ServerConfigurationKeys.HBASE_SSL_ENABLED_KEY,
-      ServerConfigurationKeys.HBASE_SSL_ENABLED_DEFAULT);
-    policy = sslEnabled ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY;
-    if (sslEnabled) {
-      conf.addResource("ssl-server.xml");
-      conf.addResource("ssl-client.xml");
-    }
-  }
-
-  public void setPolicy(Policy policy) {
-    this.policy = policy;
-  }
-
-  public boolean isSecure() {
-    return policy == Policy.HTTPS_ONLY;
-  }
-
-  public String getSchemePrefix() {
-    return (isSecure()) ? "https://" : "http://";
-  }
-
-  public String getScheme(Policy policy) {
-    return policy == Policy.HTTPS_ONLY ? "https://" : "http://";
-  }
-}
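
A brief usage sketch; whether the prefix comes out as http:// or https:// depends on the
SSL-enabled key (ServerConfigurationKeys.HBASE_SSL_ENABLED_KEY), and the host/port below
are illustrative only:

import org.apache.hadoop.conf.Configuration;

public class HttpConfigExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    HttpConfig httpConfig = new HttpConfig(conf);
    // Builds a UI link with the scheme matching the configured policy.
    String url = httpConfig.getSchemePrefix() + "localhost:16010/jmx";
    System.out.println(url);
  }
}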

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
deleted file mode 100644
index cfc0640..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.util.HashMap;
-
-import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogConfigurationException;
-import org.apache.commons.logging.LogFactory;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Logger;
-
-import org.eclipse.jetty.server.RequestLog;
-import org.eclipse.jetty.server.NCSARequestLog;
-
-/**
- * RequestLog object for use with the HTTP server.
- */
-public class HttpRequestLog {
-
-  private static final Log LOG = LogFactory.getLog(HttpRequestLog.class);
-  private static final HashMap<String, String> serverToComponent;
-
-  static {
-    serverToComponent = new HashMap<>();
-    serverToComponent.put("master", "master");
-    serverToComponent.put("region", "regionserver");
-  }
-
-  public static RequestLog getRequestLog(String name) {
-
-    String lookup = serverToComponent.get(name);
-    if (lookup != null) {
-      name = lookup;
-    }
-    String loggerName = "http.requests." + name;
-    String appenderName = name + "requestlog";
-    Log logger = LogFactory.getLog(loggerName);
-
-    if (logger instanceof Log4JLogger) {
-      Log4JLogger httpLog4JLog = (Log4JLogger)logger;
-      Logger httpLogger = httpLog4JLog.getLogger();
-      Appender appender = null;
-
-      try {
-        appender = httpLogger.getAppender(appenderName);
-      } catch (LogConfigurationException e) {
-        LOG.warn("Http request log for " + loggerName
-            + " could not be created");
-        throw e;
-      }
-
-      if (appender == null) {
-        LOG.info("Http request log for " + loggerName
-            + " is not defined");
-        return null;
-      }
-
-      if (appender instanceof HttpRequestLogAppender) {
-        HttpRequestLogAppender requestLogAppender
-          = (HttpRequestLogAppender)appender;
-        NCSARequestLog requestLog = new NCSARequestLog();
-        requestLog.setFilename(requestLogAppender.getFilename());
-        requestLog.setRetainDays(requestLogAppender.getRetainDays());
-        return requestLog;
-      } else {
-        LOG.warn("Jetty request log for " + loggerName
-            + " was of the wrong class");
-        return null;
-      }
-    }
-    else {
-      LOG.warn("Jetty request log can only be enabled using Log4j");
-      return null;
-    }
-  }
-}
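
A hedged sketch of how the returned RequestLog could be attached to a Jetty 9 handler
chain; it assumes a log4j HttpRequestLogAppender named "masterrequestlog" is attached to
the "http.requests.master" logger, otherwise getRequestLog() returns null:

import org.eclipse.jetty.server.RequestLog;
import org.eclipse.jetty.server.handler.RequestLogHandler;

public class HttpRequestLogExample {
  public static RequestLogHandler buildRequestLogHandler() {
    RequestLog requestLog = HttpRequestLog.getRequestLog("master");
    if (requestLog == null) {
      return null; // request logging not configured for this logger
    }
    RequestLogHandler handler = new RequestLogHandler();
    handler.setRequestLog(requestLog);
    return handler; // caller would add this to the server's handler collection
  }
}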

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
deleted file mode 100644
index 8039b34..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.log4j.spi.LoggingEvent;
-import org.apache.log4j.AppenderSkeleton;
-
-/**
- * Log4j Appender adapter for HttpRequestLog
- */
-public class HttpRequestLogAppender extends AppenderSkeleton {
-
-  private String filename;
-  private int retainDays;
-
-  public HttpRequestLogAppender() {
-  }
-
-  public void setRetainDays(int retainDays) {
-    this.retainDays = retainDays;
-  }
-
-  public int getRetainDays() {
-    return retainDays;
-  }
-
-  public void setFilename(String filename) {
-    this.filename = filename;
-  }
-
-  public String getFilename() {
-    return filename;
-  }
-
-  @Override
-  public void append(LoggingEvent event) {
-  }
-
-  @Override
-  public void close() {
-      // Nothing to close; this appender only carries request-log configuration.
-  }
-
-  @Override
-  public boolean requiresLayout() {
-    return false;
-  }
-}
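
Finally, a minimal sketch (names and paths assumed) of configuring this appender
programmatically instead of via log4j.properties, so that HttpRequestLog can later find
it under the "http.requests.master" logger:

import org.apache.log4j.Logger;

public class HttpRequestLogAppenderExample {
  public static void main(String[] args) {
    HttpRequestLogAppender appender = new HttpRequestLogAppender();
    // The appender name must match what HttpRequestLog looks up: <component> + "requestlog".
    appender.setName("masterrequestlog");
    appender.setFilename("/tmp/access_yyyy_mm_dd.log");
    appender.setRetainDays(30);
    Logger.getLogger("http.requests.master").addAppender(appender);
  }
}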