Posted to commits@hbase.apache.org by bu...@apache.org on 2017/11/02 00:16:06 UTC

[01/13] hbase git commit: HBASE-18925 Update mockito dependency from mockito-all:1.10.19 to mockito-core:2.1.0 for JDK8 support. [Forced Update!]

Repository: hbase
Updated Branches:
  refs/heads/HBASE-19124 8b5200cbb -> c192b9bb6 (forced update)

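For context on the hunks that follow: mockito-core 2.x replaces the discontinued mockito-all artifact, and its biggest behavioral change is in the matchers. ArgumentMatchers.any(SomeType.class) now performs a type check and no longer matches null, while the untyped any() matches anything, null included; that is why the patch rewrites almost every any(X.class) call. A minimal sketch of the difference, assuming mockito-core 2.x on the classpath (Greeter is a hypothetical interface, not HBase code):

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    interface Greeter {
      void greet(String name);
    }

    public class MatcherMigrationSketch {
      public static void main(String[] args) {
        Greeter greeter = mock(Greeter.class);
        greeter.greet(null);
        // Mockito 1.x: any(String.class) matched null.
        // Mockito 2.x: any(String.class) type-checks and excludes null,
        // so only the untyped any() verifies this call.
        verify(greeter).greet(any());
      }
    }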

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java
index e2e641e..ea86b40 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java
@@ -117,7 +117,7 @@ public class TestProcedureMember {
         member.receivedReachedGlobalBarrier(op);
         return null;
       }
-    }).when(mockMemberComms).sendMemberAcquired(any(Subprocedure.class));
+    }).when(mockMemberComms).sendMemberAcquired(any());
   }
 
   /**
@@ -147,7 +147,7 @@ public class TestProcedureMember {
     order.verify(spy).insideBarrier();
     order.verify(mockMemberComms).sendMemberCompleted(eq(spy), eq(data));
     order.verify(mockMemberComms, never()).sendMemberAborted(eq(spy),
-        any(ForeignException.class));
+        any());
   }
 
   /**
@@ -182,8 +182,8 @@ public class TestProcedureMember {
     order.verify(spySub, never()).insideBarrier();
     order.verify(mockMemberComms, never()).sendMemberCompleted(eq(spySub), eq(data));
     // error recovery path exercised
-    order.verify(spySub).cancel(anyString(), any(Exception.class));
-    order.verify(spySub).cleanup(any(Exception.class));
+    order.verify(spySub).cancel(anyString(), any());
+    order.verify(spySub).cleanup(any());
   }
 
   /**
@@ -200,7 +200,7 @@ public class TestProcedureMember {
           public Void answer(InvocationOnMock invocation) throws Throwable {
             throw new IOException("Forced IOException in memeber prepare");
           }
-        }).when(mockMemberComms).sendMemberAcquired(any(Subprocedure.class));
+        }).when(mockMemberComms).sendMemberAcquired(any());
 
     // run the operation
     // build a new operation
@@ -218,8 +218,8 @@ public class TestProcedureMember {
     order.verify(spySub, never()).insideBarrier();
     order.verify(mockMemberComms, never()).sendMemberCompleted(eq(spySub), eq(data));
     // error recovery path exercised
-    order.verify(spySub).cancel(anyString(), any(Exception.class));
-    order.verify(spySub).cleanup(any(Exception.class));
+    order.verify(spySub).cancel(anyString(), any());
+    order.verify(spySub).cleanup(any());
   }
 
   /**
@@ -261,8 +261,8 @@ public class TestProcedureMember {
     order.verify(spySub, never()).insideBarrier();
     order.verify(mockMemberComms, never()).sendMemberCompleted(eq(spySub), eq(data));
     // error recovery path exercised
-    order.verify(spySub).cancel(anyString(), any(Exception.class));
-    order.verify(spySub).cleanup(any(Exception.class));
+    order.verify(spySub).cancel(anyString(), any());
+    order.verify(spySub).cleanup(any());
   }
 
   /**
@@ -302,8 +302,8 @@ public class TestProcedureMember {
     // Later phases not run
     order.verify(mockMemberComms, never()).sendMemberCompleted(eq(spySub), eq(data));
     // error recovery path exercised
-    order.verify(spySub).cancel(anyString(), any(Exception.class));
-    order.verify(spySub).cleanup(any(Exception.class));
+    order.verify(spySub).cancel(anyString(), any());
+    order.verify(spySub).cleanup(any());
   }
 
   /**
@@ -328,7 +328,7 @@ public class TestProcedureMember {
             Thread.sleep(WAKE_FREQUENCY);
             return null;
           }
-        }).when(mockMemberComms).sendMemberCompleted(any(Subprocedure.class), eq(data));
+        }).when(mockMemberComms).sendMemberCompleted(any(), eq(data));
 
     // run the operation
     // build a new operation
@@ -344,8 +344,8 @@ public class TestProcedureMember {
     order.verify(spySub).insideBarrier();
     order.verify(mockMemberComms).sendMemberCompleted(eq(spySub), eq(data));
     // error recovery path exercised
-    order.verify(spySub).cancel(anyString(), any(Exception.class));
-    order.verify(spySub).cleanup(any(Exception.class));
+    order.verify(spySub).cancel(anyString(), any());
+    order.verify(spySub).cleanup(any());
   }
 
   /**
@@ -369,7 +369,7 @@ public class TestProcedureMember {
     doThrow(new ForeignException("SRC", "prepare exception")).when(spy).acquireBarrier();
     // and throw a connection error when we try to tell the controller about it
     doThrow(new IOException("Controller is down!")).when(mockMemberComms)
-        .sendMemberAborted(eq(spy), any(ForeignException.class));
+        .sendMemberAborted(eq(spy), any());
 
 
     // run the operation
@@ -388,9 +388,9 @@ public class TestProcedureMember {
     // TODO Need to do another refactor to get this to propagate to the coordinator.
     // make sure we pass a remote exception back the controller
 //    order.verify(mockMemberComms).sendMemberAborted(eq(spy),
-//      any(ExternalException.class));
+//      any());
 //    order.verify(dispSpy).receiveError(anyString(),
-//        any(ExternalException.class), any());
+//        any(), any());
   }
 
   /**
@@ -427,7 +427,7 @@ public class TestProcedureMember {
     verifyZeroInteractions(pool);
     // get two abort requests
     // TODO Need to do another refactor to get this to propagate to the coordinator.
-    // verify(mockMemberComms, times(2)).sendMemberAborted(any(Subprocedure.class), any(ExternalException.class));
+    // verify(mockMemberComms, times(2)).sendMemberAborted(any(), any());
   }
 
   /**

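The doAnswer(...).when(mock).voidMethod(any()) pattern above is the standard way to stub void methods in both Mockito versions; only the matcher spelling changes. A hedged sketch, with a hypothetical MemberComms interface standing in for the mocked member comms object:

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.mock;

    import java.io.IOException;

    interface MemberComms {
      void sendMemberAcquired(Object subproc) throws IOException;
    }

    public class DoAnswerSketch {
      public static void main(String[] args) throws IOException {
        MemberComms comms = mock(MemberComms.class);
        // Stub the void method; in Mockito 2, Answer is a functional
        // interface, so a lambda can replace the anonymous class.
        doAnswer(invocation -> {
          System.out.println("acquired: " + invocation.getArgument(0));
          return null;
        }).when(comms).sendMemberAcquired(any());
        comms.sendMemberAcquired("subprocedure-1");
      }
    }
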
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
index 856e449..14d7eab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
@@ -284,7 +284,7 @@ public class TestZKProcedure {
     Procedure coordinatorTask = Mockito.spy(new Procedure(coordinator,
         coordinatorTaskErrorMonitor, WAKE_FREQUENCY, TIMEOUT,
         opName, data, expected));
-    when(coordinator.createProcedure(any(ForeignExceptionDispatcher.class), eq(opName), eq(data), anyListOf(String.class)))
+    when(coordinator.createProcedure(any(), eq(opName), eq(data), anyListOf(String.class)))
       .thenReturn(coordinatorTask);
     // count down the error latch when we get the remote error
     Mockito.doAnswer(new Answer<Void>() {
@@ -296,7 +296,7 @@ public class TestZKProcedure {
         coordinatorReceivedErrorLatch.countDown();
         return null;
       }
-    }).when(coordinatorTask).receive(Mockito.any(ForeignException.class));
+    }).when(coordinatorTask).receive(Mockito.any());
 
     // ----------------------------
     // start running the operation

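Note that the hunk above keeps anyListOf(String.class); it still compiles against mockito-core 2.x but is deprecated there in favor of the generic anyList()/any(). A small sketch (Registry is hypothetical):

    import static org.mockito.ArgumentMatchers.anyList;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.Arrays;
    import java.util.List;

    interface Registry {
      int register(List<String> names);
    }

    public class AnyListSketch {
      public static void main(String[] args) {
        Registry reg = mock(Registry.class);
        // anyList() infers the element type from the mocked signature.
        when(reg.register(anyList())).thenReturn(42);
        System.out.println(reg.register(Arrays.asList("a", "b")));
      }
    }
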
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
index 8b947ee..4ab9685 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
@@ -31,7 +31,6 @@ import java.util.concurrent.CountDownLatch;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -131,10 +130,10 @@ public class TestZKProcedureControllers {
     LOG.debug("Commit node:" + commit + ", exists:" + ZKUtil.checkExists(watcher, commit));
     committed.await();
 
-    verify(monitor, never()).receive(Mockito.any(ForeignException.class));
+    verify(monitor, never()).receive(Mockito.any());
     // XXX: broken due to composition.
 //    verify(member, never()).getManager().controllerConnectionFailure(Mockito.anyString(),
-//      Mockito.any(IOException.class));
+//      Mockito.any());
     // cleanup after the test
     ZKUtil.deleteNodeRecursively(watcher, controller.getZkController().getBaseZnode());
     assertEquals("Didn't delete prepare node", -1, ZKUtil.checkExists(watcher, prepare));
@@ -363,7 +362,7 @@ public class TestZKProcedureControllers {
 //    verify(member, Mockito.times(cohortSize)).submitSubprocedure(Mockito.eq(operationName),
 //      (byte[]) Mockito.argThat(new ArrayEquals(data)));
     Mockito.verify(member,
-      Mockito.atLeast(cohortSize)).submitSubprocedure(Mockito.any(Subprocedure.class));
+      Mockito.atLeast(cohortSize)).submitSubprocedure(Mockito.any());
 
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestFileSystemUtilizationChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestFileSystemUtilizationChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestFileSystemUtilizationChore.java
index ebd2436..f1e41cd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestFileSystemUtilizationChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestFileSystemUtilizationChore.java
@@ -61,7 +61,7 @@ public class TestFileSystemUtilizationChore {
     final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
     doAnswer(new ExpectedRegionSizeSummationAnswer(sum(regionSizes)))
         .when(rs)
-        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
+        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());
 
     final Region region = mockRegionWithSize(regionSizes);
     Mockito.doReturn(Arrays.asList(region)).when(rs).getRegions();
@@ -78,7 +78,7 @@ public class TestFileSystemUtilizationChore {
     final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
     doAnswer(new ExpectedRegionSizeSummationAnswer(sum(regionSizes)))
         .when(rs)
-        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
+        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());
 
     final Region region = mockRegionWithSize(regionSizes);
     Mockito.doReturn(Arrays.asList(region)).when(rs).getRegions();
@@ -102,7 +102,7 @@ public class TestFileSystemUtilizationChore {
     final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
     doAnswer(new ExpectedRegionSizeSummationAnswer(sum(Arrays.asList(r1Sum, r2Sum, r3Sum))))
         .when(rs)
-        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
+        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());
 
     final Region r1 = mockRegionWithSize(r1Sizes);
     final Region r2 = mockRegionWithSize(r2Sizes);
@@ -167,7 +167,7 @@ public class TestFileSystemUtilizationChore {
     };
     doAnswer(new ExpectedRegionSizeSummationAnswer(sum(Arrays.asList(leftover1Sum, leftover2Sum))))
         .when(rs)
-        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
+        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());
 
     // We shouldn't compute all of these region sizes, just the leftovers
     final Region r1 = mockRegionWithSize(Arrays.asList(1024L, 2048L));
@@ -199,7 +199,7 @@ public class TestFileSystemUtilizationChore {
     };
     doAnswer(new ExpectedRegionSizeSummationAnswer(sum(Arrays.asList(leftover1Sum))))
         .when(rs)
-        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
+        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());
 
     // We shouldn't compute all of these region sizes, just the leftovers
     final Region r1 = mockRegionWithSize(Arrays.asList(1024L, 2048L));
@@ -225,7 +225,7 @@ public class TestFileSystemUtilizationChore {
     final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
     doAnswer(new ExpectedRegionSizeSummationAnswer(sum(Arrays.asList(r1Sum))))
         .when(rs)
-        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
+        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());
 
     final Region r1 = mockRegionWithSize(r1Sizes);
     final Region r2 = mockSplitParentRegionWithSize(r2Sizes);
@@ -247,7 +247,7 @@ public class TestFileSystemUtilizationChore {
     final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
     doAnswer(new ExpectedRegionSizeSummationAnswer(r1Sum))
         .when(rs)
-        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
+        .reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());
 
     final Region r1 = mockRegionWithSize(r1Sizes);
     final Region r2 = mockRegionReplicaWithSize(r2Sizes);
@@ -274,7 +274,7 @@ public class TestFileSystemUtilizationChore {
     final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
     doAnswer(new ExpectedRegionSizeSummationAnswer(
         sum(Arrays.asList(r1HFileSizeSum, r2HFileSizeSum))))
-        .when(rs).reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
+        .when(rs).reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());
 
     final Region r1 = mockRegionWithHFileLinks(r1StoreFileSizes, r1HFileSizes);
     final Region r2 = mockRegionWithHFileLinks(r2StoreFileSizes, r2HFileSizes);

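The (Map<RegionInfo,Long>) any() cast retained above pins the generic type at that call site; in straightforward cases Mockito 2's any() is generic and needs no cast at all. A sketch under that assumption (Reporter is hypothetical, not the HBase API):

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.doNothing;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    import java.util.HashMap;
    import java.util.Map;

    interface Reporter {
      void report(Map<String, Long> sizes);
    }

    public class GenericAnySketch {
      public static void main(String[] args) {
        Reporter r = mock(Reporter.class);
        doNothing().when(r).report(any()); // no cast needed here
        r.report(new HashMap<>());
        verify(r).report(any());
      }
    }
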
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestMasterSpaceQuotaObserverWithMocks.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestMasterSpaceQuotaObserverWithMocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestMasterSpaceQuotaObserverWithMocks.java
index 271e5bb..36b5ae1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestMasterSpaceQuotaObserverWithMocks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestMasterSpaceQuotaObserverWithMocks.java
@@ -51,7 +51,7 @@ public class TestMasterSpaceQuotaObserverWithMocks {
     conf = HBaseConfiguration.create();
     master = mock(HMaster.class);
     doCallRealMethod().when(master).updateConfigurationForSpaceQuotaObserver(
-        any(Configuration.class));
+        any());
   }
 
   @Test

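doCallRealMethod(), as used above, turns a single method on an otherwise fully stubbed mock back into its real implementation. A minimal sketch (Config and Server are hypothetical stand-ins, not Configuration and HMaster):

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.doCallRealMethod;
    import static org.mockito.Mockito.mock;

    public class CallRealMethodSketch {
      static class Config {}

      static class Server {
        void updateConfiguration(Config conf) {
          System.out.println("real method ran");
        }
      }

      public static void main(String[] args) {
        Server master = mock(Server.class);
        // Only this one method executes its real body.
        doCallRealMethod().when(master).updateConfiguration(any());
        master.updateConfiguration(new Config());
      }
    }
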
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
index 303dad0..5ce2ec4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
@@ -73,7 +73,7 @@ public class TestNamespaceQuotaViolationStore {
   @Test
   public void testGetSpaceQuota() throws Exception {
     NamespaceQuotaSnapshotStore mockStore = mock(NamespaceQuotaSnapshotStore.class);
-    when(mockStore.getSpaceQuota(any(String.class))).thenCallRealMethod();
+    when(mockStore.getSpaceQuota(any())).thenCallRealMethod();
 
     Quotas quotaWithSpace = Quotas.newBuilder().setSpace(
         SpaceQuota.newBuilder()
@@ -84,7 +84,7 @@ public class TestNamespaceQuotaViolationStore {
     Quotas quotaWithoutSpace = Quotas.newBuilder().build();
 
     AtomicReference<Quotas> quotaRef = new AtomicReference<>();
-    when(mockStore.getQuotaForNamespace(any(String.class))).then(new Answer<Quotas>() {
+    when(mockStore.getQuotaForNamespace(any())).then(new Answer<Quotas>() {
       @Override
       public Quotas answer(InvocationOnMock invocation) throws Throwable {
         return quotaRef.get();

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotaViolationPolicyRefresherChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotaViolationPolicyRefresherChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotaViolationPolicyRefresherChore.java
index 7f0f9ad..2ddb5a3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotaViolationPolicyRefresherChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotaViolationPolicyRefresherChore.java
@@ -77,8 +77,8 @@ public class TestSpaceQuotaViolationPolicyRefresherChore {
     when(chore.getConnection()).thenReturn(conn);
     when(chore.getManager()).thenReturn(manager);
     doCallRealMethod().when(chore).chore();
-    when(chore.isInViolation(any(SpaceQuotaSnapshot.class))).thenCallRealMethod();
-    doCallRealMethod().when(chore).extractQuotaSnapshot(any(Result.class), any(Map.class));
+    when(chore.isInViolation(any())).thenCallRealMethod();
+    doCallRealMethod().when(chore).extractQuotaSnapshot(any(), any());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java
index 5ba830d..64b2442 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java
@@ -165,7 +165,7 @@ public class TestTableQuotaViolationStore {
   @Test
   public void testGetSpaceQuota() throws Exception {
     TableQuotaSnapshotStore mockStore = mock(TableQuotaSnapshotStore.class);
-    when(mockStore.getSpaceQuota(any(TableName.class))).thenCallRealMethod();
+    when(mockStore.getSpaceQuota(any())).thenCallRealMethod();
 
     Quotas quotaWithSpace = Quotas.newBuilder().setSpace(
         SpaceQuota.newBuilder()
@@ -176,7 +176,7 @@ public class TestTableQuotaViolationStore {
     Quotas quotaWithoutSpace = Quotas.newBuilder().build();
 
     AtomicReference<Quotas> quotaRef = new AtomicReference<>();
-    when(mockStore.getQuotaForTable(any(TableName.class))).then(new Answer<Quotas>() {
+    when(mockStore.getQuotaForTable(any())).then(new Answer<Quotas>() {
       @Override
       public Quotas answer(InvocationOnMock invocation) throws Throwable {
         return quotaRef.get();

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableSpaceQuotaViolationNotifier.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableSpaceQuotaViolationNotifier.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableSpaceQuotaViolationNotifier.java
index 6626ab5..fd34bda 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableSpaceQuotaViolationNotifier.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableSpaceQuotaViolationNotifier.java
@@ -77,22 +77,13 @@ public class TestTableSpaceQuotaViolationNotifier {
 
     notifier.transitionTable(tn, snapshot);
 
-    verify(quotaTable).put(argThat(new SingleCellPutMatcher(expectedPut)));
-  }
-
-  /**
-   * Parameterized for Puts.
-   */
-  private static class SingleCellPutMatcher extends SingleCellMutationMatcher<Put> {
-    private SingleCellPutMatcher(Put expected) {
-      super(expected);
-    }
+    verify(quotaTable).put(argThat(new SingleCellMutationMatcher<Put>(expectedPut)));
   }
 
   /**
    * Quick hack to verify a Mutation with one column.
    */
-  private static class SingleCellMutationMatcher<T> extends ArgumentMatcher<T> {
+  final private static class SingleCellMutationMatcher<T> implements ArgumentMatcher<T> {
     private final Mutation expected;
 
     private SingleCellMutationMatcher(Mutation expected) {
@@ -100,7 +91,7 @@ public class TestTableSpaceQuotaViolationNotifier {
     }
 
     @Override
-    public boolean matches(Object argument) {
+    public boolean matches(T argument) {
       if (!expected.getClass().isAssignableFrom(argument.getClass())) {
         return false;
       }

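The hunk above tracks an API break: in Mockito 1.x, ArgumentMatcher<T> was an abstract class with matches(Object); in 2.x it is an interface with a typed matches(T), so custom matchers drop their casts and can even be lambdas. A sketch (Sink is hypothetical):

    import static org.mockito.ArgumentMatchers.argThat;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    import org.mockito.ArgumentMatcher;

    public class MatcherInterfaceSketch {
      interface Sink {
        void accept(String row);
      }

      public static void main(String[] args) {
        Sink sink = mock(Sink.class);
        sink.accept("row-1");
        // Typed parameter, no Object cast; a lambda satisfies the interface.
        ArgumentMatcher<String> startsWithRow = arg -> arg.startsWith("row-");
        verify(sink).accept(argThat(startsWithRow));
      }
    }
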
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/StatefulStoreMockMaker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/StatefulStoreMockMaker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/StatefulStoreMockMaker.java
index a1fe87b..5af7d96 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/StatefulStoreMockMaker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/StatefulStoreMockMaker.java
@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import static org.mockito.ArgumentMatchers.anyInt;
 import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyInt;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -27,8 +27,6 @@ import static org.mockito.Mockito.when;
 import java.util.Optional;
 
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
-import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-import org.apache.hadoop.hbase.security.User;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
@@ -42,37 +40,23 @@ public class StatefulStoreMockMaker {
     return Optional.empty();
   }
 
-  public void cancelCompaction(Object originalContext) {
-  }
+  public void cancelCompaction(Object originalContext) {}
 
   public int getPriority() {
     return 0;
   }
-
-  private class SelectAnswer implements Answer<Optional<CompactionContext>> {
-    public Optional<CompactionContext> answer(InvocationOnMock invocation) throws Throwable {
-      return selectCompaction();
-    }
-  }
-
-  private class PriorityAnswer implements Answer<Integer> {
-    public Integer answer(InvocationOnMock invocation) throws Throwable {
-      return getPriority();
-    }
-  }
   private class CancelAnswer implements Answer<Object> {
     public CompactionContext answer(InvocationOnMock invocation) throws Throwable {
-      cancelCompaction(invocation.getArguments()[0]); return null;
+      cancelCompaction(invocation.getArgument(0));
+      return null;
     }
   }
 
   public HStore createStoreMock(String name) throws Exception {
     HStore store = mock(HStore.class, name);
-    when(store.requestCompaction(anyInt(), any(CompactionLifeCycleTracker.class), any(User.class)))
-        .then(new SelectAnswer());
-    when(store.getCompactPriority()).then(new PriorityAnswer());
-    doAnswer(new CancelAnswer()).when(store)
-        .cancelRequestedCompaction(any(CompactionContext.class));
+    when(store.requestCompaction(anyInt(), any(), any())).then(inv -> selectCompaction());
+    when(store.getCompactPriority()).then(inv -> getPriority());
+    doAnswer(new CancelAnswer()).when(store).cancelRequestedCompaction(any());
     return store;
   }
 }

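Two simplifications appear in the StatefulStoreMockMaker hunk: Answer became a functional interface, so the SelectAnswer/PriorityAnswer classes collapse into lambdas, and getArgument(int) replaces getArguments()[i]. A sketch of both (Store here is a hypothetical interface, not HStore):

    import static org.mockito.ArgumentMatchers.anyInt;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    public class AnswerLambdaSketch {
      interface Store {
        String describe(int priority);
      }

      public static void main(String[] args) {
        Store store = mock(Store.class);
        // Lambda Answer; the typed getArgument avoids the old array cast.
        when(store.describe(anyInt()))
            .then(inv -> "priority=" + inv.<Integer>getArgument(0));
        System.out.println(store.describe(7));
      }
    }
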
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
index 32c5e13..98bf48d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
@@ -22,6 +22,13 @@ import static java.util.Arrays.asList;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.mockito.hamcrest.MockitoHamcrest.argThat;
 
 import java.io.File;
 import java.io.FileNotFoundException;
@@ -31,7 +38,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
@@ -55,17 +61,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALKey;
-import org.hamcrest.Description;
-import org.hamcrest.Matcher;
-import org.hamcrest.TypeSafeMatcher;
-
-import static org.mockito.Matchers.*;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.stubbing.Answer;
-
 import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.Rule;
@@ -73,6 +68,11 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TemporaryFolder;
 import org.junit.rules.TestName;
+import org.hamcrest.Description;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeMatcher;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
 
 /**
  * This class attempts to unit test bulk HLog loading.
@@ -108,19 +108,19 @@ public class TestBulkLoad {
     storeFileName = (new Path(storeFileName)).getName();
     List<String> storeFileNames = new ArrayList<>();
     storeFileNames.add(storeFileName);
-    when(log.append(any(HRegionInfo.class), any(WALKey.class),
+    when(log.append(any(), any(),
             argThat(bulkLogWalEdit(WALEdit.BULK_LOAD, tableName.toBytes(),
                     familyName, storeFileNames)),
-            any(boolean.class))).thenAnswer(new Answer() {
-      public Object answer(InvocationOnMock invocation) {
-        WALKey walKey = invocation.getArgumentAt(1, WALKey.class);
-        MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
-        if (mvcc != null) {
-          MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();
-          walKey.setWriteEntry(we);
-        }
-        return 01L;
-      };
+            anyBoolean())).thenAnswer(new Answer() {
+              public Object answer(InvocationOnMock invocation) {
+                WALKey walKey = invocation.getArgument(1);
+                MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
+                if (mvcc != null) {
+                  MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();
+                  walKey.setWriteEntry(we);
+                }
+                return 01L;
+              };
     });
     testRegionWithFamiliesAndSpecifiedTableName(tableName, family1)
         .bulkLoadHFiles(familyPaths, false, null);
@@ -134,18 +134,18 @@ public class TestBulkLoad {
 
   @Test
   public void shouldBulkLoadSingleFamilyHLog() throws IOException {
-    when(log.append(any(HRegionInfo.class),
-            any(WALKey.class), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
-            any(boolean.class))).thenAnswer(new Answer() {
-      public Object answer(InvocationOnMock invocation) {
-        WALKey walKey = invocation.getArgumentAt(1, WALKey.class);
-        MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
-        if (mvcc != null) {
-          MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();
-          walKey.setWriteEntry(we);
-        }
-        return 01L;
-      };
+    when(log.append(any(),
+            any(), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
+            anyBoolean())).thenAnswer(new Answer() {
+              public Object answer(InvocationOnMock invocation) {
+                WALKey walKey = invocation.getArgument(1);
+                MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
+                if (mvcc != null) {
+                  MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();
+                  walKey.setWriteEntry(we);
+                }
+                return 01L;
+              };
     });
     testRegionWithFamilies(family1).bulkLoadHFiles(withFamilyPathsFor(family1), false, null);
     verify(log).sync(anyLong());
@@ -153,11 +153,11 @@ public class TestBulkLoad {
 
   @Test
   public void shouldBulkLoadManyFamilyHLog() throws IOException {
-    when(log.append(any(HRegionInfo.class),
-            any(WALKey.class), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
-            any(boolean.class))).thenAnswer(new Answer() {
+    when(log.append(any(),
+            any(), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
+            anyBoolean())).thenAnswer(new Answer() {
               public Object answer(InvocationOnMock invocation) {
-                WALKey walKey = invocation.getArgumentAt(1, WALKey.class);
+                WALKey walKey = invocation.getArgument(1);
                 MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
                 if (mvcc != null) {
                   MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();
@@ -173,18 +173,18 @@ public class TestBulkLoad {
 
   @Test
   public void shouldBulkLoadManyFamilyHLogEvenWhenTableNameNamespaceSpecified() throws IOException {
-    when(log.append(any(HRegionInfo.class),
-            any(WALKey.class), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
-            any(boolean.class))).thenAnswer(new Answer() {
-      public Object answer(InvocationOnMock invocation) {
-        WALKey walKey = invocation.getArgumentAt(1, WALKey.class);
-        MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
-        if (mvcc != null) {
-          MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();
-          walKey.setWriteEntry(we);
-        }
-        return 01L;
-      };
+    when(log.append(any(),
+            any(), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
+            anyBoolean())).thenAnswer(new Answer() {
+              public Object answer(InvocationOnMock invocation) {
+                WALKey walKey = invocation.getArgument(1);
+                MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
+                if (mvcc != null) {
+                  MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();
+                  walKey.setWriteEntry(we);
+                }
+                return 01L;
+              };
     });
     TableName tableName = TableName.valueOf("test", "test");
     testRegionWithFamiliesAndSpecifiedTableName(tableName, family1, family2)

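The import churn in TestBulkLoad reflects another 2.x split: the Hamcrest bridge moved out of the core matchers, so argThat for Hamcrest matchers now comes from org.mockito.hamcrest.MockitoHamcrest (and needs org.hamcrest on the classpath), while the plain Mockito argThat lives in ArgumentMatchers. A sketch of the Hamcrest side (Logger is hypothetical):

    import static org.hamcrest.CoreMatchers.containsString;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;
    import static org.mockito.hamcrest.MockitoHamcrest.argThat;

    public class HamcrestArgThatSketch {
      interface Logger {
        void log(String message);
      }

      public static void main(String[] args) {
        Logger log = mock(Logger.class);
        log.log("bulk load finished");
        // A Hamcrest matcher adapted for a Mockito verification.
        verify(log).log(argThat(containsString("bulk load")));
      }
    }
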
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
index 7248f56..afe3228 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
@@ -387,7 +387,7 @@ public class TestCompaction {
     thread.interruptIfNecessary();
   }
 
-  private class StoreMockMaker extends StatefulStoreMockMaker {
+  class StoreMockMaker extends StatefulStoreMockMaker {
     public ArrayList<HStoreFile> compacting = new ArrayList<>();
     public ArrayList<HStoreFile> notCompacting = new ArrayList<>();
     private ArrayList<Integer> results;
@@ -556,20 +556,19 @@ public class TestCompaction {
     // Set up the region mock that redirects compactions.
     HRegion r = mock(HRegion.class);
     when(
-      r.compact(any(CompactionContext.class), any(HStore.class),
-        any(ThroughputController.class), any(User.class))).then(new Answer<Boolean>() {
-      @Override
-      public Boolean answer(InvocationOnMock invocation) throws Throwable {
-        invocation.getArgumentAt(0, CompactionContext.class).compact(
-          invocation.getArgumentAt(2, ThroughputController.class), null);
-        return true;
-      }
+      r.compact(any(), any(), any(), any())).then(new Answer<Boolean>() {
+        @Override
+        public Boolean answer(InvocationOnMock invocation) throws Throwable {
+          invocation.<CompactionContext>getArgument(0).compact(invocation.getArgument(2), null);
+          return true;
+        }
     });
 
     // Set up store mocks for 2 "real" stores and the one we use for blocking CST.
     ArrayList<Integer> results = new ArrayList<>();
     StoreMockMaker sm = new StoreMockMaker(results), sm2 = new StoreMockMaker(results);
-    HStore store = sm.createStoreMock("store1"), store2 = sm2.createStoreMock("store2");
+    HStore store = sm.createStoreMock("store1");
+    HStore store2 = sm2.createStoreMock("store2");
     BlockingStoreMockMaker blocker = new BlockingStoreMockMaker();
 
     // First, block the compaction thread so that we could muck with queue.

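The invocation.<CompactionContext>getArgument(0) form above uses an explicit type witness, which helps when the result is immediately chained and inference has nothing to bind to. A sketch (Runner is hypothetical):

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.Arrays;
    import java.util.List;

    public class TypedGetArgumentSketch {
      interface Runner {
        boolean run(List<String> steps);
      }

      public static void main(String[] args) {
        Runner runner = mock(Runner.class);
        // The witness lets us chain a call on the generic argument.
        when(runner.run(any()))
            .then(inv -> !inv.<List<String>>getArgument(0).isEmpty());
        System.out.println(runner.run(Arrays.asList("a")));
      }
    }
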
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java
index 0a47762..9a9c6c0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java
@@ -186,7 +186,7 @@ public class TestCompactionArchiveIOException {
     // none of the other files are cleared from the compactedfiles list.
     // Simulate this condition with a dummy file
     doThrow(new IOException("Error for test"))
-        .when(errFS).rename(eq(new Path(storeDir, ERROR_FILE)), any(Path.class));
+        .when(errFS).rename(eq(new Path(storeDir, ERROR_FILE)), any());
 
     HRegionFileSystem fs = new HRegionFileSystem(conf, errFS, tableDir, info);
     final Configuration walConf = new Configuration(conf);

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 421bd3f..2e3edf1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -1104,19 +1104,19 @@ public class TestHRegion {
     }
   }
 
-  class IsFlushWALMarker extends ArgumentMatcher<WALEdit> {
+  class IsFlushWALMarker implements ArgumentMatcher<WALEdit> {
     volatile FlushAction[] actions;
     public IsFlushWALMarker(FlushAction... actions) {
       this.actions = actions;
     }
     @Override
-    public boolean matches(Object edit) {
-      List<Cell> cells = ((WALEdit)edit).getCells();
+    public boolean matches(WALEdit edit) {
+      List<Cell> cells = edit.getCells();
       if (cells.isEmpty()) {
         return false;
       }
       if (WALEdit.isMetaEditFamily(cells.get(0))) {
-        FlushDescriptor desc = null;
+        FlushDescriptor desc;
         try {
           desc = WALEdit.getFlushDescriptor(cells.get(0));
         } catch (IOException e) {
@@ -2441,8 +2441,7 @@ public class TestHRegion {
     Mockito.doAnswer(new Answer() {
       @Override
       public Object answer(InvocationOnMock invocation) throws Throwable {
-        MiniBatchOperationInProgress<Mutation> mb = invocation.getArgumentAt(0,
-                MiniBatchOperationInProgress.class);
+        MiniBatchOperationInProgress<Mutation> mb = invocation.getArgument(0);
         mb.addOperationsFromCP(0, new Mutation[]{addPut});
         return null;
       }
@@ -5960,7 +5959,7 @@ public class TestHRegion {
       thenAnswer(new Answer<Long>() {
         @Override
         public Long answer(InvocationOnMock invocation) throws Throwable {
-          WALKey key = invocation.getArgumentAt(1, WALKey.class);
+          WALKey key = invocation.getArgument(1);
           MultiVersionConcurrencyControl.WriteEntry we = key.getMvcc().begin();
           key.setWriteEntry(we);
           return 1L;

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
index 4a414d2..6ec091a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
@@ -1026,7 +1026,7 @@ public class TestHStore {
     // call first time after files changed
     spiedStore.refreshStoreFiles();
     assertEquals(2, this.store.getStorefilesCount());
-    verify(spiedStore, times(1)).replaceStoreFiles(any(Collection.class), any(Collection.class));
+    verify(spiedStore, times(1)).replaceStoreFiles(any(), any());
 
     // call second time
     spiedStore.refreshStoreFiles();

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerRegionSpaceUseReport.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerRegionSpaceUseReport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerRegionSpaceUseReport.java
index 44ab24e..5043f59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerRegionSpaceUseReport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerRegionSpaceUseReport.java
@@ -69,8 +69,8 @@ public class TestRegionServerRegionSpaceUseReport {
 
     // Call the real method to convert the map into a protobuf
     HRegionServer rs = mock(HRegionServer.class);
-    doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any(Map.class));
-    doCallRealMethod().when(rs).convertRegionSize(any(RegionInfo.class), anyLong());
+    doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any());
+    doCallRealMethod().when(rs).convertRegionSize(any(), anyLong());
 
     RegionSpaceUseReportRequest requests = rs.buildRegionSpaceUseReportRequest(sizes);
     assertEquals(sizes.size(), requests.getSpaceUseCount());
@@ -87,8 +87,8 @@ public class TestRegionServerRegionSpaceUseReport {
   public void testNullMap() {
     // Call the real method to convert the map into a protobuf
     HRegionServer rs = mock(HRegionServer.class);
-    doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any(Map.class));
-    doCallRealMethod().when(rs).convertRegionSize(any(RegionInfo.class), anyLong());
+    doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any());
+    doCallRealMethod().when(rs).convertRegionSize(any(), anyLong());
 
     rs.buildRegionSpaceUseReportRequest(null);
   }
@@ -105,8 +105,8 @@ public class TestRegionServerRegionSpaceUseReport {
 
     // Call the real method to convert the map into a protobuf
     HRegionServer rs = mock(HRegionServer.class);
-    doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any(Map.class));
-    doCallRealMethod().when(rs).convertRegionSize(any(RegionInfo.class), anyLong());
+    doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any());
+    doCallRealMethod().when(rs).convertRegionSize(any(), anyLong());
 
     rs.buildRegionSpaceUseReportRequest(sizes);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
index e2925ed..6b5e2f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
@@ -41,8 +41,6 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl;
 import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
 import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor;
 import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
-import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
@@ -76,9 +74,9 @@ public class TestStripeStoreEngine {
     StripeCompactor mockCompactor = mock(StripeCompactor.class);
     se.setCompactorOverride(mockCompactor);
     when(
-      mockCompactor.compact(any(CompactionRequestImpl.class), anyInt(), anyLong(), any(byte[].class),
-        any(byte[].class), any(byte[].class), any(byte[].class),
-        any(ThroughputController.class), any(User.class)))
+      mockCompactor.compact(any(), anyInt(), anyLong(), any(),
+        any(), any(), any(),
+        any(), any()))
         .thenReturn(new ArrayList<>());
 
     // Produce 3 L0 files.
@@ -105,7 +103,7 @@ public class TestStripeStoreEngine {
 
   private static HStoreFile createFile() throws Exception {
     HStoreFile sf = mock(HStoreFile.class);
-    when(sf.getMetadataValue(any(byte[].class)))
+    when(sf.getMetadataValue(any()))
       .thenReturn(StripeStoreFileManager.INVALID_KEY);
     when(sf.getReader()).thenReturn(mock(StoreFileReader.class));
     when(sf.getPath()).thenReturn(new Path("moo"));

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
index 67a7519..932664b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
@@ -93,22 +93,22 @@ public class TestCompactor {
       StoreFileWriter writer = mock(StoreFileWriter.class);
       doAnswer(new Answer<Object>() {
         public Object answer(InvocationOnMock invocation) {
-          return realWriter.kvs.add((KeyValue) invocation.getArguments()[0]);
+          return realWriter.kvs.add((KeyValue) invocation.getArgument(0));
         }
-      }).when(writer).append(any(KeyValue.class));
+      }).when(writer).append(any());
       doAnswer(new Answer<Object>() {
         public Object answer(InvocationOnMock invocation) {
           Object[] args = invocation.getArguments();
           return realWriter.data.put((byte[]) args[0], (byte[]) args[1]);
         }
-      }).when(writer).appendFileInfo(any(byte[].class), any(byte[].class));
+      }).when(writer).appendFileInfo(any(), any());
       doAnswer(new Answer<Void>() {
         @Override
         public Void answer(InvocationOnMock invocation) throws Throwable {
           realWriter.hasMetadata = true;
           return null;
         }
-      }).when(writer).appendMetadata(any(long.class), any(boolean.class));
+      }).when(writer).appendMetadata(anyLong(), anyBoolean());
       doAnswer(new Answer<Path>() {
         @Override
         public Path answer(InvocationOnMock invocation) throws Throwable {

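For primitives the patch switches from any(long.class)/any(boolean.class) to the dedicated anyLong()/anyBoolean(), the idiomatic Mockito 2 spelling that sidesteps the null and boxing questions around any(Class). A sketch (Writer is hypothetical, not StoreFileWriter):

    import static org.mockito.ArgumentMatchers.anyBoolean;
    import static org.mockito.ArgumentMatchers.anyLong;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    public class PrimitiveMatcherSketch {
      interface Writer {
        void appendMetadata(long maxSeqId, boolean majorCompaction);
      }

      public static void main(String[] args) {
        Writer w = mock(Writer.class);
        w.appendMetadata(42L, true);
        verify(w).appendMetadata(anyLong(), anyBoolean());
      }
    }
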
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java
index 95c2c56..834373c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java
@@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
@@ -102,7 +101,7 @@ public class TestDateTieredCompactor {
     when(store.areWritesEnabled()).thenReturn(true);
     when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
     when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME));
-    when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(),
+    when(store.createWriterInTmp(anyLong(), any(), anyBoolean(),
       anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
     when(store.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);
     OptionalLong maxSequenceId = StoreUtils.getMaxSequenceIdInList(storefiles);

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
index 48e560c..6176597 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
@@ -52,7 +52,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HStore;
@@ -71,7 +70,6 @@ import org.apache.hadoop.hbase.regionserver.StripeStoreFlusher;
 import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy.StripeInformationProvider;
 import org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture;
 import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -197,7 +195,7 @@ public class TestStripeCompactionPolicy {
     si = createStripesWithSizes(0, 0,
         new Long[] { 5L }, new Long[] { 3L, 2L, 2L, 1L }, new Long[] { 3L, 2L, 2L });
     verifySingleStripeCompaction(policy, si, 1, null);
-    // Or with smallest files, if the count is the same 
+    // Or with smallest files, if the count is the same
     si = createStripesWithSizes(0, 0,
         new Long[] { 3L, 3L, 3L }, new Long[] { 3L, 1L, 2L }, new Long[] { 3L, 2L, 2L });
     verifySingleStripeCompaction(policy, si, 1, null);
@@ -237,7 +235,7 @@ public class TestStripeCompactionPolicy {
     scr.execute(sc, NoLimitThroughputController.INSTANCE, null);
     verify(sc, only()).compact(eq(scr.getRequest()), anyInt(), anyLong(), aryEq(OPEN_KEY),
       aryEq(OPEN_KEY), aryEq(OPEN_KEY), aryEq(OPEN_KEY),
-      any(NoLimitThroughputController.class), any(User.class));
+      any(), any());
   }
 
   @Test
@@ -551,9 +549,8 @@ public class TestStripeCompactionPolicy {
     scr.execute(sc, NoLimitThroughputController.INSTANCE, null);
     verify(sc, times(1)).compact(eq(scr.getRequest()), argThat(new ArgumentMatcher<List<byte[]>>() {
       @Override
-      public boolean matches(Object argument) {
-        @SuppressWarnings("unchecked")
-        List<byte[]> other = (List<byte[]>) argument;
+      public boolean matches(List<byte[]> argument) {
+        List<byte[]> other = argument;
         if (other.size() != boundaries.size()) return false;
         for (int i = 0; i < other.size(); ++i) {
           if (!Bytes.equals(other.get(i), boundaries.get(i))) return false;
@@ -562,7 +559,7 @@ public class TestStripeCompactionPolicy {
       }
     }), dropDeletesFrom == null ? isNull(byte[].class) : aryEq(dropDeletesFrom),
       dropDeletesTo == null ? isNull(byte[].class) : aryEq(dropDeletesTo),
-      any(NoLimitThroughputController.class), any(User.class));
+      any(), any());
   }
 
   /**
@@ -574,7 +571,7 @@ public class TestStripeCompactionPolicy {
    * @param count Expected # of resulting stripes, null if not checked.
    * @param size Expected target stripe size, null if not checked.
    * @param start Left boundary of the compaction.
-   * @param righr Right boundary of the compaction.
+   * @param end Right boundary of the compaction.
    */
   private void verifyCompaction(StripeCompactionPolicy policy, StripeInformationProvider si,
       Collection<HStoreFile> sfs, Boolean dropDeletes, Integer count, Long size,
@@ -588,7 +585,7 @@ public class TestStripeCompactionPolicy {
       count == null ? anyInt() : eq(count.intValue()),
       size == null ? anyLong() : eq(size.longValue()), aryEq(start), aryEq(end),
       dropDeletesMatcher(dropDeletes, start), dropDeletesMatcher(dropDeletes, end),
-      any(NoLimitThroughputController.class), any(User.class));
+      any(), any());
   }
 
   /** Verify arbitrary flush. */
@@ -612,7 +609,7 @@ public class TestStripeCompactionPolicy {
 
 
   private byte[] dropDeletesMatcher(Boolean dropDeletes, byte[] value) {
-    return dropDeletes == null ? any(byte[].class)
+    return dropDeletes == null ? any()
             : (dropDeletes.booleanValue() ? aryEq(value) : isNull(byte[].class));
   }
 
@@ -780,7 +777,7 @@ public class TestStripeCompactionPolicy {
     when(store.getColumnFamilyDescriptor()).thenReturn(col);
     when(store.getRegionInfo()).thenReturn(info);
     when(
-      store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(),
+      store.createWriterInTmp(anyLong(), any(), anyBoolean(),
         anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
 
     Configuration conf = HBaseConfiguration.create();

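One consequence of the stricter 2.x matchers shows up in dropDeletesMatcher above: since any(byte[].class) no longer matches null, call sites that intentionally pass null pair the untyped any() with an explicit isNull(). A sketch (Archive is hypothetical):

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.ArgumentMatchers.isNull;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    public class NullMatcherSketch {
      interface Archive {
        void compact(byte[] from, byte[] to);
      }

      public static void main(String[] args) {
        Archive a = mock(Archive.class);
        a.compact(new byte[] { 1 }, null);
        // any() tolerates null; isNull() asserts it explicitly.
        verify(a).compact(any(), isNull());
      }
    }
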
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java
index 772a674..302a4eb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScanInfo;
@@ -201,7 +200,7 @@ public class TestStripeCompactor {
     when(store.areWritesEnabled()).thenReturn(true);
     when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
     when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME));
-    when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(),
+    when(store.createWriterInTmp(anyLong(), any(), anyBoolean(),
       anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
     when(store.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
index 3f6fa3b..f18ad77 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+import static org.mockito.ArgumentMatchers.anyInt;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.doAnswer;
@@ -997,7 +998,7 @@ public abstract class AbstractTestWALReplay {
             }
             return b;
           }
-        }).when(spyIn).read(any(byte[].class), any(int.class), any(int.class));
+        }).when(spyIn).read(any(byte[].class), anyInt(), anyInt());
         doAnswer(new Answer<Void>() {
 
           @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
index ae34d1b..e8bda89 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
@@ -45,12 +45,16 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
 
 import static org.junit.Assert.assertNotEquals;
+import static org.mockito.ArgumentMatchers.argThat;
 import static org.mockito.Matchers.anyLong;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Matchers.isA;
-import static org.mockito.Matchers.argThat;
 import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
 
 @RunWith(MockitoJUnitRunner.class)
 @Category({MediumTests.class})
@@ -151,14 +155,14 @@ public class TestCanaryTool {
     // One table's timeout is set for 0 ms and thus, should lead to an error.
     verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
       @Override
-      public boolean matches(Object argument) {
+      public boolean matches(LoggingEvent argument) {
         return ((LoggingEvent) argument).getRenderedMessage().contains("exceeded the configured read timeout.");
       }
     }));
     verify(mockAppender, times(2)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
       @Override
-      public boolean matches(Object argument) {
-        return ((LoggingEvent) argument).getRenderedMessage().contains("The configured read timeout was");
+      public boolean matches(LoggingEvent argument) {
+        return argument.getRenderedMessage().contains("The configured read timeout was");
       }
     }));
   }
@@ -173,12 +177,13 @@ public class TestCanaryTool {
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     assertNotEquals("verify non-null write latency", null, sink.getWriteLatency());
     assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency());
-    verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(Object argument) {
-        return ((LoggingEvent) argument).getRenderedMessage().contains("The configured write timeout was");
-      }
-    }));
+    verify(mockAppender, times(1)).doAppend(argThat(
+        new ArgumentMatcher<LoggingEvent>() {
+          @Override
+          public boolean matches(LoggingEvent argument) {
+            return argument.getRenderedMessage().contains("The configured write timeout was");
+          }
+        }));
   }
 
   //no table created, so there should be no regions
@@ -187,8 +192,8 @@ public class TestCanaryTool {
     runRegionserverCanary();
     verify(mockAppender).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
       @Override
-      public boolean matches(Object argument) {
-        return ((LoggingEvent) argument).getRenderedMessage().contains("Regionserver not serving any regions");
+      public boolean matches(LoggingEvent argument) {
+        return argument.getRenderedMessage().contains("Regionserver not serving any regions");
       }
     }));
   }
@@ -201,8 +206,8 @@ public class TestCanaryTool {
     runRegionserverCanary();
     verify(mockAppender, never()).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
       @Override
-      public boolean matches(Object argument) {
-        return ((LoggingEvent) argument).getRenderedMessage().contains("Regionserver not serving any regions");
+      public boolean matches(LoggingEvent argument) {
+        return argument.getRenderedMessage().contains("Regionserver not serving any regions");
       }
     }));
   }

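The hunks above track another Mockito 2 API change: ArgumentMatcher<T> went from an abstract class with matches(Object) to an interface with a typed matches(T), which removes the casts and also makes it a lambda target. A minimal sketch (the mocked List and message are illustrative, not from the patch):

    import static org.mockito.ArgumentMatchers.argThat;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    import java.util.List;

    public class ArgThatSketch {
      public static void main(String[] args) {
        @SuppressWarnings("unchecked")
        List<String> list = mock(List.class);
        list.add("read timeout exceeded");
        // matches(T) is typed, so no cast is needed, and a lambda satisfies it:
        verify(list).add(argThat(s -> s.contains("timeout")));
      }
    }
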
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
index 40f87c6..bf43982 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
@@ -374,7 +374,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
         .when(
           hri.bulkLoadHFile((RpcController) Mockito.any(), (BulkLoadHFileRequest) Mockito.any()))
         .thenThrow(new ServiceException(new IOException("injecting bulk load error")));
-    Mockito.when(c.getClient(Mockito.any(ServerName.class))).thenReturn(hri);
+    Mockito.when(c.getClient(Mockito.any())).thenReturn(hri);
     return c;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
index b3fd308..c4bffd0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
@@ -1008,7 +1008,7 @@ public class TestWALSplit {
                 Thread.currentThread().interrupt();
               }
             }
-            Entry entry = (Entry) invocation.getArguments()[0];
+            Entry entry = (Entry) invocation.getArgument(0);
             WALEdit edit = entry.getEdit();
             List<Cell> cells = edit.getCells();
             assertEquals(1, cells.size());

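InvocationOnMock.getArgument(int) is the Mockito 2 replacement for indexing getArguments() and casting by hand; it returns the argument with an inferred type. A small sketch of an Answer using it (the mocked List and printout are illustrative):

    import static org.mockito.ArgumentMatchers.anyString;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.mock;

    import java.util.List;

    public class GetArgumentSketch {
      public static void main(String[] args) {
        @SuppressWarnings("unchecked")
        List<String> list = mock(List.class);
        doAnswer(invocation -> {
          // Typed accessor: no (String) invocation.getArguments()[0] cast needed.
          String value = invocation.getArgument(0);
          System.out.println("added: " + value);
          return true;
        }).when(list).add(anyString());
        list.add("hello"); // prints: added: hello
      }
    }
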
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-shaded/hbase-shaded-check-invariants/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-shaded/hbase-shaded-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
index 8592d71..d8bf7d2 100644
--- a/hbase-shaded/hbase-shaded-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
@@ -63,7 +63,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>provided</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 2a9b8c9..137e416 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1435,7 +1435,7 @@
     <hamcrest.version>1.3</hamcrest.version>
     <htrace.version>3.2.0-incubating</htrace.version>
     <log4j.version>1.2.17</log4j.version>
-    <mockito-all.version>1.10.19</mockito-all.version>
+    <mockito-core.version>2.1.0</mockito-core.version>
     <!--Internally we use a different version of protobuf. See hbase-protocol-shaded-->
     <external.protobuf.version>2.5.0</external.protobuf.version>
     <protobuf.plugin.version>0.5.0</protobuf.plugin.version>
@@ -2040,8 +2040,8 @@
       </dependency>
       <dependency>
         <groupId>org.mockito</groupId>
-        <artifactId>mockito-all</artifactId>
-        <version>${mockito-all.version}</version>
+        <artifactId>mockito-core</artifactId>
+        <version>${mockito-core.version}</version>
         <scope>test</scope>
       </dependency>
       <dependency>
@@ -3366,8 +3366,8 @@
               <additionalDependencies>
                 <additionalDependency>
                   <groupId>org.mockito</groupId>
-                  <artifactId>mockito-all</artifactId>
-                  <version>${mockito-all.version}</version>
+                  <artifactId>mockito-core</artifactId>
+                  <version>${mockito-core.version}</version>
                 </additionalDependency>
                 <additionalDependency>
                   <groupId>org.hamcrest</groupId>
@@ -3409,8 +3409,8 @@
               <additionalDependencies>
                 <additionalDependency>
                   <groupId>org.mockito</groupId>
-                  <artifactId>mockito-all</artifactId>
-                  <version>${mockito-all.version}</version>
+                  <artifactId>mockito-core</artifactId>
+                  <version>${mockito-core.version}</version>
                 </additionalDependency>
                 <additionalDependency>
                   <groupId>org.hamcrest</groupId>
@@ -3463,8 +3463,8 @@
               <additionalDependencies>
                 <additionalDependency>
                   <groupId>org.mockito</groupId>
-                  <artifactId>mockito-all</artifactId>
-                  <version>${mockito-all.version}</version>
+                  <artifactId>mockito-core</artifactId>
+                  <version>${mockito-core.version}</version>
                 </additionalDependency>
                 <additionalDependency>
                   <groupId>org.hamcrest</groupId>
@@ -3516,8 +3516,8 @@
               <additionalDependencies>
                 <additionalDependency>
                   <groupId>org.mockito</groupId>
-                  <artifactId>mockito-all</artifactId>
-                  <version>${mockito-all.version}</version>
+                  <artifactId>mockito-core</artifactId>
+                  <version>${mockito-core.version}</version>
                 </additionalDependency>
                 <additionalDependency>
                   <groupId>org.hamcrest</groupId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/src/main/asciidoc/_chapters/unit_testing.adoc
----------------------------------------------------------------------
diff --git a/src/main/asciidoc/_chapters/unit_testing.adoc b/src/main/asciidoc/_chapters/unit_testing.adoc
index 50d4a71..55dedf4 100644
--- a/src/main/asciidoc/_chapters/unit_testing.adoc
+++ b/src/main/asciidoc/_chapters/unit_testing.adoc
@@ -117,8 +117,8 @@ First, add a dependency for Mockito to your Maven POM file.
 
 <dependency>
     <groupId>org.mockito</groupId>
-    <artifactId>mockito-all</artifactId>
-    <version>1.9.5</version>
+    <artifactId>mockito-core</artifactId>
+    <version>2.1.0</version>
     <scope>test</scope>
 </dependency>
 ----
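
Once the mockito-core dependency above is on the test classpath, the usual JUnit-plus-Mockito pattern applies unchanged. A minimal smoke test (illustrative, not part of the guide):

    import static org.junit.Assert.assertEquals;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.List;
    import org.junit.Test;

    public class MockitoCoreSmokeTest {
      @Test
      public void stubsWithMockitoCore() {
        @SuppressWarnings("unchecked")
        List<String> list = mock(List.class);
        when(list.get(0)).thenReturn("stubbed");
        assertEquals("stubbed", list.get(0));
      }
    }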


[05/13] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

Posted by bu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
new file mode 100644
index 0000000..234bd7a
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
@@ -0,0 +1,342 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.http.ssl;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.Writer;
+import java.math.BigInteger;
+import java.net.URL;
+import java.security.GeneralSecurityException;
+import java.security.InvalidKeyException;
+import java.security.Key;
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.KeyStore;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+import java.security.SecureRandom;
+import java.security.SignatureException;
+import java.security.cert.Certificate;
+import java.security.cert.CertificateEncodingException;
+import java.security.cert.X509Certificate;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.security.auth.x500.X500Principal;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.bouncycastle.x509.X509V1CertificateGenerator;
+
+public class KeyStoreTestUtil {
+
+  public static String getClasspathDir(Class<?> klass) throws Exception {
+    String file = klass.getName();
+    file = file.replace('.', '/') + ".class";
+    URL url = Thread.currentThread().getContextClassLoader().getResource(file);
+    String baseDir = url.toURI().getPath();
+    baseDir = baseDir.substring(0, baseDir.length() - file.length() - 1);
+    return baseDir;
+  }
+
+  /**
+   * Create a self-signed X.509 Certificate.
+   *
+   * @param dn the X.509 Distinguished Name, eg "CN=Test, L=London, C=GB"
+   * @param pair the KeyPair
+   * @param days how many days from now the Certificate is valid for
+   * @param algorithm the signing algorithm, eg "SHA1withRSA"
+   * @return the self-signed certificate
+   */
+  public static X509Certificate generateCertificate(String dn, KeyPair pair, int days, String algorithm)
+      throws CertificateEncodingException, InvalidKeyException, IllegalStateException, 
+      NoSuchProviderException, NoSuchAlgorithmException, SignatureException {
+    Date from = new Date();
+    Date to = new Date(from.getTime() + days * 86400000L);
+    BigInteger sn = new BigInteger(64, new SecureRandom());
+    KeyPair keyPair = pair;
+    X509V1CertificateGenerator certGen = new X509V1CertificateGenerator();
+    X500Principal dnName = new X500Principal(dn);
+
+    certGen.setSerialNumber(sn);
+    certGen.setIssuerDN(dnName);
+    certGen.setNotBefore(from);
+    certGen.setNotAfter(to);
+    certGen.setSubjectDN(dnName);
+    certGen.setPublicKey(keyPair.getPublic());
+    certGen.setSignatureAlgorithm(algorithm);
+    X509Certificate cert = certGen.generate(pair.getPrivate());
+    return cert;
+  }
+
+  public static KeyPair generateKeyPair(String algorithm)
+    throws NoSuchAlgorithmException {
+    KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
+    keyGen.initialize(1024);
+    return keyGen.genKeyPair();
+  }
+
+  private static KeyStore createEmptyKeyStore()
+    throws GeneralSecurityException, IOException {
+    KeyStore ks = KeyStore.getInstance("JKS");
+    ks.load(null, null); // initialize
+    return ks;
+  }
+
+  private static void saveKeyStore(KeyStore ks, String filename,
+                                   String password)
+    throws GeneralSecurityException, IOException {
+    FileOutputStream out = new FileOutputStream(filename);
+    try {
+      ks.store(out, password.toCharArray());
+    } finally {
+      out.close();
+    }
+  }
+
+  public static void createKeyStore(String filename,
+                                    String password, String alias,
+                                    Key privateKey, Certificate cert)
+    throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    ks.setKeyEntry(alias, privateKey, password.toCharArray(),
+                   new Certificate[]{cert});
+    saveKeyStore(ks, filename, password);
+  }
+
+  /**
+   * Creates a keystore with a single key and saves it to a file.
+   * 
+   * @param filename String file to save
+   * @param password String store password to set on keystore
+   * @param keyPassword String key password to set on key
+   * @param alias String alias to use for the key
+   * @param privateKey Key to save in keystore
+   * @param cert Certificate to use as certificate chain associated to key
+   * @throws GeneralSecurityException for any error with the security APIs
+   * @throws IOException if there is an I/O error saving the file
+   */
+  public static void createKeyStore(String filename,
+                                    String password, String keyPassword, String alias,
+                                    Key privateKey, Certificate cert)
+    throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    ks.setKeyEntry(alias, privateKey, keyPassword.toCharArray(),
+                   new Certificate[]{cert});
+    saveKeyStore(ks, filename, password);
+  }
+
+  public static void createTrustStore(String filename,
+                                      String password, String alias,
+                                      Certificate cert)
+    throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    ks.setCertificateEntry(alias, cert);
+    saveKeyStore(ks, filename, password);
+  }
+
+  public static <T extends Certificate> void createTrustStore(
+    String filename, String password, Map<String, T> certs)
+    throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    for (Map.Entry<String, T> cert : certs.entrySet()) {
+      ks.setCertificateEntry(cert.getKey(), cert.getValue());
+    }
+    saveKeyStore(ks, filename, password);
+  }
+
+  public static void cleanupSSLConfig(String keystoresDir, String sslConfDir)
+    throws Exception {
+    File f = new File(keystoresDir + "/clientKS.jks");
+    f.delete();
+    f = new File(keystoresDir + "/serverKS.jks");
+    f.delete();
+    f = new File(keystoresDir + "/trustKS.jks");
+    f.delete();
+    f = new File(sslConfDir + "/ssl-client.xml");
+    f.delete();
+    f = new File(sslConfDir +  "/ssl-server.xml");
+    f.delete();
+  }
+
+  /**
+   * Performs complete setup of SSL configuration in preparation for testing an
+   * SSLFactory.  This includes keys, certs, keystores, truststores, the server
+   * SSL configuration file, the client SSL configuration file, and the master
+   * configuration file read by the SSLFactory.
+   * 
+   * @param keystoresDir String directory to save keystores
+   * @param sslConfDir String directory to save SSL configuration files
+   * @param conf Configuration master configuration to be used by an SSLFactory,
+   *   which will be mutated by this method
+   * @param useClientCert boolean true to make the client present a cert in the
+   *   SSL handshake
+   */
+  public static void setupSSLConfig(String keystoresDir, String sslConfDir,
+                                    Configuration conf, boolean useClientCert)
+    throws Exception {
+    String clientKS = keystoresDir + "/clientKS.jks";
+    String clientPassword = "clientP";
+    String serverKS = keystoresDir + "/serverKS.jks";
+    String serverPassword = "serverP";
+    String trustKS = keystoresDir + "/trustKS.jks";
+    String trustPassword = "trustP";
+
+    File sslClientConfFile = new File(sslConfDir + "/ssl-client.xml");
+    File sslServerConfFile = new File(sslConfDir + "/ssl-server.xml");
+
+    Map<String, X509Certificate> certs = new HashMap<>();
+
+    if (useClientCert) {
+      KeyPair cKP = KeyStoreTestUtil.generateKeyPair("RSA");
+      X509Certificate cCert =
+        KeyStoreTestUtil.generateCertificate("CN=localhost, O=client", cKP, 30,
+                                             "SHA1withRSA");
+      KeyStoreTestUtil.createKeyStore(clientKS, clientPassword, "client",
+                                      cKP.getPrivate(), cCert);
+      certs.put("client", cCert);
+    }
+
+    KeyPair sKP = KeyStoreTestUtil.generateKeyPair("RSA");
+    X509Certificate sCert =
+      KeyStoreTestUtil.generateCertificate("CN=localhost, O=server", sKP, 30,
+                                           "SHA1withRSA");
+    KeyStoreTestUtil.createKeyStore(serverKS, serverPassword, "server",
+                                    sKP.getPrivate(), sCert);
+    certs.put("server", sCert);
+
+    KeyStoreTestUtil.createTrustStore(trustKS, trustPassword, certs);
+
+    Configuration clientSSLConf = createClientSSLConfig(clientKS, clientPassword,
+      clientPassword, trustKS);
+    Configuration serverSSLConf = createServerSSLConfig(serverKS, serverPassword,
+      serverPassword, trustKS);
+
+    saveConfig(sslClientConfFile, clientSSLConf);
+    saveConfig(sslServerConfFile, serverSSLConf);
+
+    conf.set(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
+    conf.set(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfFile.getName());
+    conf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile.getName());
+    conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, useClientCert);
+  }
+
+  /**
+   * Creates SSL configuration for a client.
+   * 
+   * @param clientKS String client keystore file
+   * @param password String store password, or null to avoid setting store
+   *   password
+   * @param keyPassword String key password, or null to avoid setting key
+   *   password
+   * @param trustKS String truststore file
+   * @return Configuration for client SSL
+   */
+  public static Configuration createClientSSLConfig(String clientKS,
+      String password, String keyPassword, String trustKS) {
+    Configuration clientSSLConf = createSSLConfig(SSLFactory.Mode.CLIENT,
+      clientKS, password, keyPassword, trustKS);
+    return clientSSLConf;
+  }
+
+  /**
+   * Creates SSL configuration for a server.
+   * 
+   * @param serverKS String server keystore file
+   * @param password String store password, or null to avoid setting store
+   *   password
+   * @param keyPassword String key password, or null to avoid setting key
+   *   password
+   * @param trustKS String truststore file
+   * @return Configuration for server SSL
+   */
+  public static Configuration createServerSSLConfig(String serverKS,
+      String password, String keyPassword, String trustKS) throws IOException {
+    Configuration serverSSLConf = createSSLConfig(SSLFactory.Mode.SERVER,
+      serverKS, password, keyPassword, trustKS);
+    return serverSSLConf;
+  }
+
+  /**
+   * Creates SSL configuration.
+   * 
+   * @param mode SSLFactory.Mode mode to configure
+   * @param keystore String keystore file
+   * @param password String store password, or null to avoid setting store
+   *   password
+   * @param keyPassword String key password, or null to avoid setting key
+   *   password
+   * @param trustKS String truststore file
+   * @return Configuration for SSL
+   */
+  private static Configuration createSSLConfig(SSLFactory.Mode mode,
+      String keystore, String password, String keyPassword, String trustKS) {
+    String trustPassword = "trustP";
+
+    Configuration sslConf = new Configuration(false);
+    if (keystore != null) {
+      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY), keystore);
+    }
+    if (password != null) {
+      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY), password);
+    }
+    if (keyPassword != null) {
+      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
+        keyPassword);
+    }
+    if (trustKS != null) {
+      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY), trustKS);
+    }
+    if (trustPassword != null) {
+      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY),
+        trustPassword);
+    }
+    sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+      FileBasedKeyStoresFactory.SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY), "1000");
+
+    return sslConf;
+  }
+
+  /**
+   * Saves configuration to a file.
+   * 
+   * @param file File to save
+   * @param conf Configuration contents to write to file
+   * @throws IOException if there is an I/O error saving the file
+   */
+  public static void saveConfig(File file, Configuration conf)
+      throws IOException {
+    Writer writer = new FileWriter(file);
+    try {
+      conf.writeXml(writer);
+    } finally {
+      writer.close();
+    }
+  }
+}
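
For reference, the restored utility is typically driven end to end through setupSSLConfig/cleanupSSLConfig. A hedged sketch of that flow (the temp-dir handling and wrapper class are illustrative; only the KeyStoreTestUtil calls come from the class above):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;

    public class SslSetupSketch {
      public static void main(String[] args) throws Exception {
        String keystoresDir = System.getProperty("java.io.tmpdir");
        String sslConfDir = KeyStoreTestUtil.getClasspathDir(SslSetupSketch.class);
        Configuration conf = new Configuration(false);
        // Generates key pairs, self-signed certs, key/trust stores, and the
        // ssl-client.xml / ssl-server.xml files, then points conf at them.
        KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
        try {
          // ... exercise SSL-dependent code under test here ...
        } finally {
          KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
        }
      }
    }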

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-shaded/hbase-shaded-mapreduce/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-shaded/hbase-shaded-mapreduce/pom.xml b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
index cfcc357..1f2a2bf 100644
--- a/hbase-shaded/hbase-shaded-mapreduce/pom.xml
+++ b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
@@ -142,10 +142,6 @@
                 <artifactId>javax.servlet.jsp</artifactId>
               </exclusion>
               <exclusion>
-                <groupId>org.glassfish.jersey.core</groupId>
-                <artifactId>jersey-server</artifactId>
-              </exclusion>
-              <exclusion>
                 <groupId>org.glassfish.jersey.containers</groupId>
                 <artifactId>jersey-container-servlet-core</artifactId>
               </exclusion>

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 137e416..03ce2b2 100755
--- a/pom.xml
+++ b/pom.xml
@@ -66,7 +66,6 @@
     <module>hbase-replication</module>
     <module>hbase-mapreduce</module>
     <module>hbase-resource-bundle</module>
-    <module>hbase-http</module>
     <module>hbase-server</module>
     <module>hbase-thrift</module>
     <module>hbase-shell</module>
@@ -1646,18 +1645,6 @@
         <version>${project.version}</version>
       </dependency>
       <dependency>
-        <artifactId>hbase-http</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <artifactId>hbase-http</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>${project.version}</version>
-        <type>test-jar</type>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
         <artifactId>hbase-server</artifactId>
         <groupId>org.apache.hbase</groupId>
         <version>${project.version}</version>
@@ -2001,11 +1988,6 @@
         <version>${jersey.version}</version>
       </dependency>
       <dependency>
-        <groupId>org.glassfish.jersey.core</groupId>
-        <artifactId>jersey-server</artifactId>
-        <version>${jersey.version}</version>
-      </dependency>
-      <dependency>
         <!--This lib has JspC in it. Needed precompiling jsps in hbase-rest, etc.-->
         <groupId>org.glassfish.web</groupId>
         <artifactId>javax.servlet.jsp</artifactId>
@@ -2068,11 +2050,6 @@
       </dependency>
       <dependency>
         <groupId>org.apache.kerby</groupId>
-        <artifactId>kerb-core</artifactId>
-        <version>${kerby.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.kerby</groupId>
         <artifactId>kerb-client</artifactId>
         <version>${kerby.version}</version>
       </dependency>


[07/13] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

Posted by bu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
new file mode 100644
index 0000000..e23eecd
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.log;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.regex.Pattern;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.impl.Jdk14Logger;
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.util.ServletUtil;
+
+/**
+ * Change log level in runtime.
+ */
+@InterfaceStability.Evolving
+public class LogLevel {
+  public static final String USAGES = "\nUsage: General options are:\n"
+      + "\t[-getlevel <host:httpPort> <name>]\n"
+      + "\t[-setlevel <host:httpPort> <name> <level>]\n";
+
+  /**
+   * A command line implementation
+   */
+  public static void main(String[] args) {
+    if (args.length == 3 && "-getlevel".equals(args[0])) {
+      process("http://" + args[1] + "/logLevel?log=" + args[2]);
+      return;
+    }
+    else if (args.length == 4 && "-setlevel".equals(args[0])) {
+      process("http://" + args[1] + "/logLevel?log=" + args[2]
+              + "&level=" + args[3]);
+      return;
+    }
+
+    System.err.println(USAGES);
+    System.exit(-1);
+  }
+
+  private static void process(String urlstring) {
+    try {
+      URL url = new URL(urlstring);
+      System.out.println("Connecting to " + url);
+      URLConnection connection = url.openConnection();
+      connection.connect();
+      try (InputStreamReader streamReader = new InputStreamReader(connection.getInputStream());
+           BufferedReader bufferedReader = new BufferedReader(streamReader)) {
+        for(String line; (line = bufferedReader.readLine()) != null; ) {
+          if (line.startsWith(MARKER)) {
+            System.out.println(TAG.matcher(line).replaceAll(""));
+          }
+        }
+      }
+    } catch (IOException ioe) {
+      System.err.println("" + ioe);
+    }
+  }
+
+  static final String MARKER = "<!-- OUTPUT -->";
+  static final Pattern TAG = Pattern.compile("<[^>]*>");
+
+  /**
+   * A servlet implementation
+   */
+  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+  @InterfaceStability.Unstable
+  public static class Servlet extends HttpServlet {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public void doGet(HttpServletRequest request, HttpServletResponse response
+        ) throws ServletException, IOException {
+
+      // Do the authorization
+      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
+          response)) {
+        return;
+      }
+
+      PrintWriter out = ServletUtil.initHTML(response, "Log Level");
+      String logName = ServletUtil.getParameter(request, "log");
+      String level = ServletUtil.getParameter(request, "level");
+
+      if (logName != null) {
+        out.println("<br /><hr /><h3>Results</h3>");
+        out.println(MARKER
+            + "Submitted Log Name: <b>" + logName + "</b><br />");
+
+        Log log = LogFactory.getLog(logName);
+        out.println(MARKER
+            + "Log Class: <b>" + log.getClass().getName() +"</b><br />");
+        if (level != null) {
+          out.println(MARKER + "Submitted Level: <b>" + level + "</b><br />");
+        }
+
+        if (log instanceof Log4JLogger) {
+          process(((Log4JLogger)log).getLogger(), level, out);
+        }
+        else if (log instanceof Jdk14Logger) {
+          process(((Jdk14Logger)log).getLogger(), level, out);
+        }
+        else {
+          out.println("Sorry, " + log.getClass() + " not supported.<br />");
+        }
+      }
+
+      out.println(FORMS);
+      out.println(ServletUtil.HTML_TAIL);
+    }
+
+    static final String FORMS = "\n<br /><hr /><h3>Get / Set</h3>"
+        + "\n<form>Log: <input type='text' size='50' name='log' /> "
+        + "<input type='submit' value='Get Log Level' />"
+        + "</form>"
+        + "\n<form>Log: <input type='text' size='50' name='log' /> "
+        + "Level: <input type='text' name='level' /> "
+        + "<input type='submit' value='Set Log Level' />"
+        + "</form>";
+
+    private static void process(org.apache.log4j.Logger log, String level,
+        PrintWriter out) throws IOException {
+      if (level != null) {
+        if (!level.equals(org.apache.log4j.Level.toLevel(level).toString())) {
+          out.println(MARKER + "Bad level : <b>" + level + "</b><br />");
+        } else {
+          log.setLevel(org.apache.log4j.Level.toLevel(level));
+          out.println(MARKER + "Setting Level to " + level + " ...<br />");
+        }
+      }
+      out.println(MARKER
+          + "Effective level: <b>" + log.getEffectiveLevel() + "</b><br />");
+    }
+
+    private static void process(java.util.logging.Logger log, String level,
+        PrintWriter out) throws IOException {
+      if (level != null) {
+        log.setLevel(java.util.logging.Level.parse(level));
+        out.println(MARKER + "Setting Level to " + level + " ...<br />");
+      }
+
+      java.util.logging.Level lev;
+      for(; (lev = log.getLevel()) == null; log = log.getParent());
+      out.println(MARKER + "Effective level: <b>" + lev + "</b><br />");
+    }
+  }
+}
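
The USAGES string above is the whole command-line contract. A sketch of both forms driven programmatically (host, port, and logger name are placeholders, not defaults taken from the patch):

    public class LogLevelSketch {
      public static void main(String[] args) throws Exception {
        // Query the current level of a logger over the server's HTTP port:
        org.apache.hadoop.hbase.http.log.LogLevel.main(new String[] {
            "-getlevel", "rs-host:16030", "org.apache.hadoop.hbase"});
        // Set it to DEBUG:
        org.apache.hadoop.hbase.http.log.LogLevel.main(new String[] {
            "-setlevel", "rs-host:16030", "org.apache.hadoop.hbase", "DEBUG"});
      }
    }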

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/package-info.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/package-info.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/package-info.java
new file mode 100644
index 0000000..f55e24b
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/package-info.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * <p>
+ * Copied from hadoop source code.<br>
+ * See https://issues.apache.org/jira/browse/HADOOP-10232 to know why.
+ * </p>
+ */
+@InterfaceStability.Unstable
+package org.apache.hadoop.hbase.http;
+
+import org.apache.yetus.audience.InterfaceStability;

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DumpRegionServerMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DumpRegionServerMetrics.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DumpRegionServerMetrics.java
deleted file mode 100644
index 2b07a64..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DumpRegionServerMetrics.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.regionserver;
-
-import org.apache.hadoop.hbase.util.JSONBean;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.lang.management.ManagementFactory;
-import javax.management.MBeanServer;
-import javax.management.MalformedObjectNameException;
-import javax.management.ObjectName;
-
-/**
- * Utility for doing JSON and MBeans.
- */
-public class DumpRegionServerMetrics {
-  /**
-   * Dump out a subset of regionserver mbeans only, not all of them, as json on System.out.
-   */
-  public static String dumpMetrics() throws MalformedObjectNameException, IOException {
-    StringWriter sw = new StringWriter(1024 * 100); // Guess this size
-    try (PrintWriter writer = new PrintWriter(sw)) {
-      JSONBean dumper = new JSONBean();
-      try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) {
-        MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
-        jsonBeanWriter.write(mbeanServer,
-          new ObjectName("java.lang:type=Memory"), null, false);
-        jsonBeanWriter.write(mbeanServer,
-          new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=IPC"), null, false);
-        jsonBeanWriter.write(mbeanServer,
-          new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=Replication"), null, false);
-        jsonBeanWriter.write(mbeanServer,
-          new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=Server"), null, false);
-      }
-    }
-    sw.close();
-    return sw.toString();
-  }
-
-  public static void main(String[] args) throws IOException, MalformedObjectNameException {
-    String str = dumpMetrics();
-    System.out.println(str);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index f384c1f..ef3f10d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -146,6 +146,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HasThread;
+import org.apache.hadoop.hbase.util.JSONBean;
 import org.apache.hadoop.hbase.util.JvmPauseMonitor;
 import org.apache.hadoop.hbase.util.NettyEventLoopGroupConfig;
 import org.apache.hadoop.hbase.util.Pair;
@@ -2400,7 +2401,7 @@ public class HRegionServer extends HasThread implements
         CoprocessorHost.getLoadedCoprocessors());
     // Try and dump metrics if abort -- might give clue as to how fatal came about....
     try {
-      LOG.info("Dump of metrics as JSON on abort: " + DumpRegionServerMetrics.dumpMetrics());
+      LOG.info("Dump of metrics as JSON on abort: " + JSONBean.dumpRegionServerMetrics());
     } catch (MalformedObjectNameException | IOException e) {
       LOG.warn("Failed dumping metrics", e);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HttpServerUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HttpServerUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HttpServerUtil.java
new file mode 100644
index 0000000..59c6ad6
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HttpServerUtil.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import org.eclipse.jetty.security.ConstraintSecurityHandler;
+import org.eclipse.jetty.util.security.Constraint;
+import org.eclipse.jetty.security.ConstraintMapping;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+
+/**
+ * HttpServer utility.
+ */
+public class HttpServerUtil {
+  /**
+   * Add constraints to a Jetty Context to disallow undesirable HTTP methods.
+   * @param ctxHandler The context to modify
+   */
+  public static void constrainHttpMethods(ServletContextHandler ctxHandler) {
+    Constraint c = new Constraint();
+    c.setAuthenticate(true);
+
+    ConstraintMapping cmt = new ConstraintMapping();
+    cmt.setConstraint(c);
+    cmt.setMethod("TRACE");
+    cmt.setPathSpec("/*");
+
+    ConstraintMapping cmo = new ConstraintMapping();
+    cmo.setConstraint(c);
+    cmo.setMethod("OPTIONS");
+    cmo.setPathSpec("/*");
+
+    ConstraintSecurityHandler securityHandler = new ConstraintSecurityHandler();
+    securityHandler.setConstraintMappings(new ConstraintMapping[]{ cmt, cmo });
+
+    ctxHandler.setSecurityHandler(securityHandler);
+  }
+}
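
A minimal sketch of wiring constrainHttpMethods into a Jetty server before startup (not from the patch; the port and context path are arbitrary):

    import org.apache.hadoop.hbase.util.HttpServerUtil;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.servlet.ServletContextHandler;

    public class ConstrainMethodsSketch {
      public static void main(String[] args) throws Exception {
        Server server = new Server(8080);
        ServletContextHandler context = new ServletContextHandler();
        context.setContextPath("/");
        // Rejects TRACE and OPTIONS on every path before any servlet runs.
        HttpServerUtil.constrainHttpMethods(context);
        server.setHandler(context);
        server.start();
        server.join();
      }
    }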

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
new file mode 100644
index 0000000..f4a146e
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
@@ -0,0 +1,387 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.lang.management.ManagementFactory;
+import java.lang.reflect.Array;
+import java.util.Iterator;
+import java.util.Set;
+
+import javax.management.AttributeNotFoundException;
+import javax.management.InstanceNotFoundException;
+import javax.management.IntrospectionException;
+import javax.management.MBeanAttributeInfo;
+import javax.management.MBeanException;
+import javax.management.MBeanInfo;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.ObjectName;
+import javax.management.ReflectionException;
+import javax.management.RuntimeErrorException;
+import javax.management.RuntimeMBeanException;
+import javax.management.openmbean.CompositeData;
+import javax.management.openmbean.CompositeType;
+import javax.management.openmbean.TabularData;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * Utility for doing JSON and MBeans.
+ */
+public class JSONBean {
+  private static final Log LOG = LogFactory.getLog(JSONBean.class);
+  private final JsonFactory jsonFactory;
+
+  public JSONBean() {
+    this.jsonFactory = new JsonFactory();
+  }
+
+  /**
+   * Use to dump out MBeans as JSON.
+   */
+  public interface Writer extends Closeable {
+    void write(final String key, final String value) throws JsonGenerationException, IOException;
+    int write(final MBeanServer mBeanServer, ObjectName qry, String attribute,
+        final boolean description) throws IOException;
+    void flush() throws IOException;
+  }
+
+  public Writer open(final PrintWriter writer) throws IOException {
+    final JsonGenerator jg = jsonFactory.createJsonGenerator(writer);
+    jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
+    jg.useDefaultPrettyPrinter();
+    jg.writeStartObject();
+    return new Writer() {
+      @Override
+      public void flush() throws IOException {
+        jg.flush();
+      }
+
+      @Override
+      public void close() throws IOException {
+        jg.close();
+      }
+
+      @Override
+      public void write(String key, String value) throws JsonGenerationException, IOException {
+        jg.writeStringField(key, value);
+      }
+
+      @Override
+      public int write(MBeanServer mBeanServer, ObjectName qry, String attribute,
+          boolean description)
+      throws IOException {
+        return JSONBean.write(jg, mBeanServer, qry, attribute, description);
+      }
+    };
+  }
+
+  /**
+   * @param mBeanServer
+   * @param qry
+   * @param attribute
+   * @param description
+   * @return Non-zero if we failed to find the bean; 0 otherwise.
+   * @throws IOException
+   */
+  private static int write(final JsonGenerator jg,
+      final MBeanServer mBeanServer, ObjectName qry, String attribute,
+      final boolean description)
+  throws IOException {
+    LOG.trace("Listing beans for "+qry);
+    Set<ObjectName> names = null;
+    names = mBeanServer.queryNames(qry, null);
+    jg.writeArrayFieldStart("beans");
+    Iterator<ObjectName> it = names.iterator();
+    while (it.hasNext()) {
+      ObjectName oname = it.next();
+      MBeanInfo minfo;
+      String code = "";
+      String descriptionStr = null;
+      Object attributeinfo = null;
+      try {
+        minfo = mBeanServer.getMBeanInfo(oname);
+        code = minfo.getClassName();
+        if (description) descriptionStr = minfo.getDescription();
+        String prs = "";
+        try {
+          if ("org.apache.commons.modeler.BaseModelMBean".equals(code)) {
+            prs = "modelerType";
+            code = (String) mBeanServer.getAttribute(oname, prs);
+          }
+          if (attribute != null) {
+            prs = attribute;
+            attributeinfo = mBeanServer.getAttribute(oname, prs);
+          }
+        } catch (RuntimeMBeanException e) {
+         // UnsupportedOperationExceptions happen in the normal course of business,
+         // so no need to log them as errors all the time.
+         if (e.getCause() instanceof UnsupportedOperationException) {
+           if (LOG.isTraceEnabled()) {
+             LOG.trace("Getting attribute " + prs + " of " + oname + " threw " + e);
+           }
+         } else {
+           LOG.error("Getting attribute " + prs + " of " + oname + " threw an exception", e);
+         }
+         return 0;
+        } catch (AttributeNotFoundException e) {
+          // If the modelerType attribute was not found, the class name is used
+          // instead.
+          LOG.error("getting attribute " + prs + " of " + oname
+              + " threw an exception", e);
+        } catch (MBeanException e) {
+          // The code inside the attribute getter threw an exception so log it,
+          // and fall back on the class name
+          LOG.error("getting attribute " + prs + " of " + oname
+              + " threw an exception", e);
+        } catch (RuntimeException e) {
+          // For some reason even with an MBeanException available to them
+          // Runtime exceptions can still find their way through, so treat them
+          // the same as MBeanException
+          LOG.error("getting attribute " + prs + " of " + oname
+              + " threw an exception", e);
+        } catch (ReflectionException e) {
+          // This happens when the code inside the JMX bean (setter?? from the
+          // java docs) threw an exception, so log it and fall back on the
+          // class name
+          LOG.error("getting attribute " + prs + " of " + oname
+              + " threw an exception", e);
+        }
+      } catch (InstanceNotFoundException e) {
+        // Ignored: for some reason the bean was not found, so don't output it
+        continue;
+      } catch (IntrospectionException e) {
+        // This is an internal error, something odd happened with reflection so
+        // log it and don't output the bean.
+        LOG.error("Problem while trying to process JMX query: " + qry
+            + " with MBean " + oname, e);
+        continue;
+      } catch (ReflectionException e) {
+        // This happens when the code inside the JMX bean threw an exception, so
+        // log it and don't output the bean.
+        LOG.error("Problem while trying to process JMX query: " + qry
+            + " with MBean " + oname, e);
+        continue;
+      }
+
+      jg.writeStartObject();
+      jg.writeStringField("name", oname.toString());
+      if (description && descriptionStr != null && descriptionStr.length() > 0) {
+        jg.writeStringField("description", descriptionStr);
+      }
+      jg.writeStringField("modelerType", code);
+      if (attribute != null && attributeinfo == null) {
+        jg.writeStringField("result", "ERROR");
+        jg.writeStringField("message", "No attribute with name " + attribute + " was found.");
+        jg.writeEndObject();
+        jg.writeEndArray();
+        jg.close();
+        return -1;
+      }
+
+      if (attribute != null) {
+        writeAttribute(jg, attribute, descriptionStr, attributeinfo);
+      } else {
+        MBeanAttributeInfo[] attrs = minfo.getAttributes();
+        for (int i = 0; i < attrs.length; i++) {
+          writeAttribute(jg, mBeanServer, oname, description, attrs[i]);
+        }
+      }
+      jg.writeEndObject();
+    }
+    jg.writeEndArray();
+    return 0;
+  }
+
+  private static void writeAttribute(final JsonGenerator jg,
+      final MBeanServer mBeanServer, ObjectName oname,
+      final boolean description, final MBeanAttributeInfo attr)
+  throws IOException {
+    if (!attr.isReadable()) {
+      return;
+    }
+    String attName = attr.getName();
+    if ("modelerType".equals(attName)) {
+      return;
+    }
+    if (attName.indexOf("=") >= 0 || attName.indexOf(":") >= 0 || attName.indexOf(" ") >= 0) {
+      return;
+    }
+    String descriptionStr = description? attr.getDescription(): null;
+    Object value = null;
+    try {
+      value = mBeanServer.getAttribute(oname, attName);
+    } catch (RuntimeMBeanException e) {
+      // UnsupportedOperationExceptions happen in the normal course of business,
+      // so no need to log them as errors all the time.
+      if (e.getCause() instanceof UnsupportedOperationException) {
+        if (LOG.isTraceEnabled()) {
+          LOG.trace("Getting attribute " + attName + " of " + oname + " threw " + e);
+        }
+      } else {
+        LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
+      }
+      return;
+    } catch (RuntimeErrorException e) {
+      // RuntimeErrorException happens when an unexpected failure occurs in getAttribute
+      // for example https://issues.apache.org/jira/browse/DAEMON-120
+      LOG.debug("getting attribute "+attName+" of "+oname+" threw an exception", e);
+      return;
+    } catch (AttributeNotFoundException e) {
+      // Ignored: the attribute was not found, which should never happen because the
+      // bean just told us that it has this attribute, but if this happens just don't
+      // output the attribute.
+      return;
+    } catch (MBeanException e) {
+      //The code inside the attribute getter threw an exception so log it, and
+      // skip outputting the attribute
+      LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
+      return;
+    } catch (RuntimeException e) {
+      //For some reason even with an MBeanException available to them Runtime exceptions
+      //can still find their way through, so treat them the same as MBeanException
+      LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
+      return;
+    } catch (ReflectionException e) {
+      //This happens when the code inside the JMX bean (setter?? from the java docs)
+      //threw an exception, so log it and skip outputting the attribute
+      LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
+      return;
+    } catch (InstanceNotFoundException e) {
+      // Ignored: the mbean itself was not found, which should never happen because we
+      // just accessed it (perhaps something unregistered in between), but if this
+      // happens just don't output the attribute.
+      return;
+    }
+
+    writeAttribute(jg, attName, descriptionStr, value);
+  }
+
+  private static void writeAttribute(JsonGenerator jg, String attName, final String descriptionStr,
+      Object value)
+  throws IOException {
+    boolean description = false;
+    if (descriptionStr != null && descriptionStr.length() > 0 && !attName.equals(descriptionStr)) {
+      description = true;
+      jg.writeFieldName(attName);
+      jg.writeStartObject();
+      jg.writeFieldName("description");
+      jg.writeString(descriptionStr);
+      jg.writeFieldName("value");
+      writeObject(jg, description, value);
+      jg.writeEndObject();
+    } else {
+      jg.writeFieldName(attName);
+      writeObject(jg, description, value);
+    }
+  }
+
+  private static void writeObject(final JsonGenerator jg, final boolean description, Object value)
+  throws IOException {
+    if(value == null) {
+      jg.writeNull();
+    } else {
+      Class<?> c = value.getClass();
+      if (c.isArray()) {
+        jg.writeStartArray();
+        int len = Array.getLength(value);
+        for (int j = 0; j < len; j++) {
+          Object item = Array.get(value, j);
+          writeObject(jg, description, item);
+        }
+        jg.writeEndArray();
+      } else if(value instanceof Number) {
+        Number n = (Number)value;
+        jg.writeNumber(n.toString());
+      } else if(value instanceof Boolean) {
+        Boolean b = (Boolean)value;
+        jg.writeBoolean(b);
+      } else if(value instanceof CompositeData) {
+        CompositeData cds = (CompositeData)value;
+        CompositeType comp = cds.getCompositeType();
+        Set<String> keys = comp.keySet();
+        jg.writeStartObject();
+        for (String key: keys) {
+          writeAttribute(jg, key, null, cds.get(key));
+        }
+        jg.writeEndObject();
+      } else if(value instanceof TabularData) {
+        TabularData tds = (TabularData)value;
+        jg.writeStartArray();
+        for(Object entry : tds.values()) {
+          writeObject(jg, description, entry);
+        }
+        jg.writeEndArray();
+      } else {
+        jg.writeString(value.toString());
+      }
+    }
+  }
+
+  /**
+   * Dump out a subset of regionserver mbeans only, not all of them, as a JSON string.
+   * @throws MalformedObjectNameException
+   * @throws IOException
+   */
+  public static String dumpRegionServerMetrics() throws MalformedObjectNameException, IOException {
+    StringWriter sw = new StringWriter(1024 * 100); // Guess this size
+    try (PrintWriter writer = new PrintWriter(sw)) {
+      JSONBean dumper = new JSONBean();
+      try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) {
+        MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
+        jsonBeanWriter.write(mbeanServer,
+          new ObjectName("java.lang:type=Memory"), null, false);
+        jsonBeanWriter.write(mbeanServer,
+          new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=IPC"), null, false);
+        jsonBeanWriter.write(mbeanServer,
+          new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=Replication"), null, false);
+        jsonBeanWriter.write(mbeanServer,
+          new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=Server"), null, false);
+      }
+    }
+    sw.close();
+    return sw.toString();
+  }
+
+  /**
+   * Dump out all registered mbeans as json on System.out.
+   * @throws IOException
+   * @throws MalformedObjectNameException
+   */
+  public static void dumpAllBeans() throws IOException, MalformedObjectNameException {
+    try (PrintWriter writer = new PrintWriter(System.out)) {
+      JSONBean dumper = new JSONBean();
+      try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) {
+        MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
+        jsonBeanWriter.write(mbeanServer, new ObjectName("*:*"), null, false);
+      }
+    }
+  }
+
+  public static void main(String[] args) throws IOException, MalformedObjectNameException {
+    String str = dumpRegionServerMetrics();
+    System.out.println(str);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
new file mode 100644
index 0000000..879f32e
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.beans.IntrospectionException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.lang.management.GarbageCollectorMXBean;
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryPoolMXBean;
+import java.lang.management.RuntimeMXBean;
+import java.util.Hashtable;
+import java.util.List;
+import java.util.Set;
+
+import javax.management.InstanceNotFoundException;
+import javax.management.MBeanAttributeInfo;
+import javax.management.MBeanInfo;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.ObjectName;
+import javax.management.ReflectionException;
+import javax.management.openmbean.CompositeData;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.codehaus.jettison.json.JSONException;
+
+public final class JSONMetricUtil {
+
+  private static final Log LOG = LogFactory.getLog(JSONMetricUtil.class);
+
+  private static MBeanServer mbServer = ManagementFactory.getPlatformMBeanServer();
+  //MBeans ObjectName domain names
+  public static final String JAVA_LANG_DOMAIN = "java.lang";
+  public static final String JAVA_NIO_DOMAIN = "java.nio";
+  public static final String SUN_MGMT_DOMAIN = "com.sun.management";
+  public static final String HADOOP_DOMAIN = "Hadoop";
+
+  //MBeans ObjectName properties key names
+  public static final String TYPE_KEY = "type";
+  public static final String NAME_KEY = "name";
+  public static final String SERVICE_KEY = "service";
+  public static final String SUBSYSTEM_KEY = "sub";
+
+  /**
+   * Utility for getting metric values. Collection of static methods intended for
+   * easier access to metric values.
+   */
+  private JSONMetricUtil() {
+    // Not to be called; this is a static utility class.
+  }
+
+  public static MBeanAttributeInfo[] getMBeanAttributeInfo(ObjectName bean)
+      throws IntrospectionException, InstanceNotFoundException, ReflectionException,
+      javax.management.IntrospectionException {
+    MBeanInfo mbinfo = mbServer.getMBeanInfo(bean);
+    return mbinfo.getAttributes();
+  }
+
+  public static Object getValueFromMBean(ObjectName bean, String attribute) {
+    Object value = null;
+    try {
+      value = mbServer.getAttribute(bean, attribute);
+    } catch (Exception e) {
+      LOG.error("Unable to get value from MBean=" + bean.toString() +
+        " for attribute=" + attribute + ": " + e.getMessage());
+    }
+    return value;
+  }
+
+  /**
+   * Returns a subset of mbeans defined by qry.
+   * Modeled after {@link JSONBean#dumpRegionServerMetrics()}.
+   * Example: String qry = "java.lang:type=Memory"
+   * @return String representation of a JSON array.
+   * @throws MalformedObjectNameException if the query has a bad format
+   * @throws IOException if writing the JSON output fails
+   */
+  public static String dumpBeanToString(String qry)
+      throws MalformedObjectNameException, IOException {
+    StringWriter sw = new StringWriter(1024 * 100); // Guess this size
+    try (PrintWriter writer = new PrintWriter(sw)) {
+      JSONBean dumper = new JSONBean();
+      try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) {
+        MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
+        jsonBeanWriter.write(mbeanServer,
+          new ObjectName(qry), null, false);
+      }
+    }
+    sw.close();
+    return sw.toString();
+  }
+
+  public static JsonNode mappStringToJsonNode(String jsonString)
+      throws JsonProcessingException, IOException {
+    ObjectMapper mapper = new ObjectMapper();
+    JsonNode node = mapper.readTree(jsonString);
+    return node;
+  }
+
+
+  public static JsonNode searchJson(JsonNode tree, String searchKey)
+      throws JsonProcessingException, IOException {
+    if (tree == null) {
+      return null;
+    }
+    if(tree.has(searchKey)) {
+      return tree.get(searchKey);
+    }
+    if(tree.isContainerNode()) {
+      for(JsonNode branch: tree) {
+        JsonNode branchResult = searchJson(branch, searchKey);
+        if (branchResult != null && !branchResult.isMissingNode()) {
+          return branchResult;
+        }
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Method for building a Hashtable used for constructing an ObjectName.
+   * Keys and values are mapped to each other by array index.
+   * @param keys Hashtable keys
+   * @param values Hashtable values
+   * @return Hashtable, or null if the arrays are empty or differ in length
+   */
+  public static Hashtable<String, String> buldKeyValueTable(String[] keys, String[] values) {
+    if (keys.length != values.length) {
+      LOG.error("keys and values arrays must be same size");
+      return null;
+    }
+    if (keys.length == 0 || values.length == 0) {
+      LOG.error("keys and values arrays can not be empty;");
+      return null;
+    }
+    Hashtable<String, String> table = new Hashtable<String, String>();
+    for(int i = 0; i < keys.length; i++) {
+      table.put(keys[i], values[i]);
+    }
+    return table;
+  }
+
+  public static ObjectName buildObjectName(String pattern) throws MalformedObjectNameException {
+    return new ObjectName(pattern);
+  }
+
+  public static ObjectName buildObjectName(String domain, Hashtable<String, String> keyValueTable)
+      throws MalformedObjectNameException {
+    return new ObjectName(domain, keyValueTable);
+  }
+
+  public static Set<ObjectName> getRegistredMBeans(ObjectName name, MBeanServer mbs) {
+    return mbs.queryNames(name, null);
+  }
+
+  public static String getProcessPID() {
+    return ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
+  }
+
+  public static String getCommmand()
+      throws MalformedObjectNameException, IOException, JSONException {
+    RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
+    return runtimeBean.getSystemProperties().get("sun.java.command");
+  }
+
+  public static List<GarbageCollectorMXBean> getGcCollectorBeans() {
+    List<GarbageCollectorMXBean> gcBeans = ManagementFactory.getGarbageCollectorMXBeans();
+    return gcBeans;
+  }
+
+  public static long getLastGcDuration(ObjectName gcCollector) {
+    long lastGcDuration = 0;
+    Object lastGcInfo = getValueFromMBean(gcCollector, "LastGcInfo");
+    if (lastGcInfo instanceof CompositeData) { // instanceof already rejects null
+      CompositeData cds = (CompositeData)lastGcInfo;
+      lastGcDuration = (long) cds.get("duration");
+    }
+    return lastGcDuration;
+  }
+
+  public static List<MemoryPoolMXBean> getMemoryPools() {
+    List<MemoryPoolMXBean> mPools = ManagementFactory.getMemoryPoolMXBeans();
+    return mPools;
+  }
+
+  public static float calcPercentage(long a, long b) {
+    if (a == 0 || b == 0) {
+      return 0;
+    }
+    return ((float) a / (float) b) * 100;
+  }
+}

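For orientation, a minimal usage sketch (not part of the patch) of how these helpers chain together. "HeapMemoryUsage", "used", and "max" are standard keys of the java.lang:type=Memory MBean rather than anything defined above, and the checked exceptions the methods declare are assumed handled by the caller:

  // Illustrative only: fetch a single metric via the JSONMetricUtil helpers.
  String json = JSONMetricUtil.dumpBeanToString("java.lang:type=Memory");
  JsonNode root = JSONMetricUtil.mappStringToJsonNode(json);
  JsonNode heap = JSONMetricUtil.searchJson(root, "HeapMemoryUsage");
  if (heap != null) {
    long used = heap.get("used").asLong();
    long max = heap.get("max").asLong();
    System.out.println("heap used: " + JSONMetricUtil.calcPercentage(used, max) + "%");
  }
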
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java
index 08565e0..2014b5b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java
@@ -35,6 +35,7 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.log4j.Layout;
 import org.apache.log4j.Logger;
@@ -69,14 +70,14 @@ public abstract class GenericTestUtils {
   public static int uniqueSequenceId() {
     return sequence.incrementAndGet();
   }
-
+  
   /**
    * Assert that a given file exists.
    */
   public static void assertExists(File f) {
     Assert.assertTrue("File " + f + " should exist", f.exists());
   }
-
+    
   /**
    * List all of the files in 'dir' that match the regex 'pattern'.
    * Then check that this list is identical to 'expectedMatches'.
@@ -84,7 +85,7 @@ public abstract class GenericTestUtils {
    */
   public static void assertGlobEquals(File dir, String pattern,
       String ... expectedMatches) throws IOException {
-
+    
     Set<String> found = Sets.newTreeSet();
     for (File f : FileUtil.listFiles(dir)) {
       if (f.getName().matches(pattern)) {
@@ -97,6 +98,13 @@ public abstract class GenericTestUtils {
         Joiner.on(",").join(expectedSet),
         Joiner.on(",").join(found));
   }
+  
+  public static void assertExceptionContains(String string, Throwable t) {
+    String msg = t.getMessage();
+    Assert.assertTrue(
+        "Expected to find '" + string + "' but got unexpected exception:"
+        + StringUtils.stringifyException(t), msg.contains(string));
+  }  
 
   public static void waitFor(Supplier<Boolean> check,
       int checkEveryMillis, int waitForMillis)
@@ -108,26 +116,26 @@ public abstract class GenericTestUtils {
       if (result) {
         return;
       }
-
+      
       Thread.sleep(checkEveryMillis);
     } while (Time.now() - st < waitForMillis);
-
+    
     throw new TimeoutException("Timed out waiting for condition. " +
         "Thread diagnostics:\n" +
         TimedOutTestsListener.buildThreadDiagnosticString());
   }
-
+  
   public static class LogCapturer {
     private StringWriter sw = new StringWriter();
     private WriterAppender appender;
     private Logger logger;
-
+    
     public static LogCapturer captureLogs(Log l) {
       Logger logger = ((Log4JLogger)l).getLogger();
       LogCapturer c = new LogCapturer(logger);
       return c;
     }
-
+    
 
     private LogCapturer(Logger logger) {
       this.logger = logger;
@@ -135,36 +143,36 @@ public abstract class GenericTestUtils {
       WriterAppender wa = new WriterAppender(layout, sw);
       logger.addAppender(wa);
     }
-
+    
     public String getOutput() {
       return sw.toString();
     }
-
+    
     public void stopCapturing() {
       logger.removeAppender(appender);
 
     }
   }
-
-
+  
+  
   /**
    * Mockito answer helper that triggers one latch as soon as the
    * method is called, then waits on another before continuing.
    */
   public static class DelayAnswer implements Answer<Object> {
     private final Log LOG;
-
+    
     private final CountDownLatch fireLatch = new CountDownLatch(1);
     private final CountDownLatch waitLatch = new CountDownLatch(1);
     private final CountDownLatch resultLatch = new CountDownLatch(1);
-
+    
     private final AtomicInteger fireCounter = new AtomicInteger(0);
     private final AtomicInteger resultCounter = new AtomicInteger(0);
-
+    
     // Result fields set after proceed() is called.
     private volatile Throwable thrown;
     private volatile Object returnValue;
-
+    
     public DelayAnswer(Log log) {
       this.LOG = log;
     }
@@ -175,7 +183,7 @@ public abstract class GenericTestUtils {
     public void waitForCall() throws InterruptedException {
       fireLatch.await();
     }
-
+  
     /**
      * Tell the method to proceed.
      * This should only be called after waitForCall()
@@ -183,7 +191,7 @@ public abstract class GenericTestUtils {
     public void proceed() {
       waitLatch.countDown();
     }
-
+  
     @Override
     public Object answer(InvocationOnMock invocation) throws Throwable {
       LOG.info("DelayAnswer firing fireLatch");
@@ -212,7 +220,7 @@ public abstract class GenericTestUtils {
         resultLatch.countDown();
       }
     }
-
+    
     /**
      * After calling proceed(), this will wait until the call has
      * completed and a result has been returned to the caller.
@@ -220,7 +228,7 @@ public abstract class GenericTestUtils {
     public void waitForResult() throws InterruptedException {
       resultLatch.await();
     }
-
+    
     /**
      * After the call has gone through, return any exception that
      * was thrown, or null if no exception was thrown.
@@ -228,7 +236,7 @@ public abstract class GenericTestUtils {
     public Throwable getThrown() {
       return thrown;
     }
-
+    
     /**
      * After the call has gone through, return the call's return value,
      * or null in case it was void or an exception was thrown.
@@ -236,20 +244,20 @@ public abstract class GenericTestUtils {
     public Object getReturnValue() {
       return returnValue;
     }
-
+    
     public int getFireCount() {
       return fireCounter.get();
     }
-
+    
     public int getResultCount() {
       return resultCounter.get();
     }
   }
-
+  
   /**
    * An Answer implementation that simply forwards all calls through
    * to a delegate.
-   *
+   * 
    * This is useful as the default Answer for a mock object, to create
    * something like a spy on an RPC proxy. For example:
    * <code>
@@ -260,14 +268,14 @@ public abstract class GenericTestUtils {
    *    ...
    * </code>
    */
-  public static class DelegateAnswer implements Answer<Object> {
+  public static class DelegateAnswer implements Answer<Object> { 
     private final Object delegate;
     private final Log log;
-
+    
     public DelegateAnswer(Object delegate) {
       this(null, delegate);
     }
-
+    
     public DelegateAnswer(Log log, Object delegate) {
       this.log = log;
       this.delegate = delegate;
@@ -297,11 +305,11 @@ public abstract class GenericTestUtils {
   public static class SleepAnswer implements Answer<Object> {
     private final int maxSleepTime;
     private static Random r = new Random();
-
+    
     public SleepAnswer(int maxSleepTime) {
       this.maxSleepTime = maxSleepTime;
     }
-
+    
     @Override
     public Object answer(InvocationOnMock invocation) throws Throwable {
       boolean interrupted = false;
@@ -325,11 +333,11 @@ public abstract class GenericTestUtils {
         " but got:\n" + output,
         Pattern.compile(pattern).matcher(output).find());
   }
-
+  
   public static void assertValueNear(long expected, long actual, long allowedError) {
     assertValueWithinRange(expected - allowedError, expected + allowedError, actual);
   }
-
+  
   public static void assertValueWithinRange(long expectedMin, long expectedMax,
       long actual) {
     Assert.assertTrue("Expected " + actual + " to be in range (" + expectedMin + ","
@@ -344,7 +352,7 @@ public abstract class GenericTestUtils {
   public static void assertNoThreadsMatching(String regex) {
     Pattern pattern = Pattern.compile(regex);
     ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
-
+    
     ThreadInfo[] infos = threadBean.getThreadInfo(threadBean.getAllThreadIds(), 20);
     for (ThreadInfo info : infos) {
       if (info == null) continue;

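For reference, a short sketch (not part of the patch) of how the newly added assertExceptionContains helper is typically used; doSomethingThatFails() is a hypothetical method under test:

  // Hypothetical usage of GenericTestUtils.assertExceptionContains.
  try {
    doSomethingThatFails(); // hypothetical; expected to throw
    Assert.fail("expected an IOException");
  } catch (IOException e) {
    GenericTestUtils.assertExceptionContains("Forced IOException", e);
  }
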
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
new file mode 100644
index 0000000..7d610e4
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
@@ -0,0 +1,272 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hbase.http;
+
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.junit.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.http.HttpServer.Builder;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ServerSocket;
+import java.net.URI;
+import java.net.URL;
+import java.net.MalformedURLException;
+
+/**
+ * This is a base class for functional tests of the {@link HttpServer}.
+ * The methods are static for other classes to import statically.
+ */
+public class HttpServerFunctionalTest extends Assert {
+  /** JVM property for the webapp test dir : {@value} */
+  public static final String TEST_BUILD_WEBAPPS = "test.build.webapps";
+  /** expected location of the test.build.webapps dir: {@value} */
+  private static final String BUILD_WEBAPPS_DIR = "src/main/resources/hbase-webapps";
+  
+  /** name of the test webapp: {@value} */
+  private static final String TEST = "test";
+
+  /**
+   * Create but do not start the test webapp server. The test webapp dir is
+   * prepared/checked in advance.
+   *
+   * @return the server instance
+   *
+   * @throws IOException if a problem occurs
+   * @throws AssertionError if a condition was not met
+   */
+  public static HttpServer createTestServer() throws IOException {
+    prepareTestWebapp();
+    return createServer(TEST);
+  }
+
+  /**
+   * Create but do not start the test webapp server. The test webapp dir is
+   * prepared/checked in advance.
+   * @param conf the server configuration to use
+   * @return the server instance
+   *
+   * @throws IOException if a problem occurs
+   * @throws AssertionError if a condition was not met
+   */
+  public static HttpServer createTestServer(Configuration conf)
+      throws IOException {
+    prepareTestWebapp();
+    return createServer(TEST, conf);
+  }
+
+  public static HttpServer createTestServer(Configuration conf, AccessControlList adminsAcl)
+      throws IOException {
+    prepareTestWebapp();
+    return createServer(TEST, conf, adminsAcl);
+  }
+
+  /**
+   * Create but do not start the test webapp server. The test webapp dir is
+   * prepared/checked in advance.
+   * @param conf the server configuration to use
+   * @param pathSpecs the paths specifications the server will service
+   * @return the server instance
+   *
+   * @throws IOException if a problem occurs
+   * @throws AssertionError if a condition was not met
+   */
+  public static HttpServer createTestServer(Configuration conf, 
+      String[] pathSpecs) throws IOException {
+    prepareTestWebapp();
+    return createServer(TEST, conf, pathSpecs);
+  }
+
+  public static HttpServer createTestServerWithSecurity(Configuration conf) throws IOException {
+    prepareTestWebapp();
+    return localServerBuilder(TEST).setFindPort(true).setConf(conf).setSecurityEnabled(true)
+        // InfoServer normally sets these for us
+        .setUsernameConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY)
+        .setKeytabConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY)
+        .build();
+  }
+
+  /**
+   * Prepare the test webapp by checking that the directory named by the test
+   * properties exists; fail if it is missing.
+   * @throws AssertionError if a condition was not met
+   */
+  protected static void prepareTestWebapp() {
+    String webapps = System.getProperty(TEST_BUILD_WEBAPPS, BUILD_WEBAPPS_DIR);
+    File testWebappDir = new File(webapps + File.separatorChar + TEST);
+    try {
+      if (!testWebappDir.exists()) {
+        fail("Test webapp dir " + testWebappDir.getCanonicalPath() + " missing");
+      }
+    } catch (IOException e) {
+      // Ignore: getCanonicalPath() failed while building the failure message.
+    }
+  }
+
+  /**
+   * Create an HttpServer instance on the given address for the given webapp
+   * @param host to bind
+   * @param port to bind
+   * @return the server
+   * @throws IOException if it could not be created
+   */
+  public static HttpServer createServer(String host, int port)
+      throws IOException {
+    prepareTestWebapp();
+    return new HttpServer.Builder().setName(TEST)
+        .addEndpoint(URI.create("http://" + host + ":" + port))
+        .setFindPort(true).build();
+  }
+
+  /**
+   * Create an HttpServer instance for the given webapp
+   * @param webapp the webapp to work with
+   * @return the server
+   * @throws IOException if it could not be created
+   */
+  public static HttpServer createServer(String webapp) throws IOException {
+    return localServerBuilder(webapp).setFindPort(true).build();
+  }
+  /**
+   * Create an HttpServer instance for the given webapp
+   * @param webapp the webapp to work with
+   * @param conf the configuration to use for the server
+   * @return the server
+   * @throws IOException if it could not be created
+   */
+  public static HttpServer createServer(String webapp, Configuration conf)
+      throws IOException {
+    return localServerBuilder(webapp).setFindPort(true).setConf(conf).build();
+  }
+
+  public static HttpServer createServer(String webapp, Configuration conf, AccessControlList adminsAcl)
+      throws IOException {
+    return localServerBuilder(webapp).setFindPort(true).setConf(conf).setACL(adminsAcl).build();
+  }
+
+  private static Builder localServerBuilder(String webapp) {
+    return new HttpServer.Builder().setName(webapp).addEndpoint(
+        URI.create("http://localhost:0"));
+  }
+  
+  /**
+   * Create an HttpServer instance for the given webapp
+   * @param webapp the webapp to work with
+   * @param conf the configuration to use for the server
+   * @param pathSpecs the paths specifications the server will service
+   * @return the server
+   * @throws IOException if it could not be created
+   */
+  public static HttpServer createServer(String webapp, Configuration conf,
+      String[] pathSpecs) throws IOException {
+    return localServerBuilder(webapp).setFindPort(true).setConf(conf).setPathSpec(pathSpecs).build();
+  }
+
+  /**
+   * Create and start a server with the test webapp
+   *
+   * @return the newly started server
+   *
+   * @throws IOException on any failure
+   * @throws AssertionError if a condition was not met
+   */
+  public static HttpServer createAndStartTestServer() throws IOException {
+    HttpServer server = createTestServer();
+    server.start();
+    return server;
+  }
+
+  /**
+   * If the server is non null, stop it
+   * @param server to stop
+   * @throws Exception on any failure
+   */
+  public static void stop(HttpServer server) throws Exception {
+    if (server != null) {
+      server.stop();
+    }
+  }
+
+  /**
+   * Pass in a server, return a URL bound to localhost and its port
+   * @param server server
+   * @return a URL bound to the base of the server
+   * @throws MalformedURLException if the URL cannot be created.
+   */
+  public static URL getServerURL(HttpServer server)
+      throws MalformedURLException {
+    assertNotNull("No server", server);
+    return new URL("http://"
+        + NetUtils.getHostPortString(server.getConnectorAddress(0)));
+  }
+
+  /**
+   * Read in the content from a URL
+   * @param url URL To read
+   * @return the text from the output
+   * @throws IOException if something went wrong
+   */
+  protected static String readOutput(URL url) throws IOException {
+    StringBuilder out = new StringBuilder();
+    InputStream in = url.openConnection().getInputStream();
+    byte[] buffer = new byte[64 * 1024];
+    int len = in.read(buffer);
+    while (len > 0) {
+      out.append(new String(buffer, 0, len));
+      len = in.read(buffer);
+    }
+    return out.toString();
+  }
+
+  /**
+   * Recursively deletes a {@link File}.
+   */
+  protected static void deleteRecursively(File d) {
+    if (d.isDirectory()) {
+      for (String name : d.list()) {
+        File child = new File(d, name);
+        if (child.isFile()) {
+          child.delete();
+        } else {
+          deleteRecursively(child);
+        }
+      }
+    }
+    d.delete();
+  }
+
+  /**
+   * Picks a free port on the host by binding a Socket to '0'.
+   */
+  protected static int getFreePort() throws IOException {
+    ServerSocket s = new ServerSocket(0);
+    try {
+      s.setReuseAddress(true);
+      int port = s.getLocalPort();
+      return port;
+    } finally {
+      if (null != s) {
+        s.close();
+      }
+    }
+  }
+}

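As a rough sketch of the start/exercise/stop flow these helpers support (the test classes below follow the same pattern; checked exceptions are assumed handled by the test framework):

  // Minimal sketch, assuming the static helpers above are visible.
  HttpServer server = createAndStartTestServer();
  try {
    URL base = getServerURL(server);
    String body = readOutput(new URL(base, "/")); // fetch the webapp root
    assertNotNull("expected some response body", body);
  } finally {
    stop(server);
  }
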
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
new file mode 100644
index 0000000..acfe929
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Set;
+import java.util.TreeSet;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.net.NetUtils;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestGlobalFilter extends HttpServerFunctionalTest {
+  private static final Log LOG = LogFactory.getLog(HttpServer.class);
+  static final Set<String> RECORDS = new TreeSet<>();
+
+  /** A very simple filter that records accessed URIs. */
+  static public class RecordingFilter implements Filter {
+    private FilterConfig filterConfig = null;
+
+    @Override
+    public void init(FilterConfig filterConfig) {
+      this.filterConfig = filterConfig;
+    }
+
+    @Override
+    public void destroy() {
+      this.filterConfig = null;
+    }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain chain) throws IOException, ServletException {
+      if (filterConfig == null) {
+        return;
+      }
+
+      String uri = ((HttpServletRequest)request).getRequestURI();
+      LOG.info("filtering " + uri);
+      RECORDS.add(uri);
+      chain.doFilter(request, response);
+    }
+
+    /** Configuration for RecordingFilter */
+    static public class Initializer extends FilterInitializer {
+      public Initializer() {}
+
+      @Override
+      public void initFilter(FilterContainer container, Configuration conf) {
+        container.addGlobalFilter("recording", RecordingFilter.class.getName(), null);
+      }
+    }
+  }
+  
+  
+  /** Access a URL, ignoring IOExceptions such as the page not existing. */
+  static void access(String urlstring) throws IOException {
+    LOG.warn("access " + urlstring);
+    URL url = new URL(urlstring);
+    URLConnection connection = url.openConnection();
+    connection.connect();
+    
+    try {
+      BufferedReader in = new BufferedReader(new InputStreamReader(
+          connection.getInputStream()));
+      try {
+        for(; in.readLine() != null; ); // drain the response
+      } finally {
+        in.close();
+      }
+    } catch(IOException ioe) {
+      LOG.warn("urlstring=" + urlstring, ioe);
+    }
+  }
+
+  @Test
+  public void testServletFilter() throws Exception {
+    Configuration conf = new Configuration();
+    
+    // start an HTTP server with RecordingFilter
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        RecordingFilter.Initializer.class.getName());
+    HttpServer http = createTestServer(conf);
+    http.start();
+
+    final String fsckURL = "/fsck";
+    final String stacksURL = "/stacks";
+    final String ajspURL = "/a.jsp";
+    final String listPathsURL = "/listPaths";
+    final String dataURL = "/data";
+    final String streamFile = "/streamFile";
+    final String rootURL = "/";
+    final String allURL = "/*";
+    final String outURL = "/static/a.out";
+    final String logURL = "/logs/a.log";
+
+    final String[] urls = {fsckURL, stacksURL, ajspURL, listPathsURL, 
+        dataURL, streamFile, rootURL, allURL, outURL, logURL};
+
+    //access the urls
+    final String prefix = "http://"
+        + NetUtils.getHostPortString(http.getConnectorAddress(0));
+    try {
+      for(int i = 0; i < urls.length; i++) {
+        access(prefix + urls[i]);
+      }
+    } finally {
+      http.stop();
+    }
+
+    LOG.info("RECORDS = " + RECORDS);
+    
+    //verify records
+    for(int i = 0; i < urls.length; i++) {
+      assertTrue(RECORDS.remove(urls[i]));
+    }
+    assertTrue(RECORDS.isEmpty());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
new file mode 100644
index 0000000..82fbe04
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import static org.junit.Assert.*;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestHtmlQuoting {
+
+  @Test public void testNeedsQuoting() throws Exception {
+    assertTrue(HtmlQuoting.needsQuoting("abcde>"));
+    assertTrue(HtmlQuoting.needsQuoting("<abcde"));
+    assertTrue(HtmlQuoting.needsQuoting("abc'de"));
+    assertTrue(HtmlQuoting.needsQuoting("abcde\""));
+    assertTrue(HtmlQuoting.needsQuoting("&"));
+    assertFalse(HtmlQuoting.needsQuoting(""));
+    assertFalse(HtmlQuoting.needsQuoting("ab\ncdef"));
+    assertFalse(HtmlQuoting.needsQuoting(null));
+  }
+
+  @Test public void testQuoting() throws Exception {
+    assertEquals("ab&lt;cd", HtmlQuoting.quoteHtmlChars("ab<cd"));
+    assertEquals("ab&gt;", HtmlQuoting.quoteHtmlChars("ab>"));
+    assertEquals("&amp;&amp;&amp;", HtmlQuoting.quoteHtmlChars("&&&"));
+    assertEquals(" &apos;\n", HtmlQuoting.quoteHtmlChars(" '\n"));
+    assertEquals("&quot;", HtmlQuoting.quoteHtmlChars("\""));
+    assertEquals(null, HtmlQuoting.quoteHtmlChars(null));
+  }
+
+  private void runRoundTrip(String str) throws Exception {
+    assertEquals(str, 
+                 HtmlQuoting.unquoteHtmlChars(HtmlQuoting.quoteHtmlChars(str)));
+  }
+  
+  @Test public void testRoundtrip() throws Exception {
+    runRoundTrip("");
+    runRoundTrip("<>&'\"");
+    runRoundTrip("ab>cd<ef&ghi'\"");
+    runRoundTrip("A string\n with no quotable chars in it!");
+    runRoundTrip(null);
+    StringBuilder buffer = new StringBuilder();
+    for(char ch=0; ch < 127; ++ch) {
+      buffer.append(ch);
+    }
+    runRoundTrip(buffer.toString());
+  }
+  
+
+  @Test
+  public void testRequestQuoting() throws Exception {
+    HttpServletRequest mockReq = Mockito.mock(HttpServletRequest.class);
+    HttpServer.QuotingInputFilter.RequestQuoter quoter =
+      new HttpServer.QuotingInputFilter.RequestQuoter(mockReq);
+    
+    Mockito.doReturn("a<b").when(mockReq).getParameter("x");
+    assertEquals("Test simple param quoting",
+        "a&lt;b", quoter.getParameter("x"));
+    
+    Mockito.doReturn(null).when(mockReq).getParameter("x");
+    assertEquals("Test that missing parameters dont cause NPE",
+        null, quoter.getParameter("x"));
+
+    Mockito.doReturn(new String[]{"a<b", "b"}).when(mockReq).getParameterValues("x");
+    assertArrayEquals("Test escaping of an array",
+        new String[]{"a&lt;b", "b"}, quoter.getParameterValues("x"));
+
+    Mockito.doReturn(null).when(mockReq).getParameterValues("x");
+    assertArrayEquals("Test that missing parameters dont cause NPE for array",
+        null, quoter.getParameterValues("x"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
new file mode 100644
index 0000000..b8d21d1
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.log4j.Logger;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import org.eclipse.jetty.server.RequestLog;
+import org.eclipse.jetty.server.NCSARequestLog;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestHttpRequestLog {
+
+  @Test
+  public void testAppenderUndefined() {
+    RequestLog requestLog = HttpRequestLog.getRequestLog("test");
+    assertNull("RequestLog should be null", requestLog);
+  }
+
+  @Test
+  public void testAppenderDefined() {
+    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
+    requestLogAppender.setName("testrequestlog");
+    Logger.getLogger("http.requests.test").addAppender(requestLogAppender);
+    RequestLog requestLog = HttpRequestLog.getRequestLog("test");
+    Logger.getLogger("http.requests.test").removeAppender(requestLogAppender);
+    assertNotNull("RequestLog should not be null", requestLog);
+    assertEquals("Class mismatch", NCSARequestLog.class, requestLog.getClass());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
new file mode 100644
index 0000000..a17b9e9
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import static org.junit.Assert.assertEquals;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestHttpRequestLogAppender {
+
+  @Test
+  public void testParameterPropagation() {
+
+    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
+    requestLogAppender.setFilename("jetty-namenode-yyyy_mm_dd.log");
+    requestLogAppender.setRetainDays(17);
+    assertEquals("Filename mismatch", "jetty-namenode-yyyy_mm_dd.log",
+        requestLogAppender.getFilename());
+    assertEquals("Retain days mismatch", 17,
+        requestLogAppender.getRetainDays());
+  }
+}


[13/13] hbase git commit: WIP do not push. placeholder disabling of long-running unit tests.

Posted by bu...@apache.org.
WIP do not push. placeholder disabling of long-running unit tests.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c192b9bb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c192b9bb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c192b9bb

Branch: refs/heads/HBASE-19124
Commit: c192b9bb6039434e07e2888800d11f14cde92176
Parents: 851f239
Author: Sean Busbey <bu...@apache.org>
Authored: Wed Nov 1 17:13:05 2017 -0500
Committer: Sean Busbey <bu...@apache.org>
Committed: Wed Nov 1 19:15:46 2017 -0500

----------------------------------------------------------------------
 dev-support/Jenkinsfile | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/c192b9bb/dev-support/Jenkinsfile
----------------------------------------------------------------------
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 5bd41aa..50b9a50 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -128,7 +128,9 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
       steps {
         unstash 'yetus'
         // TODO should this be a download from master, similar to how the personality is?
-        sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh"
+        // sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh"
+        sh "mkdir -p output-general"
+        sh "echo '<html><body>passed</body></html>' >output-general/console-report.html"
       }
       post {
         always {
@@ -215,7 +217,9 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
       }
       steps {
         unstash 'yetus'
-        sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh"
+        //sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh"
+        sh "mkdir -p output-jdk8"
+        sh "echo '<html><body>passed</body></html>' >output-jdk8/console-report.html"
       }
       post {
         always {


[12/13] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

Posted by bu...@apache.org.
Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

This reverts commit 3969b853b272c9d898024b3e86308c964e6fe6d0.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/851f239f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/851f239f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/851f239f

Branch: refs/heads/HBASE-19124
Commit: 851f239f1e8288e099b76cc69e54c14f84041857
Parents: 6ea4288
Author: Sean Busbey <bu...@apache.org>
Authored: Wed Nov 1 09:19:06 2017 -0500
Committer: Sean Busbey <bu...@apache.org>
Committed: Wed Nov 1 19:14:30 2017 -0500

----------------------------------------------------------------------
 hbase-common/pom.xml                            |    4 -
 .../org/apache/hadoop/hbase/util/JSONBean.java  |  351 -----
 .../hadoop/hbase/util/JSONMetricUtil.java       |  214 ---
 hbase-endpoint/pom.xml                          |    6 -
 hbase-http/pom.xml                              |  515 -------
 .../hbase/http/AdminAuthorizedServlet.java      |   49 -
 .../http/ClickjackingPreventionFilter.java      |   55 -
 .../hadoop/hbase/http/FilterContainer.java      |   41 -
 .../hadoop/hbase/http/FilterInitializer.java    |   32 -
 .../apache/hadoop/hbase/http/HtmlQuoting.java   |  215 ---
 .../apache/hadoop/hbase/http/HttpConfig.java    |   80 -
 .../hadoop/hbase/http/HttpRequestLog.java       |   93 --
 .../hbase/http/HttpRequestLogAppender.java      |   63 -
 .../apache/hadoop/hbase/http/HttpServer.java    | 1387 ------------------
 .../hadoop/hbase/http/HttpServerUtil.java       |   52 -
 .../apache/hadoop/hbase/http/InfoServer.java    |  112 --
 .../apache/hadoop/hbase/http/NoCacheFilter.java |   56 -
 .../hbase/http/ServerConfigurationKeys.java     |   47 -
 .../hadoop/hbase/http/conf/ConfServlet.java     |  107 --
 .../hadoop/hbase/http/jmx/JMXJsonServlet.java   |  240 ---
 .../hadoop/hbase/http/jmx/package-info.java     |   26 -
 .../hbase/http/lib/StaticUserWebFilter.java     |  155 --
 .../hadoop/hbase/http/lib/package-info.java     |   38 -
 .../apache/hadoop/hbase/http/log/LogLevel.java  |  175 ---
 .../apache/hadoop/hbase/http/package-info.java  |   27 -
 .../hbase/http/HttpServerFunctionalTest.java    |  272 ----
 .../hadoop/hbase/http/TestGlobalFilter.java     |  151 --
 .../hadoop/hbase/http/TestHtmlQuoting.java      |   94 --
 .../hadoop/hbase/http/TestHttpRequestLog.java   |   52 -
 .../hbase/http/TestHttpRequestLogAppender.java  |   41 -
 .../hadoop/hbase/http/TestHttpServer.java       |  617 --------
 .../hbase/http/TestHttpServerLifecycle.java     |  135 --
 .../hbase/http/TestHttpServerWebapps.java       |   68 -
 .../hadoop/hbase/http/TestPathFilter.java       |  155 --
 .../hadoop/hbase/http/TestSSLHttpServer.java    |  124 --
 .../hadoop/hbase/http/TestServletFilter.java    |  217 ---
 .../hadoop/hbase/http/TestSpnegoHttpServer.java |  258 ----
 .../hadoop/hbase/http/conf/TestConfServlet.java |  116 --
 .../hbase/http/jmx/TestJMXJsonServlet.java      |  134 --
 .../hbase/http/lib/TestStaticUserWebFilter.java |   86 --
 .../hadoop/hbase/http/log/TestLogLevel.java     |   92 --
 .../hbase/http/resource/JerseyResource.java     |   64 -
 .../hadoop/hbase/http/ssl/KeyStoreTestUtil.java |  342 -----
 hbase-http/src/test/resources/log4j.properties  |   68 -
 .../src/test/resources/webapps/static/test.css  |   21 -
 .../src/test/resources/webapps/test/testjsp.jsp |   21 -
 .../apache/hadoop/hbase/rest/RESTServer.java    |    4 +-
 .../hbase/rest/HBaseRESTTestingUtility.java     |    4 +-
 hbase-server/pom.xml                            |   45 +-
 .../hbase/http/AdminAuthorizedServlet.java      |   49 +
 .../http/ClickjackingPreventionFilter.java      |   55 +
 .../hadoop/hbase/http/FilterContainer.java      |   41 +
 .../hadoop/hbase/http/FilterInitializer.java    |   32 +
 .../apache/hadoop/hbase/http/HtmlQuoting.java   |  215 +++
 .../apache/hadoop/hbase/http/HttpConfig.java    |   80 +
 .../hadoop/hbase/http/HttpRequestLog.java       |   93 ++
 .../hbase/http/HttpRequestLogAppender.java      |   63 +
 .../apache/hadoop/hbase/http/HttpServer.java    | 1387 ++++++++++++++++++
 .../apache/hadoop/hbase/http/InfoServer.java    |  112 ++
 .../apache/hadoop/hbase/http/NoCacheFilter.java |   56 +
 .../hbase/http/ServerConfigurationKeys.java     |   47 +
 .../hadoop/hbase/http/conf/ConfServlet.java     |  107 ++
 .../hadoop/hbase/http/jmx/JMXJsonServlet.java   |  240 +++
 .../hadoop/hbase/http/jmx/package-info.java     |   26 +
 .../hbase/http/lib/StaticUserWebFilter.java     |  155 ++
 .../hadoop/hbase/http/lib/package-info.java     |   38 +
 .../apache/hadoop/hbase/http/log/LogLevel.java  |  175 +++
 .../apache/hadoop/hbase/http/package-info.java  |   27 +
 .../regionserver/DumpRegionServerMetrics.java   |   60 -
 .../hbase/regionserver/HRegionServer.java       |    3 +-
 .../hadoop/hbase/util/HttpServerUtil.java       |   52 +
 .../org/apache/hadoop/hbase/util/JSONBean.java  |  387 +++++
 .../hadoop/hbase/util/JSONMetricUtil.java       |  214 +++
 .../apache/hadoop/hbase/GenericTestUtils.java   |   74 +-
 .../hbase/http/HttpServerFunctionalTest.java    |  272 ++++
 .../hadoop/hbase/http/TestGlobalFilter.java     |  151 ++
 .../hadoop/hbase/http/TestHtmlQuoting.java      |   94 ++
 .../hadoop/hbase/http/TestHttpRequestLog.java   |   52 +
 .../hbase/http/TestHttpRequestLogAppender.java  |   41 +
 .../hadoop/hbase/http/TestHttpServer.java       |  617 ++++++++
 .../hbase/http/TestHttpServerLifecycle.java     |  135 ++
 .../hbase/http/TestHttpServerWebapps.java       |   68 +
 .../hadoop/hbase/http/TestPathFilter.java       |  155 ++
 .../hadoop/hbase/http/TestSSLHttpServer.java    |  124 ++
 .../hadoop/hbase/http/TestServletFilter.java    |  210 +++
 .../hadoop/hbase/http/TestSpnegoHttpServer.java |  258 ++++
 .../hadoop/hbase/http/conf/TestConfServlet.java |  116 ++
 .../hbase/http/jmx/TestJMXJsonServlet.java      |  134 ++
 .../hbase/http/lib/TestStaticUserWebFilter.java |   86 ++
 .../hadoop/hbase/http/log/TestLogLevel.java     |   92 ++
 .../hbase/http/resource/JerseyResource.java     |   64 +
 .../hadoop/hbase/http/ssl/KeyStoreTestUtil.java |  342 +++++
 hbase-shaded/hbase-shaded-mapreduce/pom.xml     |    4 -
 pom.xml                                         |   23 -
 94 files changed, 6745 insertions(+), 7402 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-common/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index b732bbe..93a09b3 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -264,10 +264,6 @@
       <artifactId>findbugs-annotations</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
-    </dependency>
-    <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
deleted file mode 100644
index 0571a08..0000000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
+++ /dev/null
@@ -1,351 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.util;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.lang.management.ManagementFactory;
-import java.lang.reflect.Array;
-import java.util.Iterator;
-import java.util.Set;
-
-import javax.management.AttributeNotFoundException;
-import javax.management.InstanceNotFoundException;
-import javax.management.IntrospectionException;
-import javax.management.MBeanAttributeInfo;
-import javax.management.MBeanException;
-import javax.management.MBeanInfo;
-import javax.management.MBeanServer;
-import javax.management.MalformedObjectNameException;
-import javax.management.ObjectName;
-import javax.management.ReflectionException;
-import javax.management.RuntimeErrorException;
-import javax.management.RuntimeMBeanException;
-import javax.management.openmbean.CompositeData;
-import javax.management.openmbean.CompositeType;
-import javax.management.openmbean.TabularData;
-
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.core.JsonGenerator;
-import com.fasterxml.jackson.core.JsonGenerationException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * Utility for doing JSON and MBeans.
- */
-public class JSONBean {
-  private static final Log LOG = LogFactory.getLog(JSONBean.class);
-  private final JsonFactory jsonFactory;
-
-  public JSONBean() {
-    this.jsonFactory = new JsonFactory();
-  }
-
-  /**
-   * Use dumping out mbeans as JSON.
-   */
-  public interface Writer extends Closeable {
-    void write(final String key, final String value) throws JsonGenerationException, IOException;
-    int write(final MBeanServer mBeanServer, ObjectName qry, String attribute,
-        final boolean description) throws IOException;
-    void flush() throws IOException;
-  }
-
-  public Writer open(final PrintWriter writer) throws IOException {
-    final JsonGenerator jg = jsonFactory.createJsonGenerator(writer);
-    jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
-    jg.useDefaultPrettyPrinter();
-    jg.writeStartObject();
-    return new Writer() {
-      @Override
-      public void flush() throws IOException {
-        jg.flush();
-      }
-
-      @Override
-      public void close() throws IOException {
-        jg.close();
-      }
-
-      @Override
-      public void write(String key, String value) throws JsonGenerationException, IOException {
-        jg.writeStringField(key, value);
-      }
-
-      @Override
-      public int write(MBeanServer mBeanServer, ObjectName qry, String attribute,
-          boolean description)
-      throws IOException {
-        return JSONBean.write(jg, mBeanServer, qry, attribute, description);
-      }
-    };
-  }
-
-  /**
-   * @return Non-zero if failed to find the bean; 0 otherwise.
-   */
-  private static int write(final JsonGenerator jg,
-      final MBeanServer mBeanServer, ObjectName qry, String attribute,
-      final boolean description)
-  throws IOException {
-    LOG.trace("Listing beans for "+qry);
-    Set<ObjectName> names = null;
-    names = mBeanServer.queryNames(qry, null);
-    jg.writeArrayFieldStart("beans");
-    Iterator<ObjectName> it = names.iterator();
-    while (it.hasNext()) {
-      ObjectName oname = it.next();
-      MBeanInfo minfo;
-      String code = "";
-      String descriptionStr = null;
-      Object attributeinfo = null;
-      try {
-        minfo = mBeanServer.getMBeanInfo(oname);
-        code = minfo.getClassName();
-        if (description) descriptionStr = minfo.getDescription();
-        String prs = "";
-        try {
-          if ("org.apache.commons.modeler.BaseModelMBean".equals(code)) {
-            prs = "modelerType";
-            code = (String) mBeanServer.getAttribute(oname, prs);
-          }
-          if (attribute != null) {
-            prs = attribute;
-            attributeinfo = mBeanServer.getAttribute(oname, prs);
-          }
-        } catch (RuntimeMBeanException e) {
-         // UnsupportedOperationExceptions happen in the normal course of business,
-         // so no need to log them as errors all the time.
-         if (e.getCause() instanceof UnsupportedOperationException) {
-           if (LOG.isTraceEnabled()) {
-             LOG.trace("Getting attribute " + prs + " of " + oname + " threw " + e);
-           }
-         } else {
-           LOG.error("Getting attribute " + prs + " of " + oname + " threw an exception", e);
-         }
-         return 0;
-        } catch (AttributeNotFoundException e) {
-          // If the modelerType attribute was not found, the class name is used
-          // instead.
-          LOG.error("getting attribute " + prs + " of " + oname
-              + " threw an exception", e);
-        } catch (MBeanException e) {
-          // The code inside the attribute getter threw an exception so log it,
-          // and fall back on the class name
-          LOG.error("getting attribute " + prs + " of " + oname
-              + " threw an exception", e);
-        } catch (RuntimeException e) {
-          // For some reason even with an MBeanException available to them
-          // Runtime exceptions can still find their way through, so treat them
-          // the same as MBeanException
-          LOG.error("getting attribute " + prs + " of " + oname
-              + " threw an exception", e);
-        } catch (ReflectionException e) {
-          // This happens when the code inside the JMX bean (setter?? from the
-          // java docs) threw an exception, so log it and fall back on the
-          // class name
-          LOG.error("getting attribute " + prs + " of " + oname
-              + " threw an exception", e);
-        }
-      } catch (InstanceNotFoundException e) {
-        // Ignored: for some reason the bean was not found, so don't output it
-        continue;
-      } catch (IntrospectionException e) {
-        // This is an internal error, something odd happened with reflection so
-        // log it and don't output the bean.
-        LOG.error("Problem while trying to process JMX query: " + qry
-            + " with MBean " + oname, e);
-        continue;
-      } catch (ReflectionException e) {
-        // This happens when the code inside the JMX bean threw an exception, so
-        // log it and don't output the bean.
-        LOG.error("Problem while trying to process JMX query: " + qry
-            + " with MBean " + oname, e);
-        continue;
-      }
-
-      jg.writeStartObject();
-      jg.writeStringField("name", oname.toString());
-      if (description && descriptionStr != null && descriptionStr.length() > 0) {
-        jg.writeStringField("description", descriptionStr);
-      }
-      jg.writeStringField("modelerType", code);
-      if (attribute != null && attributeinfo == null) {
-        jg.writeStringField("result", "ERROR");
-        jg.writeStringField("message", "No attribute with name " + attribute + " was found.");
-        jg.writeEndObject();
-        jg.writeEndArray();
-        jg.close();
-        return -1;
-      }
-
-      if (attribute != null) {
-        writeAttribute(jg, attribute, descriptionStr, attributeinfo);
-      } else {
-        MBeanAttributeInfo[] attrs = minfo.getAttributes();
-        for (int i = 0; i < attrs.length; i++) {
-          writeAttribute(jg, mBeanServer, oname, description, attrs[i]);
-        }
-      }
-      jg.writeEndObject();
-    }
-    jg.writeEndArray();
-    return 0;
-  }
-
-  private static void writeAttribute(final JsonGenerator jg,
-      final MBeanServer mBeanServer, ObjectName oname,
-      final boolean description, final MBeanAttributeInfo attr)
-  throws IOException {
-    if (!attr.isReadable()) {
-      return;
-    }
-    String attName = attr.getName();
-    if ("modelerType".equals(attName)) {
-      return;
-    }
-    if (attName.indexOf("=") >= 0 || attName.indexOf(":") >= 0 || attName.indexOf(" ") >= 0) {
-      return;
-    }
-    String descriptionStr = description? attr.getDescription(): null;
-    Object value = null;
-    try {
-      value = mBeanServer.getAttribute(oname, attName);
-    } catch (RuntimeMBeanException e) {
-      // UnsupportedOperationExceptions happen in the normal course of business,
-      // so no need to log them as errors all the time.
-      if (e.getCause() instanceof UnsupportedOperationException) {
-        if (LOG.isTraceEnabled()) {
-          LOG.trace("Getting attribute " + attName + " of " + oname + " threw " + e);
-        }
-      } else {
-        LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
-      }
-      return;
-    } catch (RuntimeErrorException e) {
-      // RuntimeErrorException happens when an unexpected failure occurs in getAttribute
-      // for example https://issues.apache.org/jira/browse/DAEMON-120
-      LOG.debug("getting attribute "+attName+" of "+oname+" threw an exception", e);
-      return;
-    } catch (AttributeNotFoundException e) {
-      // Ignored: the attribute was not found, which should never happen because the bean
-      //just told us that it has this attribute, but if this happens just don't output
-      //the attribute.
-      return;
-    } catch (MBeanException e) {
-      //The code inside the attribute getter threw an exception so log it, and
-      // skip outputting the attribute
-      LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
-      return;
-    } catch (RuntimeException e) {
-      //For some reason even with an MBeanException available to them Runtime exceptions
-      //can still find their way through, so treat them the same as MBeanException
-      LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
-      return;
-    } catch (ReflectionException e) {
-      //This happens when the code inside the JMX bean (setter?? from the java docs)
-      //threw an exception, so log it and skip outputting the attribute
-      LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
-      return;
-    } catch (InstanceNotFoundException e) {
-      // Ignored: the MBean itself was not found, which should never happen because we
-      //just accessed it (perhaps something unregistered in-between) but if this
-      //happens just don't output the attribute.
-      return;
-    }
-
-    writeAttribute(jg, attName, descriptionStr, value);
-  }
-
-  private static void writeAttribute(JsonGenerator jg, String attName, final String descriptionStr,
-      Object value)
-  throws IOException {
-    boolean description = false;
-    if (descriptionStr != null && descriptionStr.length() > 0 && !attName.equals(descriptionStr)) {
-      description = true;
-      jg.writeFieldName(attName);
-      jg.writeStartObject();
-      jg.writeFieldName("description");
-      jg.writeString(descriptionStr);
-      jg.writeFieldName("value");
-      writeObject(jg, description, value);
-      jg.writeEndObject();
-    } else {
-      jg.writeFieldName(attName);
-      writeObject(jg, description, value);
-    }
-  }
-
-  private static void writeObject(final JsonGenerator jg, final boolean description, Object value)
-  throws IOException {
-    if(value == null) {
-      jg.writeNull();
-    } else {
-      Class<?> c = value.getClass();
-      if (c.isArray()) {
-        jg.writeStartArray();
-        int len = Array.getLength(value);
-        for (int j = 0; j < len; j++) {
-          Object item = Array.get(value, j);
-          writeObject(jg, description, item);
-        }
-        jg.writeEndArray();
-      } else if(value instanceof Number) {
-        Number n = (Number)value;
-        jg.writeNumber(n.toString());
-      } else if(value instanceof Boolean) {
-        Boolean b = (Boolean)value;
-        jg.writeBoolean(b);
-      } else if(value instanceof CompositeData) {
-        CompositeData cds = (CompositeData)value;
-        CompositeType comp = cds.getCompositeType();
-        Set<String> keys = comp.keySet();
-        jg.writeStartObject();
-        for (String key: keys) {
-          writeAttribute(jg, key, null, cds.get(key));
-        }
-        jg.writeEndObject();
-      } else if(value instanceof TabularData) {
-        TabularData tds = (TabularData)value;
-        jg.writeStartArray();
-        for(Object entry : tds.values()) {
-          writeObject(jg, description, entry);
-        }
-        jg.writeEndArray();
-      } else {
-        jg.writeString(value.toString());
-      }
-    }
-  }
-
-  /**
-   * Dump out all registered mbeans as json on System.out.
-   * @throws IOException
-   * @throws MalformedObjectNameException
-   */
-  public static void dumpAllBeans() throws IOException, MalformedObjectNameException {
-    try (PrintWriter writer = new PrintWriter(System.out)) {
-      JSONBean dumper = new JSONBean();
-      try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) {
-        MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
-        jsonBeanWriter.write(mbeanServer, new ObjectName("*:*"), null, false);
-      }
-    }
-  }
-}
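
For reference, a minimal sketch of how the JSONBean API removed above was driven; the JSONBeanExample class name and the "java.lang:type=Memory" query are illustrative, not part of this patch:

    import java.io.PrintWriter;
    import java.io.StringWriter;
    import java.lang.management.ManagementFactory;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;
    import org.apache.hadoop.hbase.util.JSONBean;

    public class JSONBeanExample {
      public static void main(String[] args) throws Exception {
        StringWriter sw = new StringWriter();
        JSONBean dumper = new JSONBean();
        try (PrintWriter pw = new PrintWriter(sw);
             JSONBean.Writer json = dumper.open(pw)) {
          MBeanServer server = ManagementFactory.getPlatformMBeanServer();
          // Dumps every readable attribute of the queried bean; a non-zero
          // return value signals the bean could not be found.
          json.write(server, new ObjectName("java.lang:type=Memory"), null, false);
        }
        System.out.println(sw);
      }
    }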

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
deleted file mode 100644
index d10610e..0000000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.util;
-
-import java.beans.IntrospectionException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.lang.management.GarbageCollectorMXBean;
-import java.lang.management.ManagementFactory;
-import java.lang.management.MemoryPoolMXBean;
-import java.lang.management.RuntimeMXBean;
-import java.util.Hashtable;
-import java.util.List;
-import java.util.Set;
-
-import javax.management.InstanceNotFoundException;
-import javax.management.MBeanAttributeInfo;
-import javax.management.MBeanInfo;
-import javax.management.MBeanServer;
-import javax.management.MalformedObjectNameException;
-import javax.management.ObjectName;
-import javax.management.ReflectionException;
-import javax.management.openmbean.CompositeData;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.codehaus.jettison.json.JSONException;
-
-public final class JSONMetricUtil {
-
-  private static final Log LOG = LogFactory.getLog(JSONMetricUtil.class);
-
-  private static MBeanServer mbServer = ManagementFactory.getPlatformMBeanServer();
-  //MBeans ObjectName domain names
-  public static final String JAVA_LANG_DOMAIN = "java.lang";
-  public static final String JAVA_NIO_DOMAIN = "java.nio";
-  public static final String SUN_MGMT_DOMAIN = "com.sun.management";
-  public static final String HADOOP_DOMAIN = "Hadoop";
-
-  //MBeans ObjectName properties key names
-  public static final String TYPE_KEY = "type";
-  public static final String NAME_KEY = "name";
-  public static final String SERVICE_KEY = "service";
-  public static final String SUBSYSTEM_KEY = "sub";
-
-  /**
-   * Utility for getting metric values. Collection of static methods intended for
-   * easier access to metric values.
-   */
-  private JSONMetricUtil() {
-    // Not to be called
-  }
-
-  public static MBeanAttributeInfo[] getMBeanAttributeInfo(ObjectName bean)
-      throws IntrospectionException, InstanceNotFoundException, ReflectionException,
-      javax.management.IntrospectionException {
-    MBeanInfo mbinfo = mbServer.getMBeanInfo(bean);
-    return mbinfo.getAttributes();
-  }
-
-  public static Object getValueFromMBean(ObjectName bean, String attribute) {
-    Object value = null;
-    try {
-      value = mbServer.getAttribute(bean, attribute);
-    }
-    catch(Exception e) {
-      LOG.error("Unable to get value from MBean= "+ bean.toString() +
-        "for attribute=" + attribute + " " + e.getMessage());
-    }
-    return value;
-  }
-
-  /**
-   * Returns a subset of mbeans defined by qry.
-   * Modeled after DumpRegionServerMetrics#dumpMetrics.
-   * Example: String qry = "java.lang:type=Memory"
-   * @throws MalformedObjectNameException if the query has a bad format
-   * @throws IOException if writing the JSON output fails
-   * @return String representation of a JSON array.
-   */
-  public static String dumpBeanToString(String qry) throws MalformedObjectNameException,
-  IOException {
-    StringWriter sw = new StringWriter(1024 * 100); // Guess this size
-    try (PrintWriter writer = new PrintWriter(sw)) {
-      JSONBean dumper = new JSONBean();
-      try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) {
-        MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
-        jsonBeanWriter.write(mbeanServer,
-          new ObjectName(qry), null, false);
-      }
-    }
-    sw.close();
-    return sw.toString();
-  }
-
-  public static JsonNode mappStringToJsonNode(String jsonString)
-      throws JsonProcessingException, IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    JsonNode node = mapper.readTree(jsonString);
-    return node;
-  }
-
-  public static JsonNode searchJson(JsonNode tree, String searchKey)
-      throws JsonProcessingException, IOException {
-    if (tree == null) {
-      return null;
-    }
-    if(tree.has(searchKey)) {
-      return tree.get(searchKey);
-    }
-    if(tree.isContainerNode()) {
-      for(JsonNode branch: tree) {
-        JsonNode branchResult = searchJson(branch, searchKey);
-        if (branchResult != null && !branchResult.isMissingNode()) {
-          return branchResult;
-        }
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Method for building a Hashtable used for constructing an ObjectName.
-   * Mapping is done by array indices.
-   * @param keys Hashtable keys
-   * @param values Hashtable values
-   * @return Hashtable, or null if the arrays are empty or differ in length
-   */
-  public static Hashtable<String, String> buldKeyValueTable(String[] keys, String[] values) {
-    if (keys.length != values.length) {
-      LOG.error("keys and values arrays must be same size");
-      return null;
-    }
-    if (keys.length == 0 || values.length == 0) {
-      LOG.error("keys and values arrays can not be empty;");
-      return null;
-    }
-    Hashtable<String, String> table = new Hashtable<String, String>();
-    for(int i = 0; i < keys.length; i++) {
-      table.put(keys[i], values[i]);
-    }
-    return table;
-  }
-
-  public static ObjectName buildObjectName(String pattern) throws MalformedObjectNameException {
-    return new ObjectName(pattern);
-  }
-
-  public static ObjectName buildObjectName(String domain, Hashtable<String, String> keyValueTable)
-      throws MalformedObjectNameException {
-    return new ObjectName(domain, keyValueTable);
-  }
-
-  public static Set<ObjectName> getRegistredMBeans(ObjectName name, MBeanServer mbs) {
-    return mbs.queryNames(name, null);
-  }
-
-  public static String getProcessPID() {
-    return ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
-  }
-
-  public static String getCommmand() throws MalformedObjectNameException,
-  IOException, JSONException {
-    RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
-    return runtimeBean.getSystemProperties().get("sun.java.command");
-  }
-
-  public static List<GarbageCollectorMXBean> getGcCollectorBeans() {
-    List<GarbageCollectorMXBean> gcBeans = ManagementFactory.getGarbageCollectorMXBeans();
-    return gcBeans;
-  }
-
-  public static long getLastGcDuration(ObjectName gcCollector) {
-    long lastGcDuration = 0;
-    Object lastGcInfo = getValueFromMBean(gcCollector, "LastGcInfo");
-    if (lastGcInfo != null && lastGcInfo instanceof CompositeData) {
-      CompositeData cds = (CompositeData)lastGcInfo;
-      lastGcDuration = (long) cds.get("duration");
-    }
-    return lastGcDuration;
-  }
-
-  public static List<MemoryPoolMXBean> getMemoryPools() {
-    List<MemoryPoolMXBean> mPools = ManagementFactory.getMemoryPoolMXBeans();
-    return mPools;
-  }
-
-  public static float calcPercentage(long a, long b) {
-    if (a == 0 || b == 0) {
-      return 0;
-    }
-    return ((float)a / (float)b) *100;
-  }
-}
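
Likewise, a short sketch chaining the removed JSONMetricUtil helpers; the "HeapMemoryUsage" key is an illustrative attribute of the java.lang Memory MBean:

    import com.fasterxml.jackson.databind.JsonNode;
    import org.apache.hadoop.hbase.util.JSONMetricUtil;

    public class JSONMetricUtilExample {
      public static void main(String[] args) throws Exception {
        // Serialize one MBean to JSON, re-parse it, then search the tree.
        String json = JSONMetricUtil.dumpBeanToString("java.lang:type=Memory");
        JsonNode tree = JSONMetricUtil.mappStringToJsonNode(json);
        JsonNode heap = JSONMetricUtil.searchJson(tree, "HeapMemoryUsage");
        System.out.println(heap);
      }
    }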

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-endpoint/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index 101bfdb..2686702 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -174,12 +174,6 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-http</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
     <!-- The coprocessor.Export needs mapreduce.Import and mapreduce.Export to run the unit tests -->
     <!-- see org.apache.hadoop.hbase.coprocessor.TestImportExport -->
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml
deleted file mode 100644
index 9bd6fcd..0000000
--- a/hbase-http/pom.xml
+++ /dev/null
@@ -1,515 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <artifactId>hbase-build-configuration</artifactId>
-    <groupId>org.apache.hbase</groupId>
-    <version>3.0.0-SNAPSHOT</version>
-    <relativePath>../hbase-build-configuration</relativePath>
-  </parent>
-  <artifactId>hbase-http</artifactId>
-  <name>Apache HBase - HTTP</name>
-  <description>HTTP functionality for HBase Servers</description>
-
-  <build>
-    <!-- Makes sure the resources get added before they are processed
-      by placing this first -->
-    <testResources>
-      <!-- Our test artifact has different license info than our source/bin ones -->
-      <testResource>
-        <directory>src/test/resources/META-INF/</directory>
-        <targetPath>META-INF/</targetPath>
-        <includes>
-          <include>NOTICE</include>
-        </includes>
-        <filtering>true</filtering>
-      </testResource>
-      <testResource>
-        <directory>src/test/resources</directory>
-        <includes>
-          <include>**/**</include>
-        </includes>
-      </testResource>
-    </testResources>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-      <!-- licensing info from our bundled works -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-remote-resources-plugin</artifactId>
-        <version>1.5</version>
-        <executions>
-          <execution>
-            <id>default</id>
-            <configuration>
-              <attachToTest>false</attachToTest>
-              <properties>
-                <copyright-end-year>${build.year}</copyright-end-year>
-                <debug-print-included-work-info>${license.debug.print.included}</debug-print-included-work-info>
-                <bundled-dependencies>${license.bundles.dependencies}</bundled-dependencies>
-                <bundled-jquery>${license.bundles.jquery}</bundled-jquery>
-                <bundled-logo>${license.bundles.logo}</bundled-logo>
-                <bundled-bootstrap>${license.bundles.bootstrap}</bundled-bootstrap>
-              </properties>
-              <resourceBundles>
-                <resourceBundle>${project.groupId}:hbase-resource-bundle:${project.version}</resourceBundle>
-              </resourceBundles>
-              <supplementalModelArtifacts>
-                <supplementalModelArtifact>${project.groupId}:hbase-resource-bundle:${project.version}</supplementalModelArtifact>
-              </supplementalModelArtifacts>
-              <supplementalModels>
-                <supplementalModel>supplemental-models.xml</supplementalModel>
-              </supplementalModels>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <!-- Run with -Dmaven.test.skip.exec=true to build -tests.jar without running
-        tests (this is needed for upstream projects whose tests need this jar simply for
-        compilation) -->
-      <plugin>
-        <!--Make it so assembly:single does nothing in here-->
-        <artifactId>maven-assembly-plugin</artifactId>
-        <configuration>
-          <skipAssembly>true</skipAssembly>
-        </configuration>
-      </plugin>
-      <!-- Make a jar and put the sources in the jar -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>jar</goal>
-              <goal>test-jar</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <!-- Run findbugs -->
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>findbugs-maven-plugin</artifactId>
-      </plugin>
-      <!-- Testing plugins -->
-      <plugin>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <properties>
-            <property>
-              <name>listener</name>
-              <value>org.apache.hadoop.hbase.ResourceCheckerJUnitListener</value>
-            </property>
-          </properties>
-          <systemPropertyVariables>
-            <test.build.webapps>target/test-classes/webapps</test.build.webapps>
-          </systemPropertyVariables>
-        </configuration>
-      </plugin>
-    </plugins>
-    <!-- General Resources -->
-    <pluginManagement>
-       <plugins>
-         <!--This plugin's configuration is used to store Eclipse m2e settings
-             only. It has no influence on the Maven build itself and needs to
-             be kept in plugin management, not in the actual plugins. -->
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-antrun-plugin</artifactId>
-                    <versionRange>[1.6,)</versionRange>
-                    <goals>
-                      <goal>run</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <execute>
-                      <runOnIncremental>false</runOnIncremental>
-                      <runOnConfiguration>true</runOnConfiguration>
-                    </execute>
-                  </action>
-                </pluginExecution>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-dependency-plugin</artifactId>
-                    <versionRange>[2.8,)</versionRange>
-                    <goals>
-                      <goal>build-classpath</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore></ignore>
-                  </action>
-                </pluginExecution>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-compiler-plugin</artifactId>
-                    <versionRange>[3.2,)</versionRange>
-                    <goals>
-                      <goal>compile</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore></ignore>
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-    </pluginManagement>
-  </build>
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hbase.thirdparty</groupId>
-      <artifactId>hbase-shaded-miscellaneous</artifactId>
-    </dependency>
-    <!-- Intra-project dependencies -->
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-common</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-common</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-annotations</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <!-- resource bundle only needed at build time -->
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-resource-bundle</artifactId>
-      <version>${project.version}</version>
-      <optional>true</optional>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-server</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-servlet</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-util</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-util-ajax</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-webapp</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-http</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-security</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.glassfish.jersey.core</groupId>
-      <artifactId>jersey-server</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.glassfish.jersey.containers</groupId>
-      <artifactId>jersey-container-servlet-core</artifactId>
-    </dependency>
-    <!-- General dependencies -->
-    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>javax.servlet</groupId>
-      <artifactId>javax.servlet-api</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>javax.ws.rs</groupId>
-      <artifactId>javax.ws.rs-api</artifactId>
-    </dependency>
-
-    <!-- Test dependencies -->
-    <dependency>
-      <groupId>org.apache.kerby</groupId>
-      <artifactId>kerb-simplekdc</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.bouncycastle</groupId>
-      <artifactId>bcprov-jdk16</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.kerby</groupId>
-      <artifactId>kerb-core</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpclient</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpcore</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-core</artifactId>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-  <profiles>
-    <!-- Needs to make the profile in apache parent pom -->
-    <profile>
-      <id>apache-release</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-resources-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>license-javadocs</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>copy-resources</goal>
-                </goals>
-                <configuration>
-                  <outputDirectory>${project.build.directory}/apidocs</outputDirectory>
-                  <resources>
-                    <resource>
-                      <directory>src/main/javadoc/META-INF/</directory>
-                      <targetPath>META-INF/</targetPath>
-                      <includes>
-                        <include>LICENSE</include>
-                        <include>NOTICE</include>
-                      </includes>
-                      <filtering>true</filtering>
-                    </resource>
-                  </resources>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <!-- Skip the tests in this module -->
-    <profile>
-      <id>skipServerTests</id>
-      <activation>
-        <property>
-          <name>skipServerTests</name>
-        </property>
-      </activation>
-      <properties>
-        <surefire.skipFirstPart>true</surefire.skipFirstPart>
-        <surefire.skipSecondPart>true</surefire.skipSecondPart>
-      </properties>
-    </profile>
-    <!-- Special builds -->
-    <profile>
-      <id>native</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>make</id>
-                <phase>compile</phase>
-                <goals><goal>run</goal></goals>
-                <configuration>
-                  <target>
-                    <mkdir dir="${project.build.directory}/native"/>
-                    <exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
-                      <arg line="${basedir}/src/main/native -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
-                    </exec>
-                    <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
-                      <arg line="VERBOSE=1"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <!-- Profiles for building against different hadoop versions -->
-    <!-- There are a lot of common dependencies used here, should investigate
-    if we can combine these profiles somehow -->
-
-    <!-- profile for building against Hadoop 2.x.  This is the default.  -->
-    <profile>
-      <id>hadoop-2.0</id>
-      <activation>
-        <property>
-            <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
-            <!--h2--><name>!hadoop.profile</name>
-        </property>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-          </exclusions>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-auth</artifactId>
-        </dependency>
-      </dependencies>
-      <build>
-        <plugins>
-          <plugin>
-            <artifactId>maven-dependency-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>create-mrapp-generated-classpath</id>
-                <phase>generate-test-resources</phase>
-                <goals>
-                  <goal>build-classpath</goal>
-                </goals>
-                <configuration>
-                  <!-- needed to run the unit test for DS to generate
-                  the required classpath that is required in the env
-                  of the launch container in the mini mr/yarn cluster
-                  -->
-                  <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <!--
-      profile for building against Hadoop 3.0.x. Activate using:
-       mvn -Dhadoop.profile=3.0
-    -->
-    <profile>
-      <id>hadoop-3.0</id>
-      <activation>
-        <property>
-          <name>hadoop.profile</name>
-          <value>3.0</value>
-        </property>
-      </activation>
-      <properties>
-        <hadoop.version>${hadoop-three.version}</hadoop.version>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-          </exclusions>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-      <build>
-        <plugins>
-          <plugin>
-            <artifactId>maven-dependency-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>create-mrapp-generated-classpath</id>
-                <phase>generate-test-resources</phase>
-                <goals>
-                  <goal>build-classpath</goal>
-                </goals>
-                <configuration>
-                  <!-- needed to run the unit test for DS to generate
-                  the required classpath that is required in the env
-                  of the launch container in the mini mr/yarn cluster
-                  -->
-                  <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-  </profiles>
-</project>

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
deleted file mode 100644
index bd8570e..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.eclipse.jetty.servlet.DefaultServlet;
-
-/**
- * General servlet which is admin-authorized.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class AdminAuthorizedServlet extends DefaultServlet {
-
-  private static final long serialVersionUID = 1L;
-
-  @Override
-  protected void doGet(HttpServletRequest request, HttpServletResponse response)
-    throws ServletException, IOException {
-    // Do the authorization
-    if (HttpServer.hasAdministratorAccess(getServletContext(), request,
-        response)) {
-      // Authorization is done. Just call super.
-      super.doGet(request, response);
-    }
-  }
-}
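
The authorization pattern above generalizes to any servlet; the following is a hypothetical sketch (MyAdminServlet and its response body are illustrative, only the HttpServer.hasAdministratorAccess call comes from this patch):

    import java.io.IOException;
    import javax.servlet.ServletException;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.apache.hadoop.hbase.http.HttpServer;

    public class MyAdminServlet extends HttpServlet {
      @Override
      protected void doGet(HttpServletRequest request, HttpServletResponse response)
          throws ServletException, IOException {
        // As in AdminAuthorizedServlet: proceed only once the admin check passes;
        // when it returns false, AdminAuthorizedServlet above simply returns, so
        // the check is assumed to have written the error response itself.
        if (!HttpServer.hasAdministratorAccess(getServletContext(), request, response)) {
          return;
        }
        response.getWriter().println("admin-only content");
      }
    }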

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
deleted file mode 100644
index 9944d29..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-public class ClickjackingPreventionFilter implements Filter {
-
-  private FilterConfig filterConfig;
-
-  @Override
-  public void init(FilterConfig filterConfig) throws ServletException {
-    this.filterConfig = filterConfig;
-  }
-
-  @Override
-  public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
-      throws IOException, ServletException {
-    HttpServletResponse httpRes = (HttpServletResponse) res;
-    httpRes.addHeader("X-Frame-Options", filterConfig.getInitParameter("xframeoptions"));
-    chain.doFilter(req, res);
-  }
-
-  @Override
-  public void destroy() {
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
deleted file mode 100644
index 7a79acc..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.util.Map;
-
-/**
- * A container interface to add javax.servlet.Filter.
- */
-public interface FilterContainer {
-  /**
-   * Add a filter to the container.
-   * @param name Filter name
-   * @param classname Filter class name
-   * @param parameters a map from parameter names to initial values
-   */
-  void addFilter(String name, String classname, Map<String, String> parameters);
-  /**
-   * Add a global filter to the container - This global filter will be
-   * applied to all available web contexts.
-   * @param name filter name
-   * @param classname filter class name
-   * @param parameters a map from parameter names to initial values
-   */
-  void addGlobalFilter(String name, String classname, Map<String, String> parameters);
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
deleted file mode 100644
index d317343..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * Initialize a javax.servlet.Filter.
- */
-public abstract class FilterInitializer {
-  /**
-   * Initialize a Filter to a FilterContainer.
-   * @param container The filter container
-   * @param conf Configuration for run-time parameters
-   */
-  public abstract void initFilter(FilterContainer container, Configuration conf);
-}
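
Tying the last three removed classes together, a hedged sketch of a FilterInitializer that registers the ClickjackingPreventionFilter globally; the initializer class name and the "hbase.http.filter.xframeoptions.mode" configuration key are illustrative assumptions:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.http.ClickjackingPreventionFilter;
    import org.apache.hadoop.hbase.http.FilterContainer;
    import org.apache.hadoop.hbase.http.FilterInitializer;

    public class ClickjackingFilterInitializer extends FilterInitializer {
      @Override
      public void initFilter(FilterContainer container, Configuration conf) {
        Map<String, String> params = new HashMap<>();
        // ClickjackingPreventionFilter reads the "xframeoptions" init parameter
        // in doFilter(); the configuration key below is an assumed example.
        params.put("xframeoptions",
            conf.get("hbase.http.filter.xframeoptions.mode", "DENY"));
        container.addGlobalFilter("clickjackingprevention",
            ClickjackingPreventionFilter.class.getName(), params);
      }
    }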

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
deleted file mode 100644
index 7f4bb83..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-
-/**
- * This class is responsible for quoting HTML characters.
- */
-public class HtmlQuoting {
-  private static final byte[] ampBytes = "&amp;".getBytes();
-  private static final byte[] aposBytes = "&apos;".getBytes();
-  private static final byte[] gtBytes = "&gt;".getBytes();
-  private static final byte[] ltBytes = "&lt;".getBytes();
-  private static final byte[] quotBytes = "&quot;".getBytes();
-
-  /**
-   * Does the given string need to be quoted?
-   * @param data the bytes to check
-   * @param off the starting position
-   * @param len the number of bytes to check
-   * @return does the string contain any of the active html characters?
-   */
-  public static boolean needsQuoting(byte[] data, int off, int len) {
-    if (off + len > data.length) {
-      throw new IllegalStateException("off+len=" + (off + len) + " should be lower"
-          + " than data length=" + data.length);
-    }
-    for(int i=off; i< off+len; ++i) {
-      switch(data[i]) {
-      case '&':
-      case '<':
-      case '>':
-      case '\'':
-      case '"':
-        return true;
-      default:
-        break;
-      }
-    }
-    return false;
-  }
-
-  /**
-   * Does the given string need to be quoted?
-   * @param str the string to check
-   * @return does the string contain any of the active html characters?
-   */
-  public static boolean needsQuoting(String str) {
-    if (str == null) {
-      return false;
-    }
-    byte[] bytes = str.getBytes();
-    return needsQuoting(bytes, 0, bytes.length);
-  }
-
-  /**
-   * Quote all of the active HTML characters in the given string as they
-   * are added to the buffer.
-   * @param output the stream to write the output to
-   * @param buffer the byte array to take the characters from
-   * @param off the index of the first byte to quote
-   * @param len the number of bytes to quote
-   */
-  public static void quoteHtmlChars(OutputStream output, byte[] buffer,
-                                    int off, int len) throws IOException {
-    for(int i=off; i < off+len; i++) {
-      switch (buffer[i]) {
-      case '&': output.write(ampBytes); break;
-      case '<': output.write(ltBytes); break;
-      case '>': output.write(gtBytes); break;
-      case '\'': output.write(aposBytes); break;
-      case '"': output.write(quotBytes); break;
-      default: output.write(buffer, i, 1);
-      }
-    }
-  }
-
-  /**
-   * Quote the given item to make it html-safe.
-   * @param item the string to quote
-   * @return the quoted string
-   */
-  public static String quoteHtmlChars(String item) {
-    if (item == null) {
-      return null;
-    }
-    byte[] bytes = item.getBytes();
-    if (needsQuoting(bytes, 0, bytes.length)) {
-      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-      try {
-        quoteHtmlChars(buffer, bytes, 0, bytes.length);
-      } catch (IOException ioe) {
-        // Won't happen, since it is a bytearrayoutputstream
-      }
-      return buffer.toString();
-    } else {
-      return item;
-    }
-  }
-
-  /**
-   * Return an output stream that quotes all of the output.
-   * @param out the stream to write the quoted output to
-   * @return a new stream that the application should write to
-   * @throws IOException if the underlying output fails
-   */
-  public static OutputStream quoteOutputStream(final OutputStream out
-                                               ) throws IOException {
-    return new OutputStream() {
-      private byte[] data = new byte[1];
-      @Override
-      public void write(byte[] data, int off, int len) throws IOException {
-        quoteHtmlChars(out, data, off, len);
-      }
-
-      @Override
-      public void write(int b) throws IOException {
-        data[0] = (byte) b;
-        quoteHtmlChars(out, data, 0, 1);
-      }
-
-      @Override
-      public void flush() throws IOException {
-        out.flush();
-      }
-
-      @Override
-      public void close() throws IOException {
-        out.close();
-      }
-    };
-  }
-
-  /**
-   * Remove HTML quoting from a string.
-   * @param item the string to unquote
-   * @return the unquoted string
-   */
-  public static String unquoteHtmlChars(String item) {
-    if (item == null) {
-      return null;
-    }
-    int next = item.indexOf('&');
-    // nothing was quoted
-    if (next == -1) {
-      return item;
-    }
-    int len = item.length();
-    int posn = 0;
-    StringBuilder buffer = new StringBuilder();
-    while (next != -1) {
-      buffer.append(item.substring(posn, next));
-      if (item.startsWith("&amp;", next)) {
-        buffer.append('&');
-        next += 5;
-      } else if (item.startsWith("&apos;", next)) {
-        buffer.append('\'');
-        next += 6;
-      } else if (item.startsWith("&gt;", next)) {
-        buffer.append('>');
-        next += 4;
-      } else if (item.startsWith("&lt;", next)) {
-        buffer.append('<');
-        next += 4;
-      } else if (item.startsWith("&quot;", next)) {
-        buffer.append('"');
-        next += 6;
-      } else {
-        int end = item.indexOf(';', next)+1;
-        if (end == 0) {
-          end = len;
-        }
-        throw new IllegalArgumentException("Bad HTML quoting for " +
-                                           item.substring(next,end));
-      }
-      posn = next;
-      next = item.indexOf('&', posn);
-    }
-    buffer.append(item.substring(posn, len));
-    return buffer.toString();
-  }
-
-  public static void main(String[] args) throws Exception {
-    if (args.length == 0) {
-      throw new IllegalArgumentException("Please provide some arguments");
-    }
-    for(String arg:args) {
-      System.out.println("Original: " + arg);
-      String quoted = quoteHtmlChars(arg);
-      System.out.println("Quoted: "+ quoted);
-      String unquoted = unquoteHtmlChars(quoted);
-      System.out.println("Unquoted: " + unquoted);
-      System.out.println();
-    }
-  }
-
-}
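
A quick round trip through the removed HtmlQuoting helpers; the sample string is arbitrary:

    import org.apache.hadoop.hbase.http.HtmlQuoting;

    public class HtmlQuotingExample {
      public static void main(String[] args) {
        String raw = "<b>\"a & b\"</b>";
        // Quotes the five active HTML characters: & < > ' "
        String quoted = HtmlQuoting.quoteHtmlChars(raw);
        System.out.println(quoted); // &lt;b&gt;&quot;a &amp; b&quot;&lt;/b&gt;
        // unquoteHtmlChars reverses exactly those five entities.
        System.out.println(HtmlQuoting.unquoteHtmlChars(quoted).equals(raw)); // true
      }
    }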

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
deleted file mode 100644
index b9dde23..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * Statics to get access to Http related configuration.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class HttpConfig {
-  private Policy policy;
-  public enum Policy {
-    HTTP_ONLY,
-    HTTPS_ONLY,
-    HTTP_AND_HTTPS;
-
-    public static Policy fromString(String value) {
-      if (HTTPS_ONLY.name().equalsIgnoreCase(value)) {
-        return HTTPS_ONLY;
-      } else if (HTTP_AND_HTTPS.name().equalsIgnoreCase(value)) {
-        return HTTP_AND_HTTPS;
-      }
-      return HTTP_ONLY;
-    }
-
-    public boolean isHttpEnabled() {
-      return this == HTTP_ONLY || this == HTTP_AND_HTTPS;
-    }
-
-    public boolean isHttpsEnabled() {
-      return this == HTTPS_ONLY || this == HTTP_AND_HTTPS;
-    }
-  }
-
-  public HttpConfig(final Configuration conf) {
-    boolean sslEnabled = conf.getBoolean(
-      ServerConfigurationKeys.HBASE_SSL_ENABLED_KEY,
-      ServerConfigurationKeys.HBASE_SSL_ENABLED_DEFAULT);
-    policy = sslEnabled ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY;
-    if (sslEnabled) {
-      conf.addResource("ssl-server.xml");
-      conf.addResource("ssl-client.xml");
-    }
-  }
-
-  public void setPolicy(Policy policy) {
-    this.policy = policy;
-  }
-
-  public boolean isSecure() {
-    return policy == Policy.HTTPS_ONLY;
-  }
-
-  public String getSchemePrefix() {
-    return isSecure() ? "https://" : "http://";
-  }
-
-  public String getScheme(Policy policy) {
-    return policy == Policy.HTTPS_ONLY ? "https://" : "http://";
-  }
-}
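
A minimal sketch of how the policy resolution above behaves. This is hypothetical usage; it assumes ServerConfigurationKeys.HBASE_SSL_ENABLED_KEY is the boolean key read by the constructor, as shown in the code above.

    Configuration conf = new Configuration();
    conf.setBoolean(ServerConfigurationKeys.HBASE_SSL_ENABLED_KEY, true);
    HttpConfig httpConfig = new HttpConfig(conf);
    // With SSL enabled the constructor picks Policy.HTTPS_ONLY,
    // so the scheme prefix resolves to "https://".
    assert httpConfig.isSecure();
    assert "https://".equals(httpConfig.getSchemePrefix());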

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
deleted file mode 100644
index cfc0640..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.util.HashMap;
-
-import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogConfigurationException;
-import org.apache.commons.logging.LogFactory;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Logger;
-
-import org.eclipse.jetty.server.RequestLog;
-import org.eclipse.jetty.server.NCSARequestLog;
-
-/**
- * RequestLog object for use with Http
- */
-public class HttpRequestLog {
-
-  private static final Log LOG = LogFactory.getLog(HttpRequestLog.class);
-  private static final HashMap<String, String> serverToComponent;
-
-  static {
-    serverToComponent = new HashMap<>();
-    serverToComponent.put("master", "master");
-    serverToComponent.put("region", "regionserver");
-  }
-
-  public static RequestLog getRequestLog(String name) {
-
-    String lookup = serverToComponent.get(name);
-    if (lookup != null) {
-      name = lookup;
-    }
-    String loggerName = "http.requests." + name;
-    String appenderName = name + "requestlog";
-    Log logger = LogFactory.getLog(loggerName);
-
-    if (logger instanceof Log4JLogger) {
-      Log4JLogger httpLog4JLog = (Log4JLogger)logger;
-      Logger httpLogger = httpLog4JLog.getLogger();
-      Appender appender = null;
-
-      try {
-        appender = httpLogger.getAppender(appenderName);
-      } catch (LogConfigurationException e) {
-        LOG.warn("Http request log for " + loggerName
-            + " could not be created");
-        throw e;
-      }
-
-      if (appender == null) {
-        LOG.info("Http request log for " + loggerName
-            + " is not defined");
-        return null;
-      }
-
-      if (appender instanceof HttpRequestLogAppender) {
-        HttpRequestLogAppender requestLogAppender
-          = (HttpRequestLogAppender)appender;
-        NCSARequestLog requestLog = new NCSARequestLog();
-        requestLog.setFilename(requestLogAppender.getFilename());
-        requestLog.setRetainDays(requestLogAppender.getRetainDays());
-        return requestLog;
-      } else {
-        LOG.warn("Jetty request log for " + loggerName
-            + " was of the wrong class");
-        return null;
-      }
-    } else {
-      LOG.warn("Jetty request log can only be enabled using Log4j");
-      return null;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
deleted file mode 100644
index 8039b34..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.log4j.spi.LoggingEvent;
-import org.apache.log4j.AppenderSkeleton;
-
-/**
- * Log4j Appender adapter for HttpRequestLog
- */
-public class HttpRequestLogAppender extends AppenderSkeleton {
-
-  private String filename;
-  private int retainDays;
-
-  public HttpRequestLogAppender() {
-  }
-
-  public void setRetainDays(int retainDays) {
-    this.retainDays = retainDays;
-  }
-
-  public int getRetainDays() {
-    return retainDays;
-  }
-
-  public void setFilename(String filename) {
-    this.filename = filename;
-  }
-
-  public String getFilename() {
-    return filename;
-  }
-
-  @Override
-  public void append(LoggingEvent event) {
-  }
-
-  @Override
-  public void close() {
-    // Nothing to close; this appender only carries configuration for the request log.
-  }
-
-  @Override
-  public boolean requiresLayout() {
-    return false;
-  }
-}
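
Taken together, the two classes above leave the access log entirely to log4j wiring: HttpRequestLog.getRequestLog("master") looks up logger "http.requests.master" and appender "masterrequestlog", then copies the appender's Filename/RetainDays onto an NCSARequestLog. A hypothetical log4j.properties fragment that would enable it follows; the log path is an example only.

    log4j.logger.http.requests.master=INFO,masterrequestlog
    log4j.appender.masterrequestlog=org.apache.hadoop.hbase.http.HttpRequestLogAppender
    log4j.appender.masterrequestlog.Filename=/var/log/hbase/access_master.log
    log4j.appender.masterrequestlog.RetainDays=3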


[04/13] hbase git commit: HBASE-19124 Move nightly test of source artifact creation from Jenkinsfile to a script.

Posted by bu...@apache.org.
HBASE-19124 Move nightly test of source artifact creation from Jenkinsfile to a script.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fb937677
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fb937677
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fb937677

Branch: refs/heads/HBASE-19124
Commit: fb937677be671cab1d60e2a5fecc9ea134305d63
Parents: 71a55dc
Author: Sean Busbey <bu...@apache.org>
Authored: Wed Nov 1 08:12:03 2017 -0500
Committer: Sean Busbey <bu...@apache.org>
Committed: Wed Nov 1 19:13:09 2017 -0500

----------------------------------------------------------------------
 dev-support/Jenkinsfile                      |  61 +++-----
 dev-support/hbase_nightly_source-artifact.sh | 168 ++++++++++++++++++++++
 2 files changed, 186 insertions(+), 43 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/fb937677/dev-support/Jenkinsfile
----------------------------------------------------------------------
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index d7faeb6..5bd41aa 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -258,51 +258,26 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
         jdk "JDK 1.8 (latest)"
       }
       steps {
-        sh 'mvn --version --offline'
-        dir ("component") {
-          sh '''#!/bin/bash -e
-            echo "Clean..."
-            mvn --batch-mode -DskipTests clean
-            echo "Step 3 Build the source tarball"
-            mvn -Prelease --batch-mode -Dmaven.repo.local="${WORKSPACE}/.m2-for-repo" install -DskipTests assembly:single -Dassembly.file=hbase-assembly/src/main/assembly/src.xml
-'''
-        }
-        dir ("unpacked_src_tarball") {
-          sh '''#!/bin/bash -e
-            echo "Unpack the source tarball"
-            tar --strip-components=1 -xzf ../component/hbase-assembly/target/hbase-*-src.tar.gz
-            echo "Building from source artifact."
-            mvn -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${WORKSPACE}/.m2-for-src" clean install assembly:single
-            echo "Clean up after checking ability to build."
-            mvn -DskipTests --batch-mode clean
-'''
-        }
-        dir ("component") {
-          sh '''#!/bin/bash -e
-            echo "Clean up the source checkout"
-            mvn -DskipTests --batch-mode clean
-            echo "Diff against source tree"
-            diff --binary --recursive . ../unpacked_src_tarball >../diff_output || true
-'''
-        }
-        // expectation check largely based on HBASE-14952
         sh '''#!/bin/bash -e
-          echo "Checking against things we don't expect to include in the source tarball (git related, hbase-native-client, etc.)"
-          cat >known_excluded <<END
-Only in .: .git
-Only in .: .gitattributes
-Only in .: .gitignore
-Only in .: hbase-native-client
-END
-          if ! diff known_excluded diff_output >/dev/null ; then
-            echo "Any output here are unexpected differences between the source artifact we'd make for an RC and the current branch."
-            echo "The expected differences are on the < side and the current differences are on the > side."
-            echo "In a given set of differences, '.' refers to the branch in the repo and 'unpacked_src_tarball' refers to what we pulled out of the tarball."
-            diff known_excluded diff_output
-          else
-            echo "Everything looks as expected."
-          fi
+          echo "Setting up directories"
+          rm -rf "output-srctarball" && mkdir "output-srctarball"
+          rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
+          rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
+          rm -rf ".m2-for-src" && mkdir ".m2-for-src"
 '''
+        sh """#!/bin/bash -e
+          ${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh \
+              --intermediate-file-dir output-srctarball \
+              --unpack-temp-dir unpacked_src_tarball \
+              --maven-m2-initial .m2-for-repo \
+              --maven-m2-src-build .m2-for-src \
+              ${env.BASEDIR}
+"""
+      }
+      post {
+        always {
+          archive 'output-srctarball/*'
+        }
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/fb937677/dev-support/hbase_nightly_source-artifact.sh
----------------------------------------------------------------------
diff --git a/dev-support/hbase_nightly_source-artifact.sh b/dev-support/hbase_nightly_source-artifact.sh
new file mode 100755
index 0000000..4e4c6a2
--- /dev/null
+++ b/dev-support/hbase_nightly_source-artifact.sh
@@ -0,0 +1,168 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+function usage {
+  echo "Usage: ${0} [options] /path/to/component/checkout"
+  echo ""
+  echo "    --intermediate-file-dir /path/to/use  Path for writing listings and diffs. must exist."
+  echo "                                          defaults to making a directory in /tmp."
+  echo "    --unpack-temp-dir /path/to/use        Path for unpacking tarball. default to"
+  echo "                                          'unpacked_src_tarball' in intermediate directory."
+  echo "    --maven-m2-initial /path/to/use       Path for maven artifacts while building in"
+  echo "                                          component-dir."
+  echo "    --maven-m2-src-build /path/to/use     Path for maven artifacts while building from the"
+  echo "                                          unpacked source tarball."
+  exit 1
+}
+# if no args specified, show usage
+if [ $# -lt 1 ]; then
+  usage
+fi
+
+# Get arguments
+declare component_dir
+declare unpack_dir
+declare m2_initial
+declare m2_tarbuild
+declare working_dir
+while [ $# -gt 0 ]
+do
+  case "$1" in
+    --unpack-temp-dir) shift; unpack_dir=$1; shift;;
+    --maven-m2-initial) shift; m2_initial=$1; shift;;
+    --maven-m2-src-build) shift; m2_tarbuild=$1; shift;;
+    --intermediate-file-dir) shift; working_dir=$1; shift;;
+    --) shift; break;;
+    -*) usage ;;
+    *)  break;;  # terminate while loop
+  esac
+done
+
+# We should still have the component checkout location as the remaining argument.
+if [ $# -lt 1 ]; then
+  usage
+fi
+component_dir="$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
+
+if [ -z "${working_dir}" ]; then
+  working_dir=/tmp
+  while [[ -e ${working_dir} ]]; do
+    working_dir=/tmp/hbase-srctarball-test-${RANDOM}.${RANDOM}
+  done
+  mkdir "${working_dir}"
+else
+  # absolutes please
+  working_dir="$(cd "$(dirname "${working_dir}")"; pwd)/$(basename "${working_dir}")"
+  if [ ! -d "${working_dir}" ]; then
+    echo "passed working directory '${working_dir}' must already exist."
+    exit 1
+  fi
+fi
+
+echo "You'll find logs and temp files in ${working_dir}"
+
+if [ -z "${unpack_dir}" ]; then
+  unpack_dir="${working_dir}/unpacked_src_tarball"
+  mkdir "${unpack_dir}"
+else
+  # absolutes please
+  unpack_dir="$(cd "$(dirname "${unpack_dir}")"; pwd)/$(basename "${unpack_dir}")"
+  if [ ! -d "${unpack_dir}" ]; then
+    echo "passed directory for unpacking the source tarball '${unpack_dir}' must already exist."
+    exit 1
+  fi
+  rm -rf "${unpack_dir:?}/*"
+fi
+
+if [ -z "${m2_initial}" ]; then
+  m2_initial="${working_dir}/.m2-initial"
+  mkdir "${m2_initial}"
+else
+  # absolutes please
+  m2_initial="$(cd "$(dirname "${m2_initial}")"; pwd)/$(basename "${m2_initial}")"
+  if [ ! -d "${m2_initial}" ]; then
+    echo "passed directory for storing the initial build's maven repo  '${m2_initial}' " \
+        "must already exist."
+    exit 1
+  fi
+fi
+
+if [ -z "${m2_tarbuild}" ]; then
+  m2_tarbuild="${working_dir}/.m2-tarbuild"
+  mkdir "${m2_tarbuild}"
+else
+  # absolutes please
+  m2_tarbuild="$(cd "$(dirname "${m2_tarbuild}")"; pwd)/$(basename "${m2_tarbuild}")"
+  if [ ! -d "${m2_tarbuild}" ]; then
+    echo "passed directory for storing the build from src tarball's maven repo  '${m2_tarbuild}' " \
+        "must already exist."
+    exit 1
+  fi
+fi
+
+# This is meant to mimic what a release manager will do to create RCs.
+# See http://hbase.apache.org/book.html#maven.release
+
+echo "Maven details, in case our JDK doesn't match expectations:"
+mvn --version --offline | tee "${working_dir}/maven_version"
+
+echo "Do a clean building of the source artifact using code in ${component_dir}"
+cd "${component_dir}"
+echo "Clean..."
+mvn --batch-mode -DskipTests clean >"${working_dir}/component_clean_before.log" 2>&1
+echo "Step 3 Build the source tarball"
+mvn -Prelease --batch-mode -Dmaven.repo.local="${m2_initial}" install -DskipTests assembly:single \
+    -Dassembly.file=hbase-assembly/src/main/assembly/src.xml \
+    >"${working_dir}/component_build_src_tarball.log" 2>&1
+
+cd "${unpack_dir}"
+echo "Unpack the source tarball"
+tar --strip-components=1 -xzf "${component_dir}"/hbase-assembly/target/hbase-*-src.tar.gz \
+    >"${working_dir}/srctarball_unpack.log" 2>&1
+echo "Building from source artifact."
+mvn -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" clean install \
+    assembly:single >"${working_dir}/srctarball_install.log" 2>&1
+
+echo "Clean up after checking ability to build."
+mvn -DskipTests --batch-mode clean >"${working_dir}/srctarball_clean.log" 2>&1
+
+
+cd "${component_dir}"
+echo "Clean up the source checkout"
+mvn --batch-mode -DskipTests clean >"${working_dir}/component_clean_after.log" 2>&1
+echo "Diff against source tree"
+diff --binary --recursive . "${unpack_dir}" >"${working_dir}/diff_output" || true
+
+cd "${working_dir}"
+# expectation check largely based on HBASE-14952
+echo "Checking against things we don't expect to include in the source tarball (git related, hbase-native-client, etc.)"
+cat >known_excluded <<END
+Only in .: .git
+Only in .: .gitattributes
+Only in .: .gitignore
+Only in .: hbase-native-client
+END
+if ! diff known_excluded diff_output >"${working_dir}/unexpected.diff" ; then
+  echo "Any output here are unexpected differences between the source artifact we'd make for an RC and the current branch."
+  echo "The expected differences are on the < side and the current differences are on the > side."
+  echo "In a given set of differences, '.' refers to the branch in the repo and 'unpacked_src_tarball' refers to what we pulled out of the tarball."
+  diff known_excluded diff_output
+else
+  echo "Everything looks as expected."
+fi
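
As a usage note, the script can also be exercised against a local checkout outside Jenkins. A hypothetical invocation mirroring the Jenkinsfile stage above; all paths here are examples only.

    mkdir -p /tmp/srctarball-test/{work,unpack,m2-repo,m2-src}
    ./dev-support/hbase_nightly_source-artifact.sh \
        --intermediate-file-dir /tmp/srctarball-test/work \
        --unpack-temp-dir /tmp/srctarball-test/unpack \
        --maven-m2-initial /tmp/srctarball-test/m2-repo \
        --maven-m2-src-build /tmp/srctarball-test/m2-src \
        /path/to/hbase-checkout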


[11/13] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

Posted by bu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
deleted file mode 100644
index c2b5944..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
+++ /dev/null
@@ -1,1387 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import com.google.common.annotations.VisibleForTesting;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.io.PrintStream;
-import java.net.BindException;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.http.conf.ConfServlet;
-import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
-import org.apache.hadoop.hbase.http.log.LogLevel;
-import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hadoop.hbase.util.ReflectionUtils;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.util.Shell;
-
-import org.eclipse.jetty.http.HttpVersion;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.Handler;
-import org.eclipse.jetty.server.HttpConfiguration;
-import org.eclipse.jetty.server.HttpConnectionFactory;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.server.SecureRequestCustomizer;
-import org.eclipse.jetty.server.SslConnectionFactory;
-import org.eclipse.jetty.server.handler.ContextHandlerCollection;
-import org.eclipse.jetty.server.handler.HandlerCollection;
-import org.eclipse.jetty.server.RequestLog;
-import org.eclipse.jetty.server.handler.RequestLogHandler;
-import org.eclipse.jetty.servlet.FilterMapping;
-import org.eclipse.jetty.servlet.ServletHandler;
-import org.eclipse.jetty.servlet.FilterHolder;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.DefaultServlet;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.eclipse.jetty.util.MultiException;
-import org.eclipse.jetty.util.ssl.SslContextFactory;
-import org.eclipse.jetty.util.thread.QueuedThreadPool;
-import org.eclipse.jetty.webapp.WebAppContext;
-
-import org.glassfish.jersey.server.ResourceConfig;
-import org.glassfish.jersey.servlet.ServletContainer;
-
-/**
- * Create a Jetty embedded server to answer http requests. The primary goal
- * is to serve up status information for the server.
- * There are three contexts:
- *   "/logs/" -&gt; points to the log directory
- *   "/static/" -&gt; points to common static files (src/webapps/static)
- *   "/" -&gt; the jsp server code from (src/webapps/&lt;name&gt;)
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class HttpServer implements FilterContainer {
-  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-  private static final String EMPTY_STRING = "";
-
-  private static final int DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K
-
-  static final String FILTER_INITIALIZERS_PROPERTY
-      = "hbase.http.filter.initializers";
-  static final String HTTP_MAX_THREADS = "hbase.http.max.threads";
-
-  public static final String HTTP_UI_AUTHENTICATION = "hbase.security.authentication.ui";
-  static final String HTTP_AUTHENTICATION_PREFIX = "hbase.security.authentication.";
-  static final String HTTP_SPNEGO_AUTHENTICATION_PREFIX = HTTP_AUTHENTICATION_PREFIX
-      + "spnego.";
-  static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX = "kerberos.principal";
-  public static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY =
-      HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX;
-  static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX = "kerberos.keytab";
-  public static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY =
-      HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX;
-  static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX = "kerberos.name.rules";
-  public static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_KEY =
-      HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX;
-  static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX =
-      "signature.secret.file";
-  public static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY =
-      HTTP_AUTHENTICATION_PREFIX + HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX;
-
-  // The ServletContext attribute where the daemon Configuration
-  // gets stored.
-  public static final String CONF_CONTEXT_ATTRIBUTE = "hbase.conf";
-  public static final String ADMINS_ACL = "admins.acl";
-  public static final String BIND_ADDRESS = "bind.address";
-  public static final String SPNEGO_FILTER = "SpnegoFilter";
-  public static final String NO_CACHE_FILTER = "NoCacheFilter";
-  public static final String APP_DIR = "webapps";
-
-  private final AccessControlList adminsAcl;
-
-  protected final Server webServer;
-  protected String appDir;
-  protected String logDir;
-
-  private static class ListenerInfo {
-    /**
-     * Boolean flag to determine whether the HTTP server should clean up the
-     * listener in stop().
-     */
-    private final boolean isManaged;
-    private final ServerConnector listener;
-    private ListenerInfo(boolean isManaged, ServerConnector listener) {
-      this.isManaged = isManaged;
-      this.listener = listener;
-    }
-  }
-
-  private final List<ListenerInfo> listeners = Lists.newArrayList();
-
-  @VisibleForTesting
-  public List<ServerConnector> getServerConnectors() {
-    return listeners.stream().map(info -> info.listener).collect(Collectors.toList());
-  }
-
-  protected final WebAppContext webAppContext;
-  protected final boolean findPort;
-  protected final Map<ServletContextHandler, Boolean> defaultContexts = new HashMap<>();
-  protected final List<String> filterNames = new ArrayList<>();
-  static final String STATE_DESCRIPTION_ALIVE = " - alive";
-  static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
-
-  /**
-   * Class to construct instances of HTTP server with specific options.
-   */
-  public static class Builder {
-    private ArrayList<URI> endpoints = Lists.newArrayList();
-    private Configuration conf;
-    private String[] pathSpecs;
-    private AccessControlList adminsAcl;
-    private boolean securityEnabled = false;
-    private String usernameConfKey;
-    private String keytabConfKey;
-    private boolean needsClientAuth;
-
-    private String hostName;
-    private String appDir = APP_DIR;
-    private String logDir;
-    private boolean findPort;
-
-    private String trustStore;
-    private String trustStorePassword;
-    private String trustStoreType;
-
-    private String keyStore;
-    private String keyStorePassword;
-    private String keyStoreType;
-
-    // The -keypass option in keytool
-    private String keyPassword;
-
-    private String kerberosNameRulesKey;
-    private String signatureSecretFileKey;
-
-    @Deprecated
-    private String name;
-    @Deprecated
-    private String bindAddress;
-    @Deprecated
-    private int port = -1;
-
-    /**
-     * Add an endpoint that the HTTP server should listen to.
-     *
-     * @param endpoint
-     *          the endpoint that the HTTP server should listen to. The
-     *          scheme specifies the protocol (i.e. HTTP / HTTPS), the host
-     *          specifies the binding address, and the port specifies the
-     *          listening port. Unspecified or zero port means that the server
-     *          can listen to any port.
-     */
-    public Builder addEndpoint(URI endpoint) {
-      endpoints.add(endpoint);
-      return this;
-    }
-
-    /**
-     * Set the hostname of the http server. The host name is used to resolve the
-     * _HOST field in Kerberos principals. The hostname of the first listener
-     * will be used if the name is unspecified.
-     */
-    public Builder hostName(String hostName) {
-      this.hostName = hostName;
-      return this;
-    }
-
-    public Builder trustStore(String location, String password, String type) {
-      this.trustStore = location;
-      this.trustStorePassword = password;
-      this.trustStoreType = type;
-      return this;
-    }
-
-    public Builder keyStore(String location, String password, String type) {
-      this.keyStore = location;
-      this.keyStorePassword = password;
-      this.keyStoreType = type;
-      return this;
-    }
-
-    public Builder keyPassword(String password) {
-      this.keyPassword = password;
-      return this;
-    }
-
-    /**
-     * Specify whether the server should authorize the client in SSL
-     * connections.
-     */
-    public Builder needsClientAuth(boolean value) {
-      this.needsClientAuth = value;
-      return this;
-    }
-
-    /**
-     * Use setAppDir() instead.
-     */
-    @Deprecated
-    public Builder setName(String name){
-      this.name = name;
-      return this;
-    }
-
-    /**
-     * Use addEndpoint() instead.
-     */
-    @Deprecated
-    public Builder setBindAddress(String bindAddress){
-      this.bindAddress = bindAddress;
-      return this;
-    }
-
-    /**
-     * Use addEndpoint() instead.
-     */
-    @Deprecated
-    public Builder setPort(int port) {
-      this.port = port;
-      return this;
-    }
-
-    public Builder setFindPort(boolean findPort) {
-      this.findPort = findPort;
-      return this;
-    }
-
-    public Builder setConf(Configuration conf) {
-      this.conf = conf;
-      return this;
-    }
-
-    public Builder setPathSpec(String[] pathSpec) {
-      this.pathSpecs = pathSpec;
-      return this;
-    }
-
-    public Builder setACL(AccessControlList acl) {
-      this.adminsAcl = acl;
-      return this;
-    }
-
-    public Builder setSecurityEnabled(boolean securityEnabled) {
-      this.securityEnabled = securityEnabled;
-      return this;
-    }
-
-    public Builder setUsernameConfKey(String usernameConfKey) {
-      this.usernameConfKey = usernameConfKey;
-      return this;
-    }
-
-    public Builder setKeytabConfKey(String keytabConfKey) {
-      this.keytabConfKey = keytabConfKey;
-      return this;
-    }
-
-    public Builder setKerberosNameRulesKey(String kerberosNameRulesKey) {
-      this.kerberosNameRulesKey = kerberosNameRulesKey;
-      return this;
-    }
-
-    public Builder setSignatureSecretFileKey(String signatureSecretFileKey) {
-      this.signatureSecretFileKey = signatureSecretFileKey;
-      return this;
-    }
-
-    public Builder setAppDir(String appDir) {
-      this.appDir = appDir;
-      return this;
-    }
-
-    public Builder setLogDir(String logDir) {
-      this.logDir = logDir;
-      return this;
-    }
-
-    public HttpServer build() throws IOException {
-
-      // Do we still need to assert a non-null name if it is deprecated?
-      if (this.name == null) {
-        throw new HadoopIllegalArgumentException("name is not set");
-      }
-
-      // Make the behavior compatible with deprecated interfaces
-      if (bindAddress != null && port != -1) {
-        try {
-          endpoints.add(0, new URI("http", "", bindAddress, port, "", "", ""));
-        } catch (URISyntaxException e) {
-          throw new HadoopIllegalArgumentException("Invalid endpoint: " + e);
-        }
-      }
-
-      if (endpoints.isEmpty()) {
-        throw new HadoopIllegalArgumentException("No endpoints specified");
-      }
-
-      if (hostName == null) {
-        hostName = endpoints.get(0).getHost();
-      }
-
-      if (this.conf == null) {
-        conf = new Configuration();
-      }
-
-      HttpServer server = new HttpServer(this);
-
-      if (this.securityEnabled) {
-        server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey, kerberosNameRulesKey,
-            signatureSecretFileKey);
-      }
-
-      for (URI ep : endpoints) {
-        ServerConnector listener = null;
-        String scheme = ep.getScheme();
-        HttpConfiguration httpConfig = new HttpConfiguration();
-        httpConfig.setSecureScheme("https");
-        httpConfig.setHeaderCacheSize(DEFAULT_MAX_HEADER_SIZE);
-        httpConfig.setResponseHeaderSize(DEFAULT_MAX_HEADER_SIZE);
-        httpConfig.setRequestHeaderSize(DEFAULT_MAX_HEADER_SIZE);
-
-        if ("http".equals(scheme)) {
-          listener = new ServerConnector(server.webServer, new HttpConnectionFactory(httpConfig));
-        } else if ("https".equals(scheme)) {
-          HttpConfiguration httpsConfig = new HttpConfiguration(httpConfig);
-          httpsConfig.addCustomizer(new SecureRequestCustomizer());
-          SslContextFactory sslCtxFactory = new SslContextFactory();
-          sslCtxFactory.setNeedClientAuth(needsClientAuth);
-          sslCtxFactory.setKeyManagerPassword(keyPassword);
-
-          if (keyStore != null) {
-            sslCtxFactory.setKeyStorePath(keyStore);
-            sslCtxFactory.setKeyStoreType(keyStoreType);
-            sslCtxFactory.setKeyStorePassword(keyStorePassword);
-          }
-
-          if (trustStore != null) {
-            sslCtxFactory.setTrustStorePath(trustStore);
-            sslCtxFactory.setTrustStoreType(trustStoreType);
-            sslCtxFactory.setTrustStorePassword(trustStorePassword);
-
-          }
-          listener = new ServerConnector(server.webServer, new SslConnectionFactory(sslCtxFactory,
-              HttpVersion.HTTP_1_1.toString()), new HttpConnectionFactory(httpsConfig));
-        } else {
-          throw new HadoopIllegalArgumentException(
-              "unknown scheme for endpoint:" + ep);
-        }
-
-        // default settings for connector
-        listener.setAcceptQueueSize(128);
-        if (Shell.WINDOWS) {
-          // result of setting the SO_REUSEADDR flag is different on Windows
-          // http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
-          // without this 2 NN's can start on the same machine and listen on
-          // the same port with indeterminate routing of incoming requests to them
-          listener.setReuseAddress(false);
-        }
-
-        listener.setHost(ep.getHost());
-        listener.setPort(ep.getPort() == -1 ? 0 : ep.getPort());
-        server.addManagedListener(listener);
-      }
-
-      server.loadListeners();
-      return server;
-
-    }
-
-  }
-
-  /** Same as this(name, bindAddress, port, findPort, new Configuration()); */
-  @Deprecated
-  public HttpServer(String name, String bindAddress, int port, boolean findPort
-      ) throws IOException {
-    this(name, bindAddress, port, findPort, new Configuration());
-  }
-
-  /**
-   * Create a status server on the given port. Allows you to specify the
-   * path specifications that this server will be serving so that they will be
-   * added to the filters properly.
-   *
-   * @param name The name of the server
-   * @param bindAddress The address for this server
-   * @param port The port to use on the server
-   * @param findPort whether the server should start at the given port and
-   *        increment by 1 until it finds a free port.
-   * @param conf Configuration
-   * @param pathSpecs Path specifications that this httpserver will be serving.
-   *        These will be added to any filters.
-   */
-  @Deprecated
-  public HttpServer(String name, String bindAddress, int port,
-      boolean findPort, Configuration conf, String[] pathSpecs) throws IOException {
-    this(name, bindAddress, port, findPort, conf, null, pathSpecs);
-  }
-
-  /**
-   * Create a status server on the given port.
-   * The jsp scripts are taken from src/webapps/&lt;name&gt;.
-   * @param name The name of the server
-   * @param bindAddress The address for this server
-   * @param port The port to use on the server
-   * @param findPort whether the server should start at the given port and
-   *        increment by 1 until it finds a free port.
-   * @param conf Configuration
-   */
-  @Deprecated
-  public HttpServer(String name, String bindAddress, int port,
-      boolean findPort, Configuration conf) throws IOException {
-    this(name, bindAddress, port, findPort, conf, null, null);
-  }
-
-  @Deprecated
-  public HttpServer(String name, String bindAddress, int port,
-      boolean findPort, Configuration conf, AccessControlList adminsAcl)
-      throws IOException {
-    this(name, bindAddress, port, findPort, conf, adminsAcl, null);
-  }
-
-  /**
-   * Create a status server on the given port.
-   * The jsp scripts are taken from src/webapps/&lt;name&gt;.
-   * @param name The name of the server
-   * @param bindAddress The address for this server
-   * @param port The port to use on the server
-   * @param findPort whether the server should start at the given port and
-   *        increment by 1 until it finds a free port.
-   * @param conf Configuration
-   * @param adminsAcl {@link AccessControlList} of the admins
-   * @param pathSpecs Path specifications that this httpserver will be serving.
-   *        These will be added to any filters.
-   */
-  @Deprecated
-  public HttpServer(String name, String bindAddress, int port,
-      boolean findPort, Configuration conf, AccessControlList adminsAcl,
-      String[] pathSpecs) throws IOException {
-    this(new Builder().setName(name)
-        .addEndpoint(URI.create("http://" + bindAddress + ":" + port))
-        .setFindPort(findPort).setConf(conf).setACL(adminsAcl)
-        .setPathSpec(pathSpecs));
-  }
-
-  private HttpServer(final Builder b) throws IOException {
-    this.appDir = b.appDir;
-    this.logDir = b.logDir;
-    final String appDir = getWebAppsPath(b.name);
-
-    int maxThreads = b.conf.getInt(HTTP_MAX_THREADS, 16);
-    // If HTTP_MAX_THREADS is less than or equal to 0, QueuedThreadPool() will use the
-    // default value (currently 200).
-    QueuedThreadPool threadPool = maxThreads <= 0 ? new QueuedThreadPool()
-        : new QueuedThreadPool(maxThreads);
-    threadPool.setDaemon(true);
-    this.webServer = new Server(threadPool);
-
-    this.adminsAcl = b.adminsAcl;
-    this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir);
-    this.findPort = b.findPort;
-    initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs);
-  }
-
-  private void initializeWebServer(String name, String hostName,
-      Configuration conf, String[] pathSpecs)
-      throws FileNotFoundException, IOException {
-
-    Preconditions.checkNotNull(webAppContext);
-
-    HandlerCollection handlerCollection = new HandlerCollection();
-
-    ContextHandlerCollection contexts = new ContextHandlerCollection();
-    RequestLog requestLog = HttpRequestLog.getRequestLog(name);
-
-    if (requestLog != null) {
-      RequestLogHandler requestLogHandler = new RequestLogHandler();
-      requestLogHandler.setRequestLog(requestLog);
-      handlerCollection.addHandler(requestLogHandler);
-    }
-
-    final String appDir = getWebAppsPath(name);
-
-    handlerCollection.addHandler(contexts);
-    handlerCollection.addHandler(webAppContext);
-
-    webServer.setHandler(handlerCollection);
-
-    addDefaultApps(contexts, appDir, conf);
-
-    addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
-    Map<String, String> params = new HashMap<>();
-    params.put("xframeoptions", conf.get("hbase.http.filter.xframeoptions.mode", "DENY"));
-    addGlobalFilter("clickjackingprevention",
-            ClickjackingPreventionFilter.class.getName(), params);
-    final FilterInitializer[] initializers = getFilterInitializers(conf);
-    if (initializers != null) {
-      conf = new Configuration(conf);
-      conf.set(BIND_ADDRESS, hostName);
-      for (FilterInitializer c : initializers) {
-        c.initFilter(this, conf);
-      }
-    }
-
-    addDefaultServlets();
-
-    if (pathSpecs != null) {
-      for (String path : pathSpecs) {
-        LOG.info("adding path spec: " + path);
-        addFilterPathMapping(path, webAppContext);
-      }
-    }
-  }
-
-  private void addManagedListener(ServerConnector connector) {
-    listeners.add(new ListenerInfo(true, connector));
-  }
-
-  private static WebAppContext createWebAppContext(String name,
-      Configuration conf, AccessControlList adminsAcl, final String appDir) {
-    WebAppContext ctx = new WebAppContext();
-    ctx.setDisplayName(name);
-    ctx.setContextPath("/");
-    ctx.setWar(appDir + "/" + name);
-    ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
-    ctx.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
-    addNoCacheFilter(ctx);
-    return ctx;
-  }
-
-  private static void addNoCacheFilter(WebAppContext ctxt) {
-    defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(),
-        Collections.<String, String> emptyMap(), new String[] { "/*" });
-  }
-
-  /** Get an array of FilterInitializers specified in the conf */
-  private static FilterInitializer[] getFilterInitializers(Configuration conf) {
-    if (conf == null) {
-      return null;
-    }
-
-    Class<?>[] classes = conf.getClasses(FILTER_INITIALIZERS_PROPERTY);
-    if (classes == null) {
-      return null;
-    }
-
-    FilterInitializer[] initializers = new FilterInitializer[classes.length];
-    for(int i = 0; i < classes.length; i++) {
-      initializers[i] = (FilterInitializer)ReflectionUtils.newInstance(classes[i]);
-    }
-    return initializers;
-  }
-
-  /**
-   * Add default apps.
-   * @param appDir The application directory
-   * @throws IOException
-   */
-  protected void addDefaultApps(ContextHandlerCollection parent,
-      final String appDir, Configuration conf) throws IOException {
-    // set up the context for "/logs/" if "hadoop.log.dir" property is defined.
-    String logDir = this.logDir;
-    if (logDir == null) {
-      logDir = System.getProperty("hadoop.log.dir");
-    }
-    if (logDir != null) {
-      ServletContextHandler logContext = new ServletContextHandler(parent, "/logs");
-      logContext.addServlet(AdminAuthorizedServlet.class, "/*");
-      logContext.setResourceBase(logDir);
-
-      if (conf.getBoolean(
-          ServerConfigurationKeys.HBASE_JETTY_LOGS_SERVE_ALIASES,
-          ServerConfigurationKeys.DEFAULT_HBASE_JETTY_LOGS_SERVE_ALIASES)) {
-        Map<String, String> params = logContext.getInitParams();
-        params.put("org.mortbay.jetty.servlet.Default.aliases", "true");
-      }
-      logContext.setDisplayName("logs");
-      setContextAttributes(logContext, conf);
-      addNoCacheFilter(webAppContext);
-      defaultContexts.put(logContext, true);
-    }
-    // set up the context for "/static/*"
-    ServletContextHandler staticContext = new ServletContextHandler(parent, "/static");
-    staticContext.setResourceBase(appDir + "/static");
-    staticContext.addServlet(DefaultServlet.class, "/*");
-    staticContext.setDisplayName("static");
-    setContextAttributes(staticContext, conf);
-    defaultContexts.put(staticContext, true);
-  }
-
-  private void setContextAttributes(ServletContextHandler context, Configuration conf) {
-    context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
-    context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
-  }
-
-  /**
-   * Add default servlets.
-   */
-  protected void addDefaultServlets() {
-    // set up default servlets
-    addServlet("stacks", "/stacks", StackServlet.class);
-    addServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
-
-    // Hadoop3 has moved completely to metrics2, and dropped support for Metrics v1's
-    // MetricsServlet (see HADOOP-12504). We use reflection to load it when running against hadoop2.
-    // Remove when we drop support for hbase on hadoop2.x.
-    try {
-      Class clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet");
-      addServlet("metrics", "/metrics", clz);
-    } catch (Exception e) {
-      // do nothing
-    }
-
-    addServlet("jmx", "/jmx", JMXJsonServlet.class);
-    addServlet("conf", "/conf", ConfServlet.class);
-  }
-
-  /**
-   * Set a value in the webapp context. These values are available to the jsp
-   * pages as "application.getAttribute(name)".
-   * @param name The name of the attribute
-   * @param value The value of the attribute
-   */
-  public void setAttribute(String name, Object value) {
-    webAppContext.setAttribute(name, value);
-  }
-
-  /**
-   * Add a Jersey resource package.
-   * @param packageName The Java package name containing the Jersey resource.
-   * @param pathSpec The path spec for the servlet
-   */
-  public void addJerseyResourcePackage(final String packageName,
-      final String pathSpec) {
-    LOG.info("addJerseyResourcePackage: packageName=" + packageName
-        + ", pathSpec=" + pathSpec);
-
-    ResourceConfig application = new ResourceConfig().packages(packageName);
-    final ServletHolder sh = new ServletHolder(new ServletContainer(application));
-    webAppContext.addServlet(sh, pathSpec);
-  }
-
-  /**
-   * Add a servlet in the server.
-   * @param name The name of the servlet (can be passed as null)
-   * @param pathSpec The path spec for the servlet
-   * @param clazz The servlet class
-   */
-  public void addServlet(String name, String pathSpec,
-      Class<? extends HttpServlet> clazz) {
-    addInternalServlet(name, pathSpec, clazz, false);
-    addFilterPathMapping(pathSpec, webAppContext);
-  }
-
-  /**
-   * Add an internal servlet in the server.
-   * Note: This method is to be used for adding servlets that facilitate
-   * internal communication and not for user facing functionality. For
-   * servlets added using this method, filters are not enabled.
-   *
-   * @param name The name of the servlet (can be passed as null)
-   * @param pathSpec The path spec for the servlet
-   * @param clazz The servlet class
-   */
-  public void addInternalServlet(String name, String pathSpec,
-      Class<? extends HttpServlet> clazz) {
-    addInternalServlet(name, pathSpec, clazz, false);
-  }
-
-  /**
-   * Add an internal servlet in the server, specifying whether or not to
-   * protect with Kerberos authentication.
-   * Note: This method is to be used for adding servlets that facilitate
-   * internal communication and not for user facing functionality. For
-   * servlets added using this method, filters (except internal Kerberos
-   * filters) are not enabled.
-   *
-   * @param name The name of the servlet (can be passed as null)
-   * @param pathSpec The path spec for the servlet
-   * @param clazz The servlet class
-   * @param requireAuth Require Kerberos authenticate to access servlet
-   */
-  public void addInternalServlet(String name, String pathSpec,
-      Class<? extends HttpServlet> clazz, boolean requireAuth) {
-    ServletHolder holder = new ServletHolder(clazz);
-    if (name != null) {
-      holder.setName(name);
-    }
-    webAppContext.addServlet(holder, pathSpec);
-
-    if (requireAuth && UserGroupInformation.isSecurityEnabled()) {
-      LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
-      ServletHandler handler = webAppContext.getServletHandler();
-      FilterMapping fmap = new FilterMapping();
-      fmap.setPathSpec(pathSpec);
-      fmap.setFilterName(SPNEGO_FILTER);
-      fmap.setDispatches(FilterMapping.ALL);
-      handler.addFilterMapping(fmap);
-    }
-  }
-
-  @Override
-  public void addFilter(String name, String classname,
-      Map<String, String> parameters) {
-
-    final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
-    defineFilter(webAppContext, name, classname, parameters, USER_FACING_URLS);
-    LOG.info("Added filter " + name + " (class=" + classname
-        + ") to context " + webAppContext.getDisplayName());
-    final String[] ALL_URLS = { "/*" };
-    for (Map.Entry<ServletContextHandler, Boolean> e : defaultContexts.entrySet()) {
-      if (e.getValue()) {
-        ServletContextHandler handler = e.getKey();
-        defineFilter(handler, name, classname, parameters, ALL_URLS);
-        LOG.info("Added filter " + name + " (class=" + classname
-            + ") to context " + handler.getDisplayName());
-      }
-    }
-    filterNames.add(name);
-  }
-
-  @Override
-  public void addGlobalFilter(String name, String classname,
-      Map<String, String> parameters) {
-    final String[] ALL_URLS = { "/*" };
-    defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
-    for (ServletContextHandler ctx : defaultContexts.keySet()) {
-      defineFilter(ctx, name, classname, parameters, ALL_URLS);
-    }
-    LOG.info("Added global filter '" + name + "' (class=" + classname + ")");
-  }
-
-  /**
-   * Define a filter for a context and set up default url mappings.
-   */
-  public static void defineFilter(ServletContextHandler handler, String name,
-      String classname, Map<String,String> parameters, String[] urls) {
-
-    FilterHolder holder = new FilterHolder();
-    holder.setName(name);
-    holder.setClassName(classname);
-    if (parameters != null) {
-      holder.setInitParameters(parameters);
-    }
-    FilterMapping fmap = new FilterMapping();
-    fmap.setPathSpecs(urls);
-    fmap.setDispatches(FilterMapping.ALL);
-    fmap.setFilterName(name);
-    handler.getServletHandler().addFilter(holder, fmap);
-  }
-
-  /**
-   * Add the path spec to the filter path mapping.
-   * @param pathSpec The path spec
-   * @param webAppCtx The WebApplicationContext to add to
-   */
-  protected void addFilterPathMapping(String pathSpec,
-      WebAppContext webAppCtx) {
-    for(String name : filterNames) {
-      FilterMapping fmap = new FilterMapping();
-      fmap.setPathSpec(pathSpec);
-      fmap.setFilterName(name);
-      fmap.setDispatches(FilterMapping.ALL);
-      webAppCtx.getServletHandler().addFilterMapping(fmap);
-    }
-  }
-
-  /**
-   * Get the value in the webapp context.
-   * @param name The name of the attribute
-   * @return The value of the attribute
-   */
-  public Object getAttribute(String name) {
-    return webAppContext.getAttribute(name);
-  }
-
-  public WebAppContext getWebAppContext(){
-    return this.webAppContext;
-  }
-
-  public String getWebAppsPath(String appName) throws FileNotFoundException {
-    return getWebAppsPath(this.appDir, appName);
-  }
-
-  /**
-   * Get the pathname to the webapps files.
-   * @param appName eg "secondary" or "datanode"
-   * @return the pathname as a URL
-   * @throws FileNotFoundException if 'webapps' directory cannot be found on CLASSPATH.
-   */
-  protected String getWebAppsPath(String webapps, String appName) throws FileNotFoundException {
-    URL url = getClass().getClassLoader().getResource(webapps + "/" + appName);
-    if (url == null) {
-      throw new FileNotFoundException(webapps + "/" + appName + " not found in CLASSPATH");
-    }
-    String urlString = url.toString();
-    return urlString.substring(0, urlString.lastIndexOf('/'));
-  }
-
-  /**
-   * Get the port that the server is on
-   * @return the port
-   */
-  @Deprecated
-  public int getPort() {
-    return ((ServerConnector)webServer.getConnectors()[0]).getLocalPort();
-  }
-
-  /**
-   * Get the address that corresponds to a particular connector.
-   *
-   * @return the corresponding address for the connector, or null if there's no
-   *         such connector or the connector is not bounded.
-   */
-  public InetSocketAddress getConnectorAddress(int index) {
-    Preconditions.checkArgument(index >= 0);
-    if (index >= webServer.getConnectors().length) {
-      return null;
-    }
-
-    ServerConnector c = (ServerConnector)webServer.getConnectors()[index];
-    if (c.getLocalPort() == -1 || c.getLocalPort() == -2) {
-      // -1 if the connector has not been opened
-      // -2 if it has been closed
-      return null;
-    }
-
-    return new InetSocketAddress(c.getHost(), c.getLocalPort());
-  }
-
-  /**
-   * Set the min, max number of worker threads (simultaneous connections).
-   */
-  public void setThreads(int min, int max) {
-    QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool();
-    pool.setMinThreads(min);
-    pool.setMaxThreads(max);
-  }
-
-  private void initSpnego(Configuration conf, String hostName,
-      String usernameConfKey, String keytabConfKey, String kerberosNameRuleKey,
-      String signatureSecretKeyFileKey) throws IOException {
-    Map<String, String> params = new HashMap<>();
-    String principalInConf = getOrEmptyString(conf, usernameConfKey);
-    if (!principalInConf.isEmpty()) {
-      params.put(HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX, SecurityUtil.getServerPrincipal(
-          principalInConf, hostName));
-    }
-    String httpKeytab = getOrEmptyString(conf, keytabConfKey);
-    if (!httpKeytab.isEmpty()) {
-      params.put(HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX, httpKeytab);
-    }
-    String kerberosNameRule = getOrEmptyString(conf, kerberosNameRuleKey);
-    if (!kerberosNameRule.isEmpty()) {
-      params.put(HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX, kerberosNameRule);
-    }
-    String signatureSecretKeyFile = getOrEmptyString(conf, signatureSecretKeyFileKey);
-    if (!signatureSecretKeyFile.isEmpty()) {
-      params.put(HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX,
-          signatureSecretKeyFile);
-    }
-    params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
-
-    // Verify that the required options were provided
-    if (isMissing(params.get(HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX)) ||
-            isMissing(params.get(HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX))) {
-      throw new IllegalArgumentException(usernameConfKey + " and "
-          + keytabConfKey + " are both required in the configuration "
-          + "to enable SPNEGO/Kerberos authentication for the Web UI");
-    }
-
-    addGlobalFilter(SPNEGO_FILTER, AuthenticationFilter.class.getName(), params);
-  }
-
-  /**
-   * Returns true if the argument is null or consists only of whitespace.
-   */
-  private boolean isMissing(String value) {
-    if (null == value) {
-      return true;
-    }
-    return value.trim().isEmpty();
-  }
-
-  /**
-   * Extracts the value for the given key from the configuration, or returns a string of
-   * zero length.
-   */
-  private String getOrEmptyString(Configuration conf, String key) {
-    if (null == key) {
-      return EMPTY_STRING;
-    }
-    final String value = conf.get(key.trim());
-    return null == value ? EMPTY_STRING : value;
-  }
-
-  /**
-   * Start the server. Does not wait for the server to start.
-   */
-  public void start() throws IOException {
-    try {
-      try {
-        openListeners();
-        webServer.start();
-      } catch (IOException ex) {
-        LOG.info("HttpServer.start() threw a non Bind IOException", ex);
-        throw ex;
-      } catch (MultiException ex) {
-        LOG.info("HttpServer.start() threw a MultiException", ex);
-        throw ex;
-      }
-      // Make sure there are no handler failures.
-      Handler[] handlers = webServer.getHandlers();
-      for (int i = 0; i < handlers.length; i++) {
-        if (handlers[i].isFailed()) {
-          throw new IOException(
-              "Problem in starting http server. Server handlers failed");
-        }
-      }
-      // Make sure there are no errors initializing the context.
-      Throwable unavailableException = webAppContext.getUnavailableException();
-      if (unavailableException != null) {
-        // Have to stop the webserver, or else its non-daemon threads
-        // will hang forever.
-        webServer.stop();
-        throw new IOException("Unable to initialize WebAppContext",
-            unavailableException);
-      }
-    } catch (IOException e) {
-      throw e;
-    } catch (InterruptedException e) {
-      throw (IOException) new InterruptedIOException(
-          "Interrupted while starting HTTP server").initCause(e);
-    } catch (Exception e) {
-      throw new IOException("Problem starting http server", e);
-    }
-  }
-
-  private void loadListeners() {
-    for (ListenerInfo li : listeners) {
-      webServer.addConnector(li.listener);
-    }
-  }
-
-  /**
-   * Open the main listeners for the server.
-   * @throws Exception if opening any listener fails
-   */
-  @VisibleForTesting
-  void openListeners() throws Exception {
-    for (ListenerInfo li : listeners) {
-      ServerConnector listener = li.listener;
-      if (!li.isManaged || (li.listener.getLocalPort() != -1 && li.listener.getLocalPort() != -2)) {
-        // This listener is either started externally or is already open, so skip it
-        continue;
-      }
-      int port = listener.getPort();
-      while (true) {
-        // jetty has a bug where you can't reopen a listener that previously
-        // failed to open w/o issuing a close first, even if the port is changed
-        try {
-          listener.close();
-          listener.open();
-          LOG.info("Jetty bound to port " + listener.getLocalPort());
-          break;
-        } catch (BindException ex) {
-          if (port == 0 || !findPort) {
-            BindException be = new BindException("Port in use: "
-                + listener.getHost() + ":" + listener.getPort());
-            be.initCause(ex);
-            throw be;
-          }
-        }
-        // try the next port number
-        listener.setPort(++port);
-        Thread.sleep(100);
-      }
-    }
-  }
-
-  /**
-   * stop the server
-   */
-  public void stop() throws Exception {
-    MultiException exception = null;
-    for (ListenerInfo li : listeners) {
-      if (!li.isManaged) {
-        continue;
-      }
-
-      try {
-        li.listener.close();
-      } catch (Exception e) {
-        LOG.error(
-            "Error while stopping listener for webapp"
-                + webAppContext.getDisplayName(), e);
-        exception = addMultiException(exception, e);
-      }
-    }
-
-    try {
-      // clear & stop webAppContext attributes to avoid memory leaks.
-      webAppContext.clearAttributes();
-      webAppContext.stop();
-    } catch (Exception e) {
-      LOG.error("Error while stopping web app context for webapp "
-          + webAppContext.getDisplayName(), e);
-      exception = addMultiException(exception, e);
-    }
-
-    try {
-      webServer.stop();
-    } catch (Exception e) {
-      LOG.error("Error while stopping web server for webapp "
-          + webAppContext.getDisplayName(), e);
-      exception = addMultiException(exception, e);
-    }
-
-    if (exception != null) {
-      exception.ifExceptionThrow();
-    }
-
-  }
-
-  private MultiException addMultiException(MultiException exception, Exception e) {
-    if(exception == null){
-      exception = new MultiException();
-    }
-    exception.add(e);
-    return exception;
-  }
-
-  public void join() throws InterruptedException {
-    webServer.join();
-  }
-
-  /**
-   * Test for the availability of the web server
-   * @return true if the web server is started, false otherwise
-   */
-  public boolean isAlive() {
-    return webServer != null && webServer.isStarted();
-  }
-
-  /**
-   * Describe the server's live state and the addresses it listens on.
-   * @return a human-readable summary of the server state and its listener endpoints
-   */
-  @Override
-  public String toString() {
-    if (listeners.isEmpty()) {
-      return "Inactive HttpServer";
-    } else {
-      StringBuilder sb = new StringBuilder("HttpServer (")
-        .append(isAlive() ? STATE_DESCRIPTION_ALIVE : STATE_DESCRIPTION_NOT_LIVE).append("), listening at:");
-      for (ListenerInfo li : listeners) {
-        ServerConnector l = li.listener;
-        sb.append(l.getHost()).append(":").append(l.getPort()).append("/,");
-      }
-      return sb.toString();
-    }
-  }
-
-  /**
-   * Checks whether the user has privileges to access the instrumentation servlets.
-   * <p>
-   * If <code>hadoop.security.instrumentation.requires.admin</code> is set to FALSE
-   * (default value) it always returns TRUE.
-   * </p><p>
-   * If <code>hadoop.security.instrumentation.requires.admin</code> is set to TRUE
-   * it will check whether the current user is in the admin ACLs. If the user is
-   * in the admin ACLs it returns TRUE, otherwise it returns FALSE.
-   * </p>
-   *
-   * @param servletContext the servlet context.
-   * @param request the servlet request.
-   * @param response the servlet response.
-   * @return TRUE/FALSE based on the logic described above.
-   */
-  public static boolean isInstrumentationAccessAllowed(
-    ServletContext servletContext, HttpServletRequest request,
-    HttpServletResponse response) throws IOException {
-    Configuration conf =
-      (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
-
-    boolean access = true;
-    boolean adminAccess = conf.getBoolean(
-      CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
-      false);
-    if (adminAccess) {
-      access = hasAdministratorAccess(servletContext, request, response);
-    }
-    return access;
-  }
-
-  /**
-   * Does the user sending the HttpServletRequest have the administrator ACLs?
-   * If not, the response is modified to send an error to the user.
-   *
-   * @param servletContext the servlet context.
-   * @param request the servlet request.
-   * @param response used to send the error response if user does not have admin access.
-   * @return true if admin-authorized, false otherwise
-   * @throws IOException if sending the error response fails.
-   */
-  public static boolean hasAdministratorAccess(
-      ServletContext servletContext, HttpServletRequest request,
-      HttpServletResponse response) throws IOException {
-    Configuration conf =
-        (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
-    // If there is no authorization, anybody has administrator access.
-    if (!conf.getBoolean(
-        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
-      return true;
-    }
-
-    String remoteUser = request.getRemoteUser();
-    if (remoteUser == null) {
-      response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
-                         "Unauthenticated users are not " +
-                         "authorized to access this page.");
-      return false;
-    }
-
-    if (servletContext.getAttribute(ADMINS_ACL) != null &&
-        !userHasAdministratorAccess(servletContext, remoteUser)) {
-      response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
-          + remoteUser + " is unauthorized to access this page.");
-      return false;
-    }
-
-    return true;
-  }
-
-  /**
-   * Get the admin ACLs from the given ServletContext and check if the given
-   * user is in the ACL.
-   *
-   * @param servletContext the context containing the admin ACL.
-   * @param remoteUser the remote user to check for.
-   * @return true if the user is present in the ACL, false if no ACL is set or
-   *         the user is not present
-   */
-  public static boolean userHasAdministratorAccess(ServletContext servletContext,
-      String remoteUser) {
-    AccessControlList adminsAcl = (AccessControlList) servletContext
-        .getAttribute(ADMINS_ACL);
-    UserGroupInformation remoteUserUGI =
-        UserGroupInformation.createRemoteUser(remoteUser);
-    return adminsAcl != null && adminsAcl.isUserAllowed(remoteUserUGI);
-  }
-
-  /**
-   * A very simple servlet to serve up a text representation of the current
-   * stack traces. It both returns the stacks to the caller and logs them.
-   * The two dumps are taken sequentially rather than atomically, so the
-   * returned and logged traces may not contain exactly the same data.
-   */
-  public static class StackServlet extends HttpServlet {
-    private static final long serialVersionUID = -6284183679759467039L;
-
-    @Override
-    public void doGet(HttpServletRequest request, HttpServletResponse response)
-      throws ServletException, IOException {
-      if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
-                                                     request, response)) {
-        return;
-      }
-      response.setContentType("text/plain; charset=UTF-8");
-      try (PrintStream out = new PrintStream(
-        response.getOutputStream(), false, "UTF-8")) {
-        Threads.printThreadInfo(out, "");
-        out.flush();
-      }
-      ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
-    }
-  }
-
-  /**
-   * A Servlet input filter that quotes all HTML active characters in the
-   * parameter names and values. The goal is to quote the characters to make
-   * all of the servlets resistant to cross-site scripting attacks.
-   */
-  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-  public static class QuotingInputFilter implements Filter {
-    private FilterConfig config;
-
-    public static class RequestQuoter extends HttpServletRequestWrapper {
-      private final HttpServletRequest rawRequest;
-      public RequestQuoter(HttpServletRequest rawRequest) {
-        super(rawRequest);
-        this.rawRequest = rawRequest;
-      }
-
-      /**
-       * Return the set of parameter names, quoting each name.
-       */
-      @Override
-      public Enumeration<String> getParameterNames() {
-        return new Enumeration<String>() {
-          private Enumeration<String> rawIterator =
-            rawRequest.getParameterNames();
-          @Override
-          public boolean hasMoreElements() {
-            return rawIterator.hasMoreElements();
-          }
-
-          @Override
-          public String nextElement() {
-            return HtmlQuoting.quoteHtmlChars(rawIterator.nextElement());
-          }
-        };
-      }
-
-      /**
-       * Unquote the name and quote the value.
-       */
-      @Override
-      public String getParameter(String name) {
-        return HtmlQuoting.quoteHtmlChars(rawRequest.getParameter
-                                     (HtmlQuoting.unquoteHtmlChars(name)));
-      }
-
-      @Override
-      public String[] getParameterValues(String name) {
-        String unquoteName = HtmlQuoting.unquoteHtmlChars(name);
-        String[] unquoteValue = rawRequest.getParameterValues(unquoteName);
-        if (unquoteValue == null) {
-          return null;
-        }
-        String[] result = new String[unquoteValue.length];
-        for(int i=0; i < result.length; ++i) {
-          result[i] = HtmlQuoting.quoteHtmlChars(unquoteValue[i]);
-        }
-        return result;
-      }
-
-      @Override
-      public Map<String, String[]> getParameterMap() {
-        Map<String, String[]> result = new HashMap<>();
-        Map<String, String[]> raw = rawRequest.getParameterMap();
-        for (Map.Entry<String,String[]> item: raw.entrySet()) {
-          String[] rawValue = item.getValue();
-          String[] cookedValue = new String[rawValue.length];
-          for(int i=0; i< rawValue.length; ++i) {
-            cookedValue[i] = HtmlQuoting.quoteHtmlChars(rawValue[i]);
-          }
-          result.put(HtmlQuoting.quoteHtmlChars(item.getKey()), cookedValue);
-        }
-        return result;
-      }
-
-      /**
-       * Quote the url so that users specifying the HOST HTTP header
-       * can't inject attacks.
-       */
-      @Override
-      public StringBuffer getRequestURL(){
-        String url = rawRequest.getRequestURL().toString();
-        return new StringBuffer(HtmlQuoting.quoteHtmlChars(url));
-      }
-
-      /**
-       * Quote the server name so that users specifying the HOST HTTP header
-       * can't inject attacks.
-       */
-      @Override
-      public String getServerName() {
-        return HtmlQuoting.quoteHtmlChars(rawRequest.getServerName());
-      }
-    }
-
-    @Override
-    public void init(FilterConfig config) throws ServletException {
-      this.config = config;
-    }
-
-    @Override
-    public void destroy() {
-    }
-
-    @Override
-    public void doFilter(ServletRequest request,
-                         ServletResponse response,
-                         FilterChain chain
-                         ) throws IOException, ServletException {
-      HttpServletRequestWrapper quoted =
-        new RequestQuoter((HttpServletRequest) request);
-      HttpServletResponse httpResponse = (HttpServletResponse) response;
-
-      String mime = inferMimeType(request);
-      if (mime == null) {
-        httpResponse.setContentType("text/plain; charset=utf-8");
-      } else if (mime.startsWith("text/html")) {
-        // HTML with unspecified encoding, we want to
-        // force HTML with utf-8 encoding
-        // This is to avoid the following security issue:
-        // http://openmya.hacker.jp/hasegawa/security/utf7cs.html
-        httpResponse.setContentType("text/html; charset=utf-8");
-      } else if (mime.startsWith("application/xml")) {
-        httpResponse.setContentType("text/xml; charset=utf-8");
-      }
-      chain.doFilter(quoted, httpResponse);
-    }
-
-    /**
-     * Infer the mime type for the response based on the extension of the request
-     * URI. Returns null if unknown.
-     */
-    private String inferMimeType(ServletRequest request) {
-      String path = ((HttpServletRequest)request).getRequestURI();
-      ServletContext context = config.getServletContext();
-      return context.getMimeType(path);
-    }
-  }
-}
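
A note on the openListeners() retry loop above: when findPort is true, a
BindException simply advances to the next port, and Jetty also requires a
close() before re-opening a connector that previously failed to open. The same
find-a-free-port pattern, sketched standalone against plain ServerSocket (no
Jetty involved; class and method names are illustrative):

    import java.io.IOException;
    import java.net.BindException;
    import java.net.ServerSocket;

    public class FindPortSketch {
      /** Bind to port, walking upward on BindException when findPort is true. */
      static ServerSocket bindWithRetry(int port, boolean findPort) throws IOException {
        while (true) {
          try {
            return new ServerSocket(port);
          } catch (BindException ex) {
            if (port == 0 || !findPort) {
              throw ex;   // a fixed (or ephemeral) port was requested; give up
            }
            port++;       // mirror the loop above: try the next port number
          }
        }
      }

      public static void main(String[] args) throws IOException {
        try (ServerSocket first = bindWithRetry(16010, true);
             ServerSocket second = bindWithRetry(first.getLocalPort(), true)) {
          // second lands on the next free port above the one first grabbed
          System.out.println(first.getLocalPort() + " then " + second.getLocalPort());
        }
      }
    }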

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServerUtil.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServerUtil.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServerUtil.java
deleted file mode 100644
index 0b33fd1..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServerUtil.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.eclipse.jetty.security.ConstraintSecurityHandler;
-import org.eclipse.jetty.util.security.Constraint;
-import org.eclipse.jetty.security.ConstraintMapping;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-
-/**
- * HttpServer utility.
- */
-public class HttpServerUtil {
-  /**
-   * Add constraints to a Jetty Context to disallow undesirable Http methods.
-   * @param ctxHandler The context to modify
-   */
-  public static void constrainHttpMethods(ServletContextHandler ctxHandler) {
-    Constraint c = new Constraint();
-    c.setAuthenticate(true);
-
-    ConstraintMapping cmt = new ConstraintMapping();
-    cmt.setConstraint(c);
-    cmt.setMethod("TRACE");
-    cmt.setPathSpec("/*");
-
-    ConstraintMapping cmo = new ConstraintMapping();
-    cmo.setConstraint(c);
-    cmo.setMethod("OPTIONS");
-    cmo.setPathSpec("/*");
-
-    ConstraintSecurityHandler securityHandler = new ConstraintSecurityHandler();
-    securityHandler.setConstraintMappings(new ConstraintMapping[]{ cmt, cmo });
-
-    ctxHandler.setSecurityHandler(securityHandler);
-  }
-}
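
For context, the removed helper was invoked while assembling a Jetty context; a
minimal usage sketch, assuming Jetty 9 on the classpath and an arbitrary port:

    import org.apache.hadoop.hbase.http.HttpServerUtil;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.servlet.ServletContextHandler;

    public class ConstrainHttpMethodsSketch {
      public static void main(String[] args) throws Exception {
        Server server = new Server(8080);                 // illustrative port
        ServletContextHandler ctx = new ServletContextHandler();
        ctx.setContextPath("/");
        // Installs the security handler built above: TRACE and OPTIONS on /*
        // require authentication that nothing satisfies, so both are rejected.
        HttpServerUtil.constrainHttpMethods(ctx);
        server.setHandler(ctx);
        server.start();
        server.join();
      }
    }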

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
deleted file mode 100644
index 5fd6514..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http;
-
-import java.io.IOException;
-import java.net.URI;
-
-import javax.servlet.http.HttpServlet;
-
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * Create a Jetty embedded server to answer http requests. The primary goal
- * is to serve up status information for the server.
- * There are three contexts:
- *   "/stacks/" -&gt; points to stack trace
- *   "/static/" -&gt; points to common static files (src/hbase-webapps/static)
- *   "/" -&gt; the jsp server code from (src/hbase-webapps/&lt;name&gt;)
- */
-@InterfaceAudience.Private
-public class InfoServer {
-
-  private static final String HBASE_APP_DIR = "hbase-webapps";
-  private final org.apache.hadoop.hbase.http.HttpServer httpServer;
-
-  /**
-   * Create a status server on the given port.
-   * The jsp scripts are taken from src/hbase-webapps/<code>name</code>.
-   * @param name The name of the server
-   * @param bindAddress address to bind to
-   * @param port The port to use on the server
-   * @param findPort whether the server should start at the given port and
-   * increment by 1 until it finds a free port.
-   * @throws IOException if the server could not be created
-   */
-  public InfoServer(String name, String bindAddress, int port, boolean findPort,
-      final Configuration c)
-  throws IOException {
-    HttpConfig httpConfig = new HttpConfig(c);
-    HttpServer.Builder builder =
-      new org.apache.hadoop.hbase.http.HttpServer.Builder();
-
-    builder.setName(name)
-      .addEndpoint(URI.create(httpConfig.getSchemePrefix() + bindAddress + ":" + port))
-      .setAppDir(HBASE_APP_DIR).setFindPort(findPort).setConf(c);
-    String logDir = System.getProperty("hbase.log.dir");
-    if (logDir != null) {
-      builder.setLogDir(logDir);
-    }
-    if (httpConfig.isSecure()) {
-      builder
-        .keyPassword(HBaseConfiguration.getPassword(c, "ssl.server.keystore.keypassword", null))
-        .keyStore(c.get("ssl.server.keystore.location"),
-          HBaseConfiguration.getPassword(c, "ssl.server.keystore.password", null),
-          c.get("ssl.server.keystore.type", "jks"))
-        .trustStore(c.get("ssl.server.truststore.location"),
-          HBaseConfiguration.getPassword(c, "ssl.server.truststore.password", null),
-          c.get("ssl.server.truststore.type", "jks"));
-    }
-    // Enable SPNEGO authentication
-    if ("kerberos".equalsIgnoreCase(c.get(HttpServer.HTTP_UI_AUTHENTICATION, null))) {
-      builder.setUsernameConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY)
-        .setKeytabConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY)
-        .setKerberosNameRulesKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_KEY)
-        .setSignatureSecretFileKey(
-            HttpServer.HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY)
-        .setSecurityEnabled(true);
-    }
-    this.httpServer = builder.build();
-  }
-
-  public void addServlet(String name, String pathSpec,
-          Class<? extends HttpServlet> clazz) {
-      this.httpServer.addServlet(name, pathSpec, clazz);
-  }
-
-  public void setAttribute(String name, Object value) {
-    this.httpServer.setAttribute(name, value);
-  }
-
-  public void start() throws IOException {
-    this.httpServer.start();
-  }
-
-  @Deprecated
-  public int getPort() {
-    return this.httpServer.getPort();
-  }
-
-  public void stop() throws Exception {
-    this.httpServer.stop();
-  }
-
-}
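
The constructor above is the whole public surface; a hedged sketch of standing
one up (the name, bind address, and port are illustrative, and the attribute
key is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.http.InfoServer;

    public class InfoServerSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // findPort=true: start at 16010 and walk upward until a port binds.
        InfoServer info = new InfoServer("master", "0.0.0.0", 16010, true, conf);
        info.setAttribute("some.attribute", conf);  // hypothetical attribute key
        info.start();
      }
    }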

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/NoCacheFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/NoCacheFilter.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/NoCacheFilter.java
deleted file mode 100644
index a1daf15..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/NoCacheFilter.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-public class NoCacheFilter implements Filter {
-
-  @Override
-  public void init(FilterConfig filterConfig) throws ServletException {
-  }
-
-  @Override
-  public void doFilter(ServletRequest req, ServletResponse res,
-                       FilterChain chain)
-    throws IOException, ServletException {
-    HttpServletResponse httpRes = (HttpServletResponse) res;
-    httpRes.setHeader("Cache-Control", "no-cache");
-    long now = System.currentTimeMillis();
-    httpRes.addDateHeader("Expires", now);
-    httpRes.addDateHeader("Date", now);
-    httpRes.addHeader("Pragma", "no-cache");
-    chain.doFilter(req, res);
-  }
-
-  @Override
-  public void destroy() {
-  }
-
-}
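
Given the mockito-core 2.x migration this thread opened with, the filter's
header contract is easy to pin down in a test; a sketch (class and test names
are illustrative):

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    import javax.servlet.FilterChain;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.apache.hadoop.hbase.http.NoCacheFilter;
    import org.junit.Test;

    public class NoCacheFilterSketch {
      @Test
      public void setsNoCacheHeadersAndContinuesChain() throws Exception {
        HttpServletRequest req = mock(HttpServletRequest.class);
        HttpServletResponse res = mock(HttpServletResponse.class);
        FilterChain chain = mock(FilterChain.class);

        new NoCacheFilter().doFilter(req, res, chain);

        verify(res).setHeader("Cache-Control", "no-cache");
        verify(res).addHeader("Pragma", "no-cache");
        verify(chain).doFilter(req, res);  // request proceeds down the chain
      }
    }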

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ServerConfigurationKeys.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ServerConfigurationKeys.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ServerConfigurationKeys.java
deleted file mode 100644
index 8f338a7..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ServerConfigurationKeys.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-
-/**
- * This interface contains constants for configuration keys used
- * in the HBase HTTP server code.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public interface ServerConfigurationKeys {
-
-  /** Enable/Disable ssl for http server */
-  public static final String HBASE_SSL_ENABLED_KEY = "hbase.ssl.enabled";
-
-  public static final boolean HBASE_SSL_ENABLED_DEFAULT = false;
-
-  /** Enable/Disable aliases serving from jetty */
-  public static final String HBASE_JETTY_LOGS_SERVE_ALIASES =
-      "hbase.jetty.logs.serve.aliases";
-
-  public static final boolean DEFAULT_HBASE_JETTY_LOGS_SERVE_ALIASES =
-      true;
-
-  public static final String HBASE_HTTP_STATIC_USER = "hbase.http.staticuser.user";
-
-  public static final String DEFAULT_HBASE_HTTP_STATIC_USER = "dr.stack";
-
-}
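
These keys are consumed in the usual Configuration style; a short sketch
(the values in comments are the defaults declared above):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.http.ServerConfigurationKeys;

    public class ServerKeysSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        boolean sslEnabled = conf.getBoolean(
            ServerConfigurationKeys.HBASE_SSL_ENABLED_KEY,
            ServerConfigurationKeys.HBASE_SSL_ENABLED_DEFAULT);        // false
        String staticUser = conf.get(
            ServerConfigurationKeys.HBASE_HTTP_STATIC_USER,
            ServerConfigurationKeys.DEFAULT_HBASE_HTTP_STATIC_USER);   // "dr.stack"
        System.out.println("ssl=" + sslEnabled + ", staticUser=" + staticUser);
      }
    }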

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/conf/ConfServlet.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/conf/ConfServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/conf/ConfServlet.java
deleted file mode 100644
index d9aa7b6..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/conf/ConfServlet.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.conf;
-
-import java.io.IOException;
-import java.io.Writer;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.http.HttpServer;
-
-/**
- * A servlet to print out the running configuration data.
- */
-@InterfaceAudience.LimitedPrivate({"HBase"})
-@InterfaceStability.Unstable
-public class ConfServlet extends HttpServlet {
-  private static final long serialVersionUID = 1L;
-
-  private static final String FORMAT_JSON = "json";
-  private static final String FORMAT_XML = "xml";
-  private static final String FORMAT_PARAM = "format";
-
-  /**
-   * Return the Configuration of the daemon hosting this servlet.
-   * This is populated when the HttpServer starts.
-   */
-  private Configuration getConfFromContext() {
-    Configuration conf = (Configuration)getServletContext().getAttribute(
-        HttpServer.CONF_CONTEXT_ATTRIBUTE);
-    assert conf != null;
-    return conf;
-  }
-
-  @Override
-  public void doGet(HttpServletRequest request, HttpServletResponse response)
-      throws ServletException, IOException {
-
-    if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
-                                                   request, response)) {
-      return;
-    }
-
-    String format = request.getParameter(FORMAT_PARAM);
-    if (null == format) {
-      format = FORMAT_XML;
-    }
-
-    if (FORMAT_XML.equals(format)) {
-      response.setContentType("text/xml; charset=utf-8");
-    } else if (FORMAT_JSON.equals(format)) {
-      response.setContentType("application/json; charset=utf-8");
-    }
-
-    Writer out = response.getWriter();
-    try {
-      writeResponse(getConfFromContext(), out, format);
-    } catch (BadFormatException bfe) {
-      response.sendError(HttpServletResponse.SC_BAD_REQUEST, bfe.getMessage());
-    }
-    out.close();
-  }
-
-  /**
-   * Guts of the servlet - extracted for easy testing.
-   */
-  static void writeResponse(Configuration conf, Writer out, String format)
-    throws IOException, BadFormatException {
-    if (FORMAT_JSON.equals(format)) {
-      Configuration.dumpConfiguration(conf, out);
-    } else if (FORMAT_XML.equals(format)) {
-      conf.writeXml(out);
-    } else {
-      throw new BadFormatException("Bad format: " + format);
-    }
-  }
-
-  public static class BadFormatException extends Exception {
-    private static final long serialVersionUID = 1L;
-
-    public BadFormatException(String msg) {
-      super(msg);
-    }
-  }
-
-}
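
On the wire, the servlet answers GET with XML unless format=json is passed; a
client-side sketch (the host and port are placeholders for a running UI, and
/conf is where HBase UIs conventionally mount it):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class ConfServletClientSketch {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:16010/conf?format=json");  // placeholder
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);  // the daemon's running Configuration dump
          }
        }
      }
    }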

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java
deleted file mode 100644
index 2e43be2..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java
+++ /dev/null
@@ -1,240 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http.jmx;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.lang.management.ManagementFactory;
-
-import javax.management.MBeanServer;
-import javax.management.MalformedObjectNameException;
-import javax.management.ObjectName;
-import javax.management.openmbean.CompositeData;
-import javax.management.openmbean.TabularData;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.hbase.util.JSONBean;
-
-/*
- * This servlet is based on the JMXProxyServlet from Tomcat 7.0.14. It has
- * been rewritten to be read-only and to output JSON, so it is not really
- * that close to the original anymore.
- */
-/**
- * Provides read-only web access to JMX.
- * <p>
- * This servlet will generally be placed under the /jmx URL for each
- * HttpServer.  It provides read-only
- * access to JMX metrics.  The optional <code>qry</code> parameter
- * may be used to query only a subset of the JMX Beans.  This query
- * functionality is provided through the
- * {@link MBeanServer#queryNames(ObjectName, javax.management.QueryExp)}
- * method.
- * </p>
- * <p>
- * For example <code>http://.../jmx?qry=Hadoop:*</code> will return
- * all hadoop metrics exposed through JMX.
- * </p>
- * <p>
- * The optional <code>get</code> parameter is used to query a specific
- * attribute of a JMX bean.  The format of the URL is
- * <code>http://.../jmx?get=MXBeanName::AttributeName</code>
- * </p>
- * <p>
- * For example
- * <code>
- * http://../jmx?get=Hadoop:service=NameNode,name=NameNodeInfo::ClusterId
- * </code> will return the cluster id of the namenode mxbean.
- * </p>
- * <p>
- * If the <code>qry</code> or the <code>get</code> parameter is not formatted
- * correctly then a 400 BAD REQUEST http response code will be returned.
- * </p>
- * <p>
- * If a resource such as an MBean or attribute cannot be found,
- * a 404 SC_NOT_FOUND http response code will be returned.
- * </p>
- * <p>
- * The return format is JSON and in the form
- * </p>
- *  <pre><code>
- *  {
- *    "beans" : [
- *      {
- *        "name":"bean-name"
- *        ...
- *      }
- *    ]
- *  }
- *  </code></pre>
- *  <p>
- *  The servlet attempts to convert the JMX beans into JSON. Each
- *  bean's attributes will be converted to a JSON object member.
- *
- *  If the attribute is a boolean, a number, a string, or an array
- *  it will be converted to the JSON equivalent.
- *
- *  If the value is a {@link CompositeData} then it will be converted
- *  to a JSON object with the keys as the name of the JSON member and
- *  the value is converted following these same rules.
- *
- *  If the value is a {@link TabularData} then it will be converted
- *  to an array of the {@link CompositeData} elements that it contains.
- *
- *  All other objects will be converted to a string and output as such.
- *
- *  The bean's name and modelerType will be returned for all beans.
- *
- *  Optional paramater "callback" should be used to deliver JSONP response.
- * </p>
- *
- */
-public class JMXJsonServlet extends HttpServlet {
-  private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class);
-
-  private static final long serialVersionUID = 1L;
-
-  private static final String CALLBACK_PARAM = "callback";
-  /**
-   * If the query string includes 'description', then bean and attribute descriptions are
-   * emitted to the output IFF they are not null and IFF the description differs from the
-   * attribute name; i.e. specify a URL like so: /jmx?description=true
-   */
-  private static final String INCLUDE_DESCRIPTION = "description";
-
-  /**
-   * MBean server.
-   */
-  protected transient MBeanServer mBeanServer;
-
-  protected transient JSONBean jsonBeanWriter;
-
-  /**
-   * Initialize this servlet.
-   */
-  @Override
-  public void init() throws ServletException {
-    // Retrieve the MBean server
-    mBeanServer = ManagementFactory.getPlatformMBeanServer();
-    this.jsonBeanWriter = new JSONBean();
-  }
-
-  /**
-   * Process a GET request for the specified resource.
-   *
-   * @param request
-   *          The servlet request we are processing
-   * @param response
-   *          The servlet response we are creating
-   */
-  @Override
-  public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
-    try {
-      if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), request, response)) {
-        return;
-      }
-      String jsonpcb = null;
-      PrintWriter writer = null;
-      JSONBean.Writer beanWriter = null;
-      try {
-        jsonpcb = checkCallbackName(request.getParameter(CALLBACK_PARAM));
-        writer = response.getWriter();
-        beanWriter = this.jsonBeanWriter.open(writer);
-
-        // "callback" parameter implies JSONP outpout
-        if (jsonpcb != null) {
-          response.setContentType("application/javascript; charset=utf8");
-          writer.write(jsonpcb + "(");
-        } else {
-          response.setContentType("application/json; charset=utf8");
-        }
-        // Should we output description on each attribute and bean?
-        String tmpStr = request.getParameter(INCLUDE_DESCRIPTION);
-        boolean description = tmpStr != null && tmpStr.length() > 0;
-
-        // query per mbean attribute
-        String getmethod = request.getParameter("get");
-        if (getmethod != null) {
-          String[] splitStrings = getmethod.split("\\:\\:");
-          if (splitStrings.length != 2) {
-            beanWriter.write("result", "ERROR");
-            beanWriter.write("message", "query format is not as expected.");
-            beanWriter.flush();
-            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
-            return;
-          }
-          if (beanWriter.write(this.mBeanServer, new ObjectName(splitStrings[0]),
-              splitStrings[1], description) != 0) {
-            beanWriter.flush();
-            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
-          }
-          return;
-        }
-
-        // query per mbean
-        String qry = request.getParameter("qry");
-        if (qry == null) {
-          qry = "*:*";
-        }
-        if (beanWriter.write(this.mBeanServer, new ObjectName(qry), null, description) != 0) {
-          beanWriter.flush();
-          response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
-        }
-      } finally {
-        if (beanWriter != null) beanWriter.close();
-        if (jsonpcb != null) {
-           writer.write(");");
-        }
-        if (writer != null) {
-          writer.close();
-        }
-      }
-    } catch (IOException e) {
-      LOG.error("Caught an exception while processing JMX request", e);
-      response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
-    } catch (MalformedObjectNameException e) {
-      LOG.error("Caught an exception while processing JMX request", e);
-      response.sendError(HttpServletResponse.SC_BAD_REQUEST);
-    }
-  }
-
-  /**
-   * Verifies that the callback property, if provided, is purely alphanumeric.
-   * This prevents a malicious callback name (that is javascript code) from being
-   * returned by the UI to an unsuspecting user.
-   *
-   * @param callbackName The callback name, can be null.
-   * @return The callback name
-   * @throws IOException If the name is disallowed.
-   */
-  private String checkCallbackName(String callbackName) throws IOException {
-    if (null == callbackName) {
-      return null;
-    }
-    if (callbackName.matches("[A-Za-z0-9_]+")) {
-      return callbackName;
-    }
-    throw new IOException("'callback' must be alphanumeric");
-  }
-}
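
The class comment spells out the URL contract: qry takes an ObjectName pattern,
get takes the MXBeanName::AttributeName form, and a malformed pattern earns a
400. A client sketch against both forms (host and port are placeholders):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class JmxServletClientSketch {
      public static void main(String[] args) throws Exception {
        URL[] queries = {
          // Every bean in the Hadoop namespace, via the ?qry= pattern form.
          new URL("http://localhost:16010/jmx?qry=Hadoop:*"),
          // One attribute of one bean, via the ?get=Name::Attribute form.
          new URL("http://localhost:16010/jmx?get=java.lang:type=Memory::HeapMemoryUsage"),
        };
        for (URL u : queries) {
          try (BufferedReader in = new BufferedReader(
              new InputStreamReader(u.openStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
              System.out.println(line);  // {"beans":[...]} per the format above
            }
          }
        }
      }
    }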

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/package-info.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/package-info.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/package-info.java
deleted file mode 100644
index 21667d7..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * This package provides access to JMX primarily through the
- * {@link org.apache.hadoop.hbase.http.jmx.JMXJsonServlet} class.
- * <p>
- * Copied from hadoop source code.<br>
- * See https://issues.apache.org/jira/browse/HADOOP-10232 to know why.
- * </p>
- */
-package org.apache.hadoop.hbase.http.jmx;

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java
deleted file mode 100644
index a1fa9f0..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.lib;
-
-import java.io.IOException;
-import java.security.Principal;
-import java.util.HashMap;
-
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.http.FilterContainer;
-import org.apache.hadoop.hbase.http.FilterInitializer;
-
-import javax.servlet.Filter;
-
-import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.HBASE_HTTP_STATIC_USER;
-import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.DEFAULT_HBASE_HTTP_STATIC_USER;
-
-/**
- * Provides a servlet filter that pretends to authenticate a fake user (by
- * default "dr.stack") so that the web UI is usable for a secure cluster
- * without authentication.
- */
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-public class StaticUserWebFilter extends FilterInitializer {
-  static final String DEPRECATED_UGI_KEY = "dfs.web.ugi";
-
-  private static final Log LOG = LogFactory.getLog(StaticUserWebFilter.class);
-
-  static class User implements Principal {
-    private final String name;
-    public User(String name) {
-      this.name = name;
-    }
-    @Override
-    public String getName() {
-      return name;
-    }
-    @Override
-    public int hashCode() {
-      return name.hashCode();
-    }
-    @Override
-    public boolean equals(Object other) {
-      if (other == this) {
-        return true;
-      } else if (other == null || other.getClass() != getClass()) {
-        return false;
-      }
-      return ((User) other).name.equals(name);
-    }
-    @Override
-    public String toString() {
-      return name;
-    }
-  }
-
-  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-  public static class StaticUserFilter implements Filter {
-    private User user;
-    private String username;
-
-    @Override
-    public void destroy() {
-      // NOTHING
-    }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-                         FilterChain chain
-                         ) throws IOException, ServletException {
-      HttpServletRequest httpRequest = (HttpServletRequest) request;
-      // if the user is already authenticated, don't override it
-      if (httpRequest.getRemoteUser() != null) {
-        chain.doFilter(request, response);
-      } else {
-        HttpServletRequestWrapper wrapper =
-            new HttpServletRequestWrapper(httpRequest) {
-          @Override
-          public Principal getUserPrincipal() {
-            return user;
-          }
-          @Override
-          public String getRemoteUser() {
-            return username;
-          }
-        };
-        chain.doFilter(wrapper, response);
-      }
-    }
-
-    @Override
-    public void init(FilterConfig conf) throws ServletException {
-      this.username = conf.getInitParameter(HBASE_HTTP_STATIC_USER);
-      this.user = new User(username);
-    }
-
-  }
-
-  @Override
-  public void initFilter(FilterContainer container, Configuration conf) {
-    HashMap<String, String> options = new HashMap<>();
-
-    String username = getUsernameFromConf(conf);
-    options.put(HBASE_HTTP_STATIC_USER, username);
-
-    container.addFilter("static_user_filter",
-                        StaticUserFilter.class.getName(),
-                        options);
-  }
-
-  /**
-   * Retrieve the static username from the configuration.
-   */
-  static String getUsernameFromConf(Configuration conf) {
-    String oldStyleUgi = conf.get(DEPRECATED_UGI_KEY);
-    if (oldStyleUgi != null) {
-      // We can't use the normal configuration deprecation mechanism here
-      // since we need to split out the username from the configured UGI.
-      LOG.warn(DEPRECATED_UGI_KEY + " should not be used. Instead, use " +
-          HBASE_HTTP_STATIC_USER + ".");
-      String[] parts = oldStyleUgi.split(",");
-      return parts[0];
-    } else {
-      return conf.get(HBASE_HTTP_STATIC_USER,
-        DEFAULT_HBASE_HTTP_STATIC_USER);
-    }
-  }
-
-}
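
Wiring the filter amounts to setting one key; a sketch of the lookup, including
the deprecated dfs.web.ugi fallback that getUsernameFromConf() parses (the user
names are illustrative, and the class sits in the filter's package because the
method is package-private):

    package org.apache.hadoop.hbase.http.lib;

    import org.apache.hadoop.conf.Configuration;

    public class StaticUserSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Preferred key; overrides the "dr.stack" default.
        conf.set("hbase.http.staticuser.user", "ops-viewer");      // illustrative
        System.out.println(StaticUserWebFilter.getUsernameFromConf(conf));

        // Deprecated form: only the name before the first comma is used.
        Configuration old = new Configuration();
        old.set("dfs.web.ugi", "legacy-user,supergroup");          // illustrative
        System.out.println(StaticUserWebFilter.getUsernameFromConf(old));
      }
    }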

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/package-info.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/package-info.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/package-info.java
deleted file mode 100644
index 7bb9a0f..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/package-info.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * <p>
- * This package provides user-selectable (via configuration) classes that add
- * functionality to the web UI. They are configured as a list of classes in the
- * configuration parameter <b>hadoop.http.filter.initializers</b>.
- * </p>
- * <ul>
- * <li> <b>StaticUserWebFilter</b> - An authorization plugin that makes all
- * users a static configured user.
- * </ul>
- * <p>
- * Copied from hadoop source code.<br>
- * See https://issues.apache.org/jira/browse/HADOOP-10232 to know why
- * </p>
- */
-@InterfaceAudience.LimitedPrivate({"HBase"})
-@InterfaceStability.Unstable
-package org.apache.hadoop.hbase.http.lib;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;


[06/13] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

Posted by bu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
new file mode 100644
index 0000000..e05792b
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -0,0 +1,617 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedSet;
+import java.util.TreeSet;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Executors;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter;
+import org.apache.hadoop.hbase.http.resource.JerseyResource;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.Groups;
+import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.util.ajax.JSON;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestHttpServer extends HttpServerFunctionalTest {
+  private static final Log LOG = LogFactory.getLog(TestHttpServer.class);
+  private static HttpServer server;
+  private static URL baseUrl;
+  // jetty 9.4.x needs this many threads to start, even for a small test server.
+  static final int MAX_THREADS = 16;
+  
+  @SuppressWarnings("serial")
+  public static class EchoMapServlet extends HttpServlet {
+    @Override
+    public void doGet(HttpServletRequest request, 
+                      HttpServletResponse response
+                      ) throws ServletException, IOException {
+      PrintWriter out = response.getWriter();
+      Map<String, String[]> params = request.getParameterMap();
+      SortedSet<String> keys = new TreeSet<>(params.keySet());
+      for(String key: keys) {
+        out.print(key);
+        out.print(':');
+        String[] values = params.get(key);
+        if (values.length > 0) {
+          out.print(values[0]);
+          for(int i=1; i < values.length; ++i) {
+            out.print(',');
+            out.print(values[i]);
+          }
+        }
+        out.print('\n');
+      }
+      out.close();
+    }    
+  }
+
+  @SuppressWarnings("serial")
+  public static class EchoServlet extends HttpServlet {
+    @Override
+    public void doGet(HttpServletRequest request, 
+                      HttpServletResponse response
+                      ) throws ServletException, IOException {
+      PrintWriter out = response.getWriter();
+      SortedSet<String> sortedKeys = new TreeSet<>();
+      Enumeration<String> keys = request.getParameterNames();
+      while(keys.hasMoreElements()) {
+        sortedKeys.add(keys.nextElement());
+      }
+      for(String key: sortedKeys) {
+        out.print(key);
+        out.print(':');
+        out.print(request.getParameter(key));
+        out.print('\n');
+      }
+      out.close();
+    }    
+  }
+
+  @SuppressWarnings("serial")
+  public static class LongHeaderServlet extends HttpServlet {
+    @Override
+    public void doGet(HttpServletRequest request,
+                      HttpServletResponse response
+    ) throws ServletException, IOException {
+      Assert.assertEquals(63 * 1024, request.getHeader("longheader").length());
+      response.setStatus(HttpServletResponse.SC_OK);
+    }
+  }
+
+  @SuppressWarnings("serial")
+  public static class HtmlContentServlet extends HttpServlet {
+    @Override
+    public void doGet(HttpServletRequest request, 
+                      HttpServletResponse response
+                      ) throws ServletException, IOException {
+      response.setContentType("text/html");
+      PrintWriter out = response.getWriter();
+      out.print("hello world");
+      out.close();
+    }
+  }
+
+  @BeforeClass public static void setup() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setInt(HttpServer.HTTP_MAX_THREADS, MAX_THREADS);
+    server = createTestServer(conf);
+    server.addServlet("echo", "/echo", EchoServlet.class);
+    server.addServlet("echomap", "/echomap", EchoMapServlet.class);
+    server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
+    server.addServlet("longheader", "/longheader", LongHeaderServlet.class);
+    server.addJerseyResourcePackage(
+        JerseyResource.class.getPackage().getName(), "/jersey/*");
+    server.start();
+    baseUrl = getServerURL(server);
+    LOG.info("HTTP server started: "+ baseUrl);
+  }
+  
+  @AfterClass public static void cleanup() throws Exception {
+    server.stop();
+  }
+  
+  /** Test that the maximum number of threads cannot be exceeded. */
+  @Test public void testMaxThreads() throws Exception {
+    int clientThreads = MAX_THREADS * 10;
+    Executor executor = Executors.newFixedThreadPool(clientThreads);
+    // Run many clients to make server reach its maximum number of threads
+    final CountDownLatch ready = new CountDownLatch(clientThreads);
+    final CountDownLatch start = new CountDownLatch(1);
+    for (int i = 0; i < clientThreads; i++) {
+      executor.execute(new Runnable() {
+        @Override
+        public void run() {
+          ready.countDown();
+          try {
+            start.await();
+            assertEquals("a:b\nc:d\n",
+                         readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
+            int serverThreads = server.webServer.getThreadPool().getThreads();
+            assertTrue("More threads are started than expected, Server Threads count: "
+                    + serverThreads, serverThreads <= MAX_THREADS);
+            System.out.println("Number of threads = " + serverThreads +
+                " which is less or equal than the max = " + MAX_THREADS);
+          } catch (Exception e) {
+            // ignore failures from requests rejected while the pool is saturated
+          }
+        }
+      });
+    }
+    // Start the client threads when they are all ready
+    ready.await();
+    start.countDown();
+  }
+  
+  @Test public void testEcho() throws Exception {
+    assertEquals("a:b\nc:d\n", 
+                 readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
+    assertEquals("a:b\nc&lt;:d\ne:&gt;\n", 
+                 readOutput(new URL(baseUrl, "/echo?a=b&c<=d&e=>")));    
+  }
+  
+  /** Test the echo map servlet that uses getParameterMap. */
+  @Test public void testEchoMap() throws Exception {
+    assertEquals("a:b\nc:d\n", 
+                 readOutput(new URL(baseUrl, "/echomap?a=b&c=d")));
+    assertEquals("a:b,&gt;\nc&lt;:d\n", 
+                 readOutput(new URL(baseUrl, "/echomap?a=b&c<=d&a=>")));
+  }
+
+  /**
+   *  Test that verifies headers can be up to 64K long.
+   *  The test adds a 63K header, leaving 1K for the other headers,
+   *  because the header buffer setting applies to ALL headers,
+   *  names and values included. */
+  @Test public void testLongHeader() throws Exception {
+    URL url = new URL(baseUrl, "/longheader");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    StringBuilder sb = new StringBuilder();
+    for (int i = 0 ; i < 63 * 1024; i++) {
+      sb.append("a");
+    }
+    conn.setRequestProperty("longheader", sb.toString());
+    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+  }
+
+  @Test
+  public void testContentTypes() throws Exception {
+    // Static CSS files should have text/css
+    URL cssUrl = new URL(baseUrl, "/static/test.css");
+    HttpURLConnection conn = (HttpURLConnection)cssUrl.openConnection();
+    conn.connect();
+    assertEquals(200, conn.getResponseCode());
+    assertEquals("text/css", conn.getContentType());
+
+    // Servlets should have text/plain with proper encoding by default
+    URL servletUrl = new URL(baseUrl, "/echo?a=b");
+    conn = (HttpURLConnection)servletUrl.openConnection();
+    conn.connect();
+    assertEquals(200, conn.getResponseCode());
+    assertEquals("text/plain;charset=utf-8", conn.getContentType());
+
+    // We should ignore parameters for mime types - i.e. a parameter
+    // ending in .css should not change the mime type
+    servletUrl = new URL(baseUrl, "/echo?a=b.css");
+    conn = (HttpURLConnection)servletUrl.openConnection();
+    conn.connect();
+    assertEquals(200, conn.getResponseCode());
+    assertEquals("text/plain;charset=utf-8", conn.getContentType());
+
+    // Servlets that specify text/html should get that content type
+    servletUrl = new URL(baseUrl, "/htmlcontent");
+    conn = (HttpURLConnection)servletUrl.openConnection();
+    conn.connect();
+    assertEquals(200, conn.getResponseCode());
+    assertEquals("text/html;charset=utf-8", conn.getContentType());
+
+    // JSPs should default to text/html with utf8
+    // JSPs do not work from unit tests
+    // servletUrl = new URL(baseUrl, "/testjsp.jsp");
+    // conn = (HttpURLConnection)servletUrl.openConnection();
+    // conn.connect();
+    // assertEquals(200, conn.getResponseCode());
+    // assertEquals("text/html; charset=utf-8", conn.getContentType());
+  }
+
+  /**
+   * Dummy filter that mimics an authentication filter. It obtains the user
+   * identity from the request parameter user.name and wraps the request so
+   * that request.getRemoteUser() returns that identity.
+   */
+  public static class DummyServletFilter implements Filter {
+    @Override
+    public void destroy() { }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain filterChain) throws IOException, ServletException {
+      final String userName = request.getParameter("user.name");
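+      // Wrap the request so that getRemoteUser() reports the identity taken
+      // from the user.name query parameter.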
+      ServletRequest requestModified =
+        new HttpServletRequestWrapper((HttpServletRequest) request) {
+        @Override
+        public String getRemoteUser() {
+          return userName;
+        }
+      };
+      filterChain.doFilter(requestModified, response);
+    }
+
+    @Override
+    public void init(FilterConfig arg0) throws ServletException { }
+  }
+
+  /**
+   * FilterInitializer that initializes the DummyServletFilter.
+   */
+  public static class DummyFilterInitializer extends FilterInitializer {
+    public DummyFilterInitializer() {
+    }
+
+    @Override
+    public void initFilter(FilterContainer container, Configuration conf) {
+      container.addFilter("DummyFilter", DummyServletFilter.class.getName(), null);
+    }
+  }
+
+  /**
+   * Access a URL and get the corresponding HTTP status code. The URL
+   * is accessed as the given user, by sending the user.name request
+   * parameter.
+   *
+   * @param urlstring URL to access, without the user.name parameter
+   * @param userName user to access the URL as
+   * @return the HTTP status code of the response
+   * @throws IOException if the connection fails
+   */
+  static int getHttpStatusCode(String urlstring, String userName)
+      throws IOException {
+    URL url = new URL(urlstring + "?user.name=" + userName);
+    System.out.println("Accessing " + url + " as user " + userName);
+    HttpURLConnection connection = (HttpURLConnection)url.openConnection();
+    connection.connect();
+    return connection.getResponseCode();
+  }
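+  // Example: getHttpStatusCode(serverURL + "conf", "userA") issues
+  // GET /conf?user.name=userA and returns the resulting status code.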
+
+  /**
+   * Custom user->group mapping service.
+   */
+  public static class MyGroupsProvider extends ShellBasedUnixGroupsMapping {
+    static Map<String, List<String>> mapping = new HashMap<>();
+
+    static void clearMapping() {
+      mapping.clear();
+    }
+
+    @Override
+    public List<String> getGroups(String user) throws IOException {
+      return mapping.get(user);
+    }
+  }
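+  // Tests seed the static mapping directly, e.g.
+  // MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA")),
+  // so group lookups are deterministic.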
+
+  /**
+   * Verify the access for /logs, /stacks, /conf, /logLevel and /metrics
+   * servlets, when authentication filters are set, but authorization is not
+   * enabled.
+   * @throws Exception 
+   */
+  @Test
+  @Ignore
+  public void testDisabledAuthorizationOfDefaultServlets() throws Exception {
+
+    Configuration conf = new Configuration();
+
+    // Authorization is disabled by default
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        DummyFilterInitializer.class.getName());
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        MyGroupsProvider.class.getName());
+    Groups.getUserToGroupsMappingService(conf);
+    MyGroupsProvider.clearMapping();
+    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
+    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
+
+    HttpServer myServer = new HttpServer.Builder().setName("test")
+        .addEndpoint(new URI("http://localhost:0")).setFindPort(true).build();
+    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
+    myServer.start();
+    String serverURL = "http://" + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
+    for (String servlet : new String[] { "conf", "logs", "stacks",
+        "logLevel", "metrics" }) {
+      for (String user : new String[] { "userA", "userB" }) {
+        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
+            + servlet, user));
+      }
+    }
+    myServer.stop();
+  }
+
+  /**
+   * Verify the administrator access for /logs, /stacks, /conf, /logLevel and
+   * /metrics servlets.
+   * 
+   * @throws Exception
+   */
+  @Test
+  @Ignore
+  public void testAuthorizationOfDefaultServlets() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
+        true);
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
+        true);
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        DummyFilterInitializer.class.getName());
+
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        MyGroupsProvider.class.getName());
+    Groups.getUserToGroupsMappingService(conf);
+    MyGroupsProvider.clearMapping();
+    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
+    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
+    MyGroupsProvider.mapping.put("userC", Arrays.asList("groupC"));
+    MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
+    MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
+
+    HttpServer myServer = new HttpServer.Builder().setName("test")
+        .addEndpoint(new URI("http://localhost:0")).setFindPort(true).setConf(conf)
+        .setACL(new AccessControlList("userA,userB groupC,groupD")).build();
+    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
+    myServer.start();
+
+    String serverURL = "http://"
+        + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
+    for (String servlet : new String[] { "conf", "logs", "stacks",
+        "logLevel", "metrics" }) {
+      for (String user : new String[] { "userA", "userB", "userC", "userD" }) {
+        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
+            + servlet, user));
+      }
+      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(
+          serverURL + servlet, "userE"));
+    }
+    myServer.stop();
+  }
+  
+  @Test
+  public void testRequestQuoterWithNull() throws Exception {
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    Mockito.doReturn(null).when(request).getParameterValues("dummy");
+    RequestQuoter requestQuoter = new RequestQuoter(request);
+    String[] parameterValues = requestQuoter.getParameterValues("dummy");
+    Assert.assertEquals("It should return null "
+        + "when there are no values for the parameter", null, parameterValues);
+  }
+
+  @Test
+  public void testRequestQuoterWithNotNull() throws Exception {
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    String[] values = new String[] { "abc", "def" };
+    Mockito.doReturn(values).when(request).getParameterValues("dummy");
+    RequestQuoter requestQuoter = new RequestQuoter(request);
+    String[] parameterValues = requestQuoter.getParameterValues("dummy");
+    Assert.assertTrue("It should return Parameter Values", Arrays.equals(
+        values, parameterValues));
+  }
+
+  @SuppressWarnings("unchecked")
+  private static Map<String, Object> parse(String jsonString) {
+    return (Map<String, Object>)JSON.parse(jsonString);
+  }
+
+  @Test public void testJersey() throws Exception {
+    LOG.info("BEGIN testJersey()");
+    final String js = readOutput(new URL(baseUrl, "/jersey/foo?op=bar"));
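+    // The Jersey resource echoes the request back as JSON, e.g.
+    // {"path":"foo","op":"bar"}, which the assertions below unpack.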
+    final Map<String, Object> m = parse(js);
+    LOG.info("m=" + m);
+    assertEquals("foo", m.get(JerseyResource.PATH));
+    assertEquals("bar", m.get(JerseyResource.OP));
+    LOG.info("END testJersey()");
+  }
+
+  @Test
+  public void testHasAdministratorAccess() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false);
+    ServletContext context = Mockito.mock(ServletContext.class);
+    Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
+    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(null);
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    Mockito.when(request.getRemoteUser()).thenReturn(null);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+
+    //authorization OFF
+    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
+
+    //authorization ON & user NULL
+    response = Mockito.mock(HttpServletResponse.class);
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
+    Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response));
+    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
+
+    //authorization ON & user NOT NULL & ACLs NULL
+    response = Mockito.mock(HttpServletResponse.class);
+    Mockito.when(request.getRemoteUser()).thenReturn("foo");
+    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
+
+    //authorization ON & user NOT NULL & ACLs NOT NULL & user not in ACLs
+    response = Mockito.mock(HttpServletResponse.class);
+    AccessControlList acls = Mockito.mock(AccessControlList.class);
+    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
+    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+    Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response));
+    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
+
+    //authorization ON & user NOT NULL & ACLs NOT NULL & user in in ACLs
+    response = Mockito.mock(HttpServletResponse.class);
+    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(true);
+    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
+
+  }
+
+  @Test
+  public void testRequiresAuthorizationAccess() throws Exception {
+    Configuration conf = new Configuration();
+    ServletContext context = Mockito.mock(ServletContext.class);
+    Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+
+    //requires admin access to instrumentation, FALSE by default
+    Assert.assertTrue(HttpServer.isInstrumentationAccessAllowed(context, request, response));
+
+    //requires admin access to instrumentation, TRUE
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN, true);
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
+    AccessControlList acls = Mockito.mock(AccessControlList.class);
+    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
+    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+    Assert.assertFalse(HttpServer.isInstrumentationAccessAllowed(context, request, response));
+  }
+
+  @Test public void testBindAddress() throws Exception {
+    checkBindAddress("localhost", 0, false).stop();
+    // hang onto this one for a bit more testing
+    HttpServer myServer = checkBindAddress("localhost", 0, false);
+    HttpServer myServer2 = null;
+    try { 
+      int port = myServer.getConnectorAddress(0).getPort();
+      // it's already in use, true = expect a higher port
+      myServer2 = checkBindAddress("localhost", port, true);
+      // try to reuse the port
+      port = myServer2.getConnectorAddress(0).getPort();
+      myServer2.stop();
+      assertNull(myServer2.getConnectorAddress(0)); // not bound
+      myServer2.openListeners();
+      assertEquals(port, myServer2.getConnectorAddress(0).getPort()); // expect same port
+    } finally {
+      myServer.stop();
+      if (myServer2 != null) {
+        myServer2.stop();
+      }
+    }
+  }
+  
+  private HttpServer checkBindAddress(String host, int port, boolean findPort)
+      throws Exception {
+    HttpServer server = createServer(host, port);
+    try {
+      // not bound, ephemeral should return requested port (0 for ephemeral)
+      ServerConnector listener = server.getServerConnectors().get(0);
+
+      assertEquals(port, listener.getPort());
+      // verify hostname is what was given
+      server.openListeners();
+      assertEquals(host, server.getConnectorAddress(0).getHostName());
+
+      int boundPort = server.getConnectorAddress(0).getPort();
+      if (port == 0) {
+        assertTrue(boundPort != 0); // ephemeral should now return bound port
+      } else if (findPort) {
+        assertTrue(boundPort > port);
+        // allow a little wiggle room to prevent random test failures if
+        // some consecutive ports are already in use
+        assertTrue(boundPort - port < 8);
+      }
+    } catch (Exception e) {
+      server.stop();
+      throw e;
+    }
+    return server;
+  }
+
+  @Test
+  public void testXFrameHeaderSameOrigin() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set("hbase.http.filter.xframeoptions.mode", "SAMEORIGIN");
+
+    HttpServer myServer = new HttpServer.Builder().setName("test")
+            .addEndpoint(new URI("http://localhost:0"))
+            .setFindPort(true).setConf(conf).build();
+    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
+    myServer.addServlet("echo", "/echo", EchoServlet.class);
+    myServer.start();
+
+    String serverURL = "http://"
+            + NetUtils.getHostPortString(myServer.getConnectorAddress(0));
+    URL url = new URL(new URL(serverURL), "/echo?a=b&c=d");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+    assertEquals("SAMEORIGIN", conn.getHeaderField("X-Frame-Options"));
+    myServer.stop();
+  }
+
+
+
+  @Test
+  public void testNoCacheHeader() throws Exception {
+    URL url = new URL(baseUrl, "/echo?a=b&c=d");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+    assertEquals("no-cache", conn.getHeaderField("Cache-Control"));
+    assertEquals("no-cache", conn.getHeaderField("Pragma"));
+    assertNotNull(conn.getHeaderField("Expires"));
+    assertNotNull(conn.getHeaderField("Date"));
+    assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
+    assertEquals("DENY", conn.getHeaderField("X-Frame-Options"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
new file mode 100644
index 0000000..d0f2825
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
+
+  /**
+   * Check that a server is alive by probing the {@link HttpServer#isAlive()} method
+   * and the text of its toString() description
+   * @param server server
+   */
+  private void assertAlive(HttpServer server) {
+    assertTrue("Server is not alive", server.isAlive());
+    assertToStringContains(server, HttpServer.STATE_DESCRIPTION_ALIVE);
+  }
+
+  private void assertNotLive(HttpServer server) {
+    assertTrue("Server should not be live", !server.isAlive());
+    assertToStringContains(server, HttpServer.STATE_DESCRIPTION_NOT_LIVE);
+  }
+
+  /**
+   * Test that a newly created server is not alive until started
+   *
+   * @throws Throwable on failure
+   */
+  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
+  public void testCreatedServerIsNotAlive() throws Throwable {
+    HttpServer server = createTestServer();
+    assertNotLive(server);
+  }
+
+  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
+  public void testStopUnstartedServer() throws Throwable {
+    HttpServer server = createTestServer();
+    stop(server);
+  }
+
+  /**
+   * Test that the server is alive once started
+   *
+   * @throws Throwable on failure
+   */
+  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
+  public void testStartedServerIsAlive() throws Throwable {
+    HttpServer server = null;
+    server = createTestServer();
+    assertNotLive(server);
+    server.start();
+    assertAlive(server);
+    stop(server);
+  }
+
+  /**
+   * Assert that the result of {@link HttpServer#toString()} contains the specific text
+   * @param server server to examine
+   * @param text text to search for
+   */
+  private void assertToStringContains(HttpServer server, String text) {
+    String description = server.toString();
+    assertTrue("Did not find \"" + text + "\" in \"" + description + "\"",
+               description.contains(text));
+  }
+
+  /**
+   * Test that the server is not alive once stopped
+   *
+   * @throws Throwable on failure
+   */
+  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
+  public void testStoppedServerIsNotAlive() throws Throwable {
+    HttpServer server = createAndStartTestServer();
+    assertAlive(server);
+    stop(server);
+    assertNotLive(server);
+  }
+
+  /**
+   * Test that stopping the server twice is harmless
+   *
+   * @throws Throwable on failure
+   */
+  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
+  public void testStoppingTwiceServerIsAllowed() throws Throwable {
+    HttpServer server = createAndStartTestServer();
+    assertAlive(server);
+    stop(server);
+    assertNotLive(server);
+    stop(server);
+    assertNotLive(server);
+  }
+
+  /**
+   * Test that the webapp context attributes are cleared once the server stops
+   *
+   * @throws Throwable
+   *           on failure
+   */
+  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
+  public void testWebAppContextAfterServerStop() throws Throwable {
+    HttpServer server = null;
+    String key = "test.attribute.key";
+    String value = "test.attribute.value";
+    server = createTestServer();
+    assertNotLive(server);
+    server.start();
+    server.setAttribute(key, value);
+    assertAlive(server);
+    assertEquals(value, server.getAttribute(key));
+    stop(server);
+    assertNull("Server context should have cleared", server.getAttribute(key));
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
new file mode 100644
index 0000000..db394a8
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+
+import java.io.FileNotFoundException;
+
+/**
+ * Test webapp loading
+ */
+@Category({MiscTests.class, SmallTests.class})
+public class TestHttpServerWebapps extends HttpServerFunctionalTest {
+  private static final Log log = LogFactory.getLog(TestHttpServerWebapps.class);
+
+  /**
+   * Test that the test server is loadable on the classpath
+   * @throws Throwable if something went wrong
+   */
+  @Test
+  public void testValidServerResource() throws Throwable {
+    HttpServer server = null;
+    try {
+      server = createServer("test");
+    } finally {
+      stop(server);
+    }
+  }
+
+  /**
+   * Test that an invalid webapp triggers an exception
+   * @throws Throwable if something went wrong
+   */
+  @Test
+  public void testMissingServerResource() throws Throwable {
+    try {
+      HttpServer server = createServer("NoSuchWebapp");
+      //should not have got here.
+      //close the server
+      String serverDescription = server.toString();
+      stop(server);
+      fail("Expected an exception, got " + serverDescription);
+    } catch (FileNotFoundException expected) {
+      log.debug("Expected exception " + expected, expected);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
new file mode 100644
index 0000000..3c2de53
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Set;
+import java.util.TreeSet;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.net.NetUtils;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestPathFilter extends HttpServerFunctionalTest {
+  private static final Log LOG = LogFactory.getLog(HttpServer.class);
+  static final Set<String> RECORDS = new TreeSet<>();
+
+  /** A very simple filter that records accessed URIs */
+  static public class RecordingFilter implements Filter {
+    private FilterConfig filterConfig = null;
+
+    @Override
+    public void init(FilterConfig filterConfig) {
+      this.filterConfig = filterConfig;
+    }
+
+    @Override
+    public void destroy() {
+      this.filterConfig = null;
+    }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain chain) throws IOException, ServletException {
+      if (filterConfig == null) {
+        return;
+      }
+
+      String uri = ((HttpServletRequest)request).getRequestURI();
+      LOG.info("filtering " + uri);
+      RECORDS.add(uri);
+      chain.doFilter(request, response);
+    }
+
+    /** Configuration for RecordingFilter */
+    static public class Initializer extends FilterInitializer {
+      public Initializer() {}
+
+      @Override
+      public void initFilter(FilterContainer container, Configuration conf) {
+        container.addFilter("recording", RecordingFilter.class.getName(), null);
+      }
+    }
+  }
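+  // HttpServer discovers RecordingFilter through the Initializer registered
+  // under HttpServer.FILTER_INITIALIZERS_PROPERTY in testPathSpecFilters below.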
+  
+  
+  /** Access a URL, ignoring IOExceptions such as the page not existing. */
+  static void access(String urlstring) throws IOException {
+    LOG.warn("access " + urlstring);
+    URL url = new URL(urlstring);
+    
+    URLConnection connection = url.openConnection();
+    connection.connect();
+    
+    try {
+      BufferedReader in = new BufferedReader(new InputStreamReader(
+          connection.getInputStream()));
+      try {
+        for(; in.readLine() != null; );
+      } finally {
+        in.close();
+      }
+    } catch(IOException ioe) {
+      LOG.warn("urlstring=" + urlstring, ioe);
+    }
+  }
+
+  @Test
+  public void testPathSpecFilters() throws Exception {
+    Configuration conf = new Configuration();
+    
+    //start an http server with RecordingFilter
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        RecordingFilter.Initializer.class.getName());
+    String[] pathSpecs = { "/path", "/path/*" };
+    HttpServer http = createTestServer(conf, pathSpecs);
+    http.start();
+
+    final String baseURL = "/path";
+    final String baseSlashURL = "/path/";
+    final String addedURL = "/path/nodes";
+    final String addedSlashURL = "/path/nodes/";
+    final String longURL = "/path/nodes/foo/job";
+    final String rootURL = "/";
+    final String allURL = "/*";
+
+    final String[] filteredUrls = {baseURL, baseSlashURL, addedURL, 
+        addedSlashURL, longURL};
+    final String[] notFilteredUrls = {rootURL, allURL};
+
+    // access the urls and verify our path specs got added to the
+    // filters
+    final String prefix = "http://"
+        + NetUtils.getHostPortString(http.getConnectorAddress(0));
+    try {
+      for(int i = 0; i < filteredUrls.length; i++) {
+        access(prefix + filteredUrls[i]);
+      }
+      for(int i = 0; i < notFilteredUrls.length; i++) {
+        access(prefix + notFilteredUrls[i]);
+      }
+    } finally {
+      http.stop();
+    }
+
+    LOG.info("RECORDS = " + RECORDS);
+    
+    //verify records
+    for(int i = 0; i < filteredUrls.length; i++) {
+      assertTrue(RECORDS.remove(filteredUrls[i]));
+    }
+    assertTrue(RECORDS.isEmpty());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
new file mode 100644
index 0000000..b599350
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URL;
+
+import javax.net.ssl.HttpsURLConnection;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * This test case issues SSL certificates, configures the HttpServer to serve
+ * HTTPS using the created certificates, and calls an echo servlet using the
+ * corresponding HTTPS URL.
+ */
+@Category({MiscTests.class, SmallTests.class})
+public class TestSSLHttpServer extends HttpServerFunctionalTest {
+  private static final String BASEDIR = System.getProperty("test.build.dir",
+      "target/test-dir") + "/" + TestSSLHttpServer.class.getSimpleName();
+
+  private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class);
+  private static Configuration conf;
+  private static HttpServer server;
+  private static URL baseUrl;
+  private static String keystoresDir;
+  private static String sslConfDir;
+  private static SSLFactory clientSslFactory;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    conf = new Configuration();
+    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
+
+    File base = new File(BASEDIR);
+    FileUtil.fullyDelete(base);
+    base.mkdirs();
+    keystoresDir = new File(BASEDIR).getAbsolutePath();
+    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
+
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
+    Configuration sslConf = new Configuration(false);
+    sslConf.addResource("ssl-server.xml");
+    sslConf.addResource("ssl-client.xml");
+
+    clientSslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, sslConf);
+    clientSslFactory.init();
+
+    server = new HttpServer.Builder()
+        .setName("test")
+        .addEndpoint(new URI("https://localhost"))
+        .setConf(conf)
+        .keyPassword(HBaseConfiguration.getPassword(sslConf, "ssl.server.keystore.keypassword",
+            null))
+        .keyStore(sslConf.get("ssl.server.keystore.location"),
+            HBaseConfiguration.getPassword(sslConf, "ssl.server.keystore.password", null),
+            sslConf.get("ssl.server.keystore.type", "jks"))
+        .trustStore(sslConf.get("ssl.server.truststore.location"),
+            HBaseConfiguration.getPassword(sslConf, "ssl.server.truststore.password", null),
+            sslConf.get("ssl.server.truststore.type", "jks")).build();
+    server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
+    server.start();
+    baseUrl = new URL("https://"
+        + NetUtils.getHostPortString(server.getConnectorAddress(0)));
+    LOG.info("HTTP server started: " + baseUrl);
+  }
+
+  @AfterClass
+  public static void cleanup() throws Exception {
+    server.stop();
+    FileUtil.fullyDelete(new File(BASEDIR));
+    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
+    clientSslFactory.destroy();
+  }
+
+  @Test
+  public void testEcho() throws Exception {
+    assertEquals("a:b\nc:d\n", readOut(new URL(baseUrl, "/echo?a=b&c=d")));
+    assertEquals("a:b\nc&lt;:d\ne:&gt;\n", readOut(new URL(baseUrl,
+        "/echo?a=b&c<=d&e=>")));
+  }
+
+  private static String readOut(URL url) throws Exception {
+    HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
+    conn.setSSLSocketFactory(clientSslFactory.createSSLSocketFactory());
+    InputStream in = conn.getInputStream();
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    IOUtils.copyBytes(in, out, 1024);
+    return out.toString();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
new file mode 100644
index 0000000..1d24ec2
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
@@ -0,0 +1,210 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Random;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.GenericTestUtils;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.net.NetUtils;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestServletFilter extends HttpServerFunctionalTest {
+  private static final Log LOG = LogFactory.getLog(HttpServer.class);
+  static volatile String uri = null; 
+
+  /** A very simple filter which records the filtered URI. */
+  static public class SimpleFilter implements Filter {
+    private FilterConfig filterConfig = null;
+
+    @Override
+    public void init(FilterConfig filterConfig) throws ServletException {
+      this.filterConfig = filterConfig;
+    }
+
+    @Override
+    public void destroy() {
+      this.filterConfig = null;
+    }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain chain) throws IOException, ServletException {
+      if (filterConfig == null) {
+        return;
+      }
+
+      uri = ((HttpServletRequest)request).getRequestURI();
+      LOG.info("filtering " + uri);
+      chain.doFilter(request, response);
+    }
+
+    /** Configuration for the filter */
+    static public class Initializer extends FilterInitializer {
+      public Initializer() {}
+
+      @Override
+      public void initFilter(FilterContainer container, Configuration conf) {
+        container.addFilter("simple", SimpleFilter.class.getName(), null);
+      }
+    }
+  }
+  
+  
+  /** Access a URL, ignoring IOExceptions such as the page not existing. */
+  static void access(String urlstring) throws IOException {
+    LOG.warn("access " + urlstring);
+    URL url = new URL(urlstring);
+    URLConnection connection = url.openConnection();
+    connection.connect();
+    
+    try {
+      BufferedReader in = new BufferedReader(new InputStreamReader(
+          connection.getInputStream()));
+      try {
+        for(; in.readLine() != null; );
+      } finally {
+        in.close();
+      }
+    } catch(IOException ioe) {
+      LOG.warn("urlstring=" + urlstring, ioe);
+    }
+  }
+
+  @Test
+  @Ignore
+  //From stack
+  // It's a 'foreign' test, one that came in from hadoop when we copy/pasted http.
+  // It's second class. Could comment it out if it's the only failing test (as per @nkeywal - sort of)
+  public void testServletFilter() throws Exception {
+    Configuration conf = new Configuration();
+    
+    //start an http server with SimpleFilter
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        SimpleFilter.Initializer.class.getName());
+    HttpServer http = createTestServer(conf);
+    http.start();
+
+    final String fsckURL = "/fsck";
+    final String stacksURL = "/stacks";
+    final String ajspURL = "/a.jsp";
+    final String logURL = "/logs/a.log";
+    final String hadooplogoURL = "/static/hadoop-logo.jpg";
+    
+    final String[] urls = {fsckURL, stacksURL, ajspURL, logURL, hadooplogoURL};
+    final Random ran = new Random();
+    final int[] sequence = new int[50];
+
+    //generate a random access sequence
+    for(int i = 0; i < sequence.length; i++) {
+      sequence[i] = ran.nextInt(urls.length);
+    }
+
+    //access the urls as the sequence
+    final String prefix = "http://"
+        + NetUtils.getHostPortString(http.getConnectorAddress(0));
+    try {
+      for(int i = 0; i < sequence.length; i++) {
+        access(prefix + urls[sequence[i]]);
+
+        //make sure everything except fsck gets filtered
+        if (sequence[i] == 0) {
+          assertEquals(null, uri);
+        } else {
+          assertEquals(urls[sequence[i]], uri);
+          uri = null;
+        }
+      }
+    } finally {
+      http.stop();
+    }
+  }
+  
+  static public class ErrorFilter extends SimpleFilter {
+    @Override
+    public void init(FilterConfig arg0) throws ServletException {
+      throw new ServletException("Throwing the exception from Filter init");
+    }
+
+    /** Configuration for the filter */
+    static public class Initializer extends FilterInitializer {
+      public Initializer() {
+      }
+
+      @Override
+      public void initFilter(FilterContainer container, Configuration conf) {
+        container.addFilter("simple", ErrorFilter.class.getName(), null);
+      }
+    }
+  }
+
+  @Test
+  public void testServletFilterWhenInitThrowsException() throws Exception {
+    Configuration conf = new Configuration();
+    // start a http server with ErrorFilter
+    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+        ErrorFilter.Initializer.class.getName());
+    HttpServer http = createTestServer(conf);
+    try {
+      http.start();
+      fail("expecting exception");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains("Problem starting http server", e);
+    }
+  }
+  
+  /**
+   * Similar to the above test case, except that it uses a different API to add the
+   * filter. Regression test for HADOOP-8786.
+   */
+  @Test
+  public void testContextSpecificServletFilterWhenInitThrowsException()
+      throws Exception {
+    Configuration conf = new Configuration();
+    HttpServer http = createTestServer(conf);
+    HttpServer.defineFilter(http.webAppContext,
+        "ErrorFilter", ErrorFilter.class.getName(),
+        null, null);
+    try {
+      http.start();
+      fail("expecting exception");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains("Unable to initialize WebAppContext", e);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
new file mode 100644
index 0000000..4fad031
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
@@ -0,0 +1,258 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.security.Principal;
+import java.security.PrivilegedExceptionAction;
+import java.util.Set;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosTicket;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.http.TestHttpServer.EchoServlet;
+import org.apache.hadoop.hbase.http.resource.JerseyResource;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpResponse;
+import org.apache.http.auth.AuthSchemeProvider;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.KerberosCredentials;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.config.AuthSchemes;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.config.Lookup;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.entity.ContentType;
+import org.apache.http.impl.auth.SPNegoSchemeFactory;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.util.EntityUtils;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.client.JaasKrbUtil;
+import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
+import org.ietf.jgss.GSSCredential;
+import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.GSSName;
+import org.ietf.jgss.Oid;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test class for SPNEGO authentication on the HttpServer. Uses Kerby's MiniKDC and Apache
+ * HttpComponents to verify that a simple servlet is reachable via SPNEGO and unreachable without it.
+ */
+@Category({MiscTests.class, SmallTests.class})
+public class TestSpnegoHttpServer extends HttpServerFunctionalTest {
+  private static final Log LOG = LogFactory.getLog(TestSpnegoHttpServer.class);
+  private static final String KDC_SERVER_HOST = "localhost";
+  private static final String CLIENT_PRINCIPAL = "client";
+
+  private static HttpServer server;
+  private static URL baseUrl;
+  private static SimpleKdcServer kdc;
+  private static File infoServerKeytab;
+  private static File clientKeytab;
+
+  @BeforeClass
+  public static void setupServer() throws Exception {
+    final String serverPrincipal = "HTTP/" + KDC_SERVER_HOST;
+    final File target = new File(System.getProperty("user.dir"), "target");
+    assertTrue(target.exists());
+
+    kdc = buildMiniKdc();
+    kdc.start();
+
+    File keytabDir = new File(target, TestSpnegoHttpServer.class.getSimpleName()
+        + "_keytabs");
+    if (keytabDir.exists()) {
+      deleteRecursively(keytabDir);
+    }
+    keytabDir.mkdirs();
+
+    infoServerKeytab = new File(keytabDir, serverPrincipal.replace('/', '_') + ".keytab");
+    clientKeytab = new File(keytabDir, CLIENT_PRINCIPAL + ".keytab");
+
+    setupUser(kdc, clientKeytab, CLIENT_PRINCIPAL);
+    setupUser(kdc, infoServerKeytab, serverPrincipal);
+
+    Configuration conf = buildSpnegoConfiguration(serverPrincipal, infoServerKeytab);
+
+    server = createTestServerWithSecurity(conf);
+    server.addServlet("echo", "/echo", EchoServlet.class);
+    server.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
+    server.start();
+    baseUrl = getServerURL(server);
+
+    LOG.info("HTTP server started: "+ baseUrl);
+  }
+
+  @AfterClass
+  public static void stopServer() throws Exception {
+    try {
+      if (null != server) {
+        server.stop();
+      }
+    } catch (Exception e) {
+      LOG.info("Failed to stop info server", e);
+    }
+    try {
+      if (null != kdc) {
+        kdc.stop();
+      }
+    } catch (Exception e) {
+      LOG.info("Failed to stop mini KDC", e);
+    }
+  }
+
+  private static void setupUser(SimpleKdcServer kdc, File keytab, String principal)
+      throws KrbException {
+    kdc.createPrincipal(principal);
+    kdc.exportPrincipal(principal, keytab);
+  }
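+  // Each principal is exported to its own keytab so that both the client and
+  // the HTTP server can authenticate against the mini KDC non-interactively.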
+
+  private static SimpleKdcServer buildMiniKdc() throws Exception {
+    SimpleKdcServer kdc = new SimpleKdcServer();
+
+    final File target = new File(System.getProperty("user.dir"), "target");
+    File kdcDir = new File(target, TestSpnegoHttpServer.class.getSimpleName());
+    if (kdcDir.exists()) {
+      deleteRecursively(kdcDir);
+    }
+    kdcDir.mkdirs();
+    kdc.setWorkDir(kdcDir);
+
+    kdc.setKdcHost(KDC_SERVER_HOST);
+    int kdcPort = getFreePort();
+    kdc.setAllowTcp(true);
+    kdc.setAllowUdp(false);
+    kdc.setKdcTcpPort(kdcPort);
+
+    LOG.info("Starting KDC server at " + KDC_SERVER_HOST + ":" + kdcPort);
+
+    kdc.init();
+
+    return kdc;
+  }
+
+  private static Configuration buildSpnegoConfiguration(String serverPrincipal, File
+      serverKeytab) {
+    Configuration conf = new Configuration();
+    KerberosName.setRules("DEFAULT");
+
+    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
+
+    // Enable Kerberos (pre-req)
+    conf.set("hbase.security.authentication", "kerberos");
+    conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "kerberos");
+    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY, serverPrincipal);
+    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY, serverKeytab.getAbsolutePath());
+
+    return conf;
+  }
+
+  @Test
+  public void testUnauthorizedClientsDisallowed() throws IOException {
+    URL url = new URL(getServerURL(server), "/echo?a=b");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
+  }
+
+  @Test
+  public void testAllowedClient() throws Exception {
+    // Create the subject for the client
+    final Subject clientSubject = JaasKrbUtil.loginUsingKeytab(CLIENT_PRINCIPAL, clientKeytab);
+    final Set<Principal> clientPrincipals = clientSubject.getPrincipals();
+    // Make sure the subject has a principal
+    assertFalse(clientPrincipals.isEmpty());
+
+    // Get a TGT for the subject (might have many, different encryption types). The first should
+    // be the default encryption type.
+    Set<KerberosTicket> privateCredentials =
+            clientSubject.getPrivateCredentials(KerberosTicket.class);
+    assertFalse(privateCredentials.isEmpty());
+    KerberosTicket tgt = privateCredentials.iterator().next();
+    assertNotNull(tgt);
+
+    // The name of the principal
+    final String principalName = clientPrincipals.iterator().next().getName();
+
+    // Run this code, logged in as the subject (the client)
+    HttpResponse resp = Subject.doAs(clientSubject,
+        new PrivilegedExceptionAction<HttpResponse>() {
+      @Override
+      public HttpResponse run() throws Exception {
+        // Logs in with Kerberos via GSS
+        GSSManager gssManager = GSSManager.getInstance();
+        // OID of the Kerberos V5 GSS-API mechanism
+        Oid oid = new Oid("1.2.840.113554.1.2.2");
+        GSSName gssClient = gssManager.createName(principalName, GSSName.NT_USER_NAME);
+        GSSCredential credential = gssManager.createCredential(gssClient,
+            GSSCredential.DEFAULT_LIFETIME, oid, GSSCredential.INITIATE_ONLY);
+
+        HttpClientContext context = HttpClientContext.create();
+        Lookup<AuthSchemeProvider> authRegistry = RegistryBuilder.<AuthSchemeProvider>create()
+            .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true))
+            .build();
+
+        HttpClient client = HttpClients.custom().setDefaultAuthSchemeRegistry(authRegistry).build();
+        BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+        credentialsProvider.setCredentials(AuthScope.ANY, new KerberosCredentials(credential));
+
+        URL url = new URL(getServerURL(server), "/echo?a=b");
+        context.setTargetHost(new HttpHost(url.getHost(), url.getPort()));
+        context.setCredentialsProvider(credentialsProvider);
+        context.setAuthSchemeRegistry(authRegistry);
+
+        HttpGet get = new HttpGet(url.toURI());
+        return client.execute(get, context);
+      }
+    });
+
+    assertNotNull(resp);
+    assertEquals(HttpURLConnection.HTTP_OK, resp.getStatusLine().getStatusCode());
+    assertEquals("a:b", EntityUtils.toString(resp.getEntity()).trim());
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testMissingConfigurationThrowsException() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
+    // Enable Kerberos (pre-req)
+    conf.set("hbase.security.authentication", "kerberos");
+    // Intentionally skip keytab and principal
+
+    HttpServer customServer = createTestServerWithSecurity(conf);
+    customServer.addServlet("echo", "/echo", EchoServlet.class);
+    customServer.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
+    customServer.start();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
new file mode 100644
index 0000000..e1d9aca
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.conf;
+
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Map;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.eclipse.jetty.util.ajax.JSON;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+/**
+ * Basic test case verifying that the ConfServlet can write configuration
+ * to its output in XML and JSON formats.
+ */
+@Category({MiscTests.class, SmallTests.class})
+public class TestConfServlet extends TestCase {
+  private static final String TEST_KEY = "testconfservlet.key";
+  private static final String TEST_VAL = "testval";
+
+  private Configuration getTestConf() {
+    Configuration testConf = new Configuration();
+    testConf.set(TEST_KEY, TEST_VAL);
+    return testConf;
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testWriteJson() throws Exception {
+    StringWriter sw = new StringWriter();
+    ConfServlet.writeResponse(getTestConf(), sw, "json");
+    String json = sw.toString();
+    boolean foundSetting = false;
+    Object parsed = JSON.parse(json);
+    Object[] properties = ((Map<String, Object[]>)parsed).get("properties");
+    for (Object o : properties) {
+      Map<String, Object> propertyInfo = (Map<String, Object>)o;
+      String key = (String)propertyInfo.get("key");
+      String val = (String)propertyInfo.get("value");
+      String resource = (String)propertyInfo.get("resource");
+      System.err.println("k: " + key + " v: " + val + " r: " + resource);
+      // "programatically" (sic) is the resource name Configuration reports
+      // for properties set programmatically via the API.
+      if (TEST_KEY.equals(key) && TEST_VAL.equals(val)
+          && "programatically".equals(resource)) {
+        foundSetting = true;
+      }
+    }
+    assertTrue(foundSetting);
+  }
+
+  @Test
+  public void testWriteXml() throws Exception {
+    StringWriter sw = new StringWriter();
+    ConfServlet.writeResponse(getTestConf(), sw, "xml");
+    String xml = sw.toString();
+
+    DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
+    DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
+    Document doc = builder.parse(new InputSource(new StringReader(xml)));
+    NodeList nameNodes = doc.getElementsByTagName("name");
+    boolean foundSetting = false;
+    for (int i = 0; i < nameNodes.getLength(); i++) {
+      Node nameNode = nameNodes.item(i);
+      String key = nameNode.getTextContent();
+      System.err.println("xml key: " + key);
+      if (TEST_KEY.equals(key)) {
+        foundSetting = true;
+        Element propertyElem = (Element)nameNode.getParentNode();
+        String val = propertyElem.getElementsByTagName("value").item(0).getTextContent();
+        assertEquals(TEST_VAL, val);
+      }
+    }
+    assertTrue(foundSetting);
+  }
+
+  @Test
+  public void testBadFormat() throws Exception {
+    StringWriter sw = new StringWriter();
+    try {
+      ConfServlet.writeResponse(getTestConf(), sw, "not a format");
+      fail("writeResponse with bad format didn't throw!");
+    } catch (ConfServlet.BadFormatException bfe) {
+      // expected
+    }
+    assertEquals("", sw.toString());
+  }
+}
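
Judging from the fields the JSON test reads, ConfServlet's JSON output is
shaped roughly like this (a sketch, not verbatim servlet output):

    {"properties": [
      {"key": "testconfservlet.key", "value": "testval", "resource": "programatically"},
      ...
    ]}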

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
new file mode 100644
index 0000000..d7e68d5
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.http.jmx;
+
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.hbase.http.HttpServerFunctionalTest;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestJMXJsonServlet extends HttpServerFunctionalTest {
+  private static final Log LOG = LogFactory.getLog(TestJMXJsonServlet.class);
+  private static HttpServer server;
+  private static URL baseUrl;
+
+  @BeforeClass public static void setup() throws Exception {
+    // Eclipse doesn't pick this up correctly from the plugin
+    // configuration in the pom.
+    System.setProperty(HttpServerFunctionalTest.TEST_BUILD_WEBAPPS, "target/test-classes/webapps");
+    server = createTestServer();
+    server.start();
+    baseUrl = getServerURL(server);
+  }
+  
+  @AfterClass public static void cleanup() throws Exception {
+    server.stop();
+  }
+  
+  public static void assertReFind(String re, String value) {
+    Pattern p = Pattern.compile(re);
+    Matcher m = p.matcher(value);
+    assertTrue("'"+p+"' does not match "+value, m.find());
+  }
+  
+  @Test public void testQuery() throws Exception {
+    String result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Runtime"));
+    LOG.info("/jmx?qry=java.lang:type=Runtime RESULT: "+result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Runtime\"", result);
+    assertReFind("\"modelerType\"", result);
+    
+    result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory"));
+    LOG.info("/jmx?qry=java.lang:type=Memory RESULT: "+result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
+    assertReFind("\"modelerType\"", result);
+    
+    result = readOutput(new URL(baseUrl, "/jmx"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
+    
+    // test getting an attribute of an MBean
+    result = readOutput(new URL(baseUrl, 
+        "/jmx?get=java.lang:type=Memory::HeapMemoryUsage"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
+    assertReFind("\"committed\"\\s*:", result);
+    
+    // negative test for getting an attribute of an MBean
+    result = readOutput(new URL(baseUrl, 
+        "/jmx?get=java.lang:type=Memory::"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("\"ERROR\"", result);
+
+    // test to get JSONP result
+    result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory&callback=mycallback1"));
+    LOG.info("/jmx?qry=java.lang:type=Memory&callback=mycallback RESULT: "+result);
+    assertReFind("^mycallback1\\(\\{", result);
+    assertReFind("\\}\\);$", result);
+
+    // negative test for getting an attribute of an MBean as JSONP
+    result = readOutput(new URL(baseUrl,
+        "/jmx?get=java.lang:type=Memory::&callback=mycallback2"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("^mycallback2\\(\\{", result);
+    assertReFind("\"ERROR\"", result);
+    assertReFind("\\}\\);$", result);
+
+    // test getting an attribute of an MBean as JSONP
+    result = readOutput(new URL(baseUrl,
+        "/jmx?get=java.lang:type=Memory::HeapMemoryUsage&callback=mycallback3"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("^mycallback3\\(\\{", result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
+    assertReFind("\"committed\"\\s*:", result);
+    assertReFind("\\}\\);$", result);
+
+  }
+
+  @Test
+  public void testDisallowedJSONPCallback() throws Exception {
+    String callback = "function(){alert('bigproblems!')};foo";
+    URL url = new URL(
+        baseUrl, "/jmx?qry=java.lang:type=Memory&callback="+URLEncoder.encode(callback, "UTF-8"));
+    HttpURLConnection cnxn = (HttpURLConnection) url.openConnection();
+    assertEquals(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, cnxn.getResponseCode());
+  }
+
+  @Test
+  public void testUnderscoresInJSONPCallback() throws Exception {
+    String callback = "my_function";
+    URL url = new URL(
+        baseUrl, "/jmx?qry=java.lang:type=Memory&callback="+URLEncoder.encode(callback, "UTF-8"));
+    HttpURLConnection cnxn = (HttpURLConnection) url.openConnection();
+    assertEquals(HttpServletResponse.SC_OK, cnxn.getResponseCode());
+  }
+}
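
As a quick reference, the queries exercised above cover the servlet's main
entry points (paths as used in the test, relative to the server's base URL):

    /jmx                                                 all MBeans
    /jmx?qry=java.lang:type=Memory                       MBeans matching a query
    /jmx?get=java.lang:type=Memory::HeapMemoryUsage      a single MBean attribute
    /jmx?qry=java.lang:type=Memory&callback=mycallback1  JSONP-wrapped result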

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
new file mode 100644
index 0000000..81bcbd5
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.http.ServerConfigurationKeys;
+import org.apache.hadoop.hbase.http.lib.StaticUserWebFilter.StaticUserFilter;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestStaticUserWebFilter {
+  private FilterConfig mockConfig(String username) {
+    FilterConfig mock = Mockito.mock(FilterConfig.class);
+    Mockito.doReturn(username).when(mock).getInitParameter(
+            ServerConfigurationKeys.HBASE_HTTP_STATIC_USER);
+    return mock;
+  }
+  
+  @Test
+  public void testFilter() throws Exception {
+    FilterConfig config = mockConfig("myuser");
+    StaticUserFilter suf = new StaticUserFilter();
+    suf.init(config);
+    
+    ArgumentCaptor<HttpServletRequestWrapper> wrapperArg =
+      ArgumentCaptor.forClass(HttpServletRequestWrapper.class);
+
+    FilterChain chain = mock(FilterChain.class);
+    
+    suf.doFilter(mock(HttpServletRequest.class), mock(ServletResponse.class),
+        chain);
+        
+    Mockito.verify(chain).doFilter(wrapperArg.capture(), Mockito.<ServletResponse>anyObject());
+    
+    HttpServletRequestWrapper wrapper = wrapperArg.getValue();
+    assertEquals("myuser", wrapper.getUserPrincipal().getName());
+    assertEquals("myuser", wrapper.getRemoteUser());
+    
+    suf.destroy();
+  }
+  
+  @Test
+  public void testOldStyleConfiguration() {
+    Configuration conf = new Configuration();
+    conf.set("dfs.web.ugi", "joe,group1,group2");
+    assertEquals("joe", StaticUserWebFilter.getUsernameFromConf(conf));
+  }
+
+  @Test
+  public void testConfiguration() {
+    Configuration conf = new Configuration();
+    conf.set(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, "dr.stack");
+    assertEquals("dr.stack", StaticUserWebFilter.getUsernameFromConf(conf));
+  }
+
+}
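
A minimal sketch of configuring the static user the way these tests do (the
key names are taken from the tests; "webuser" is a hypothetical value):

    Configuration conf = new Configuration();
    // current key, read by StaticUserWebFilter.getUsernameFromConf()
    conf.set(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, "webuser");
    // legacy fallback: the username is the first component of dfs.web.ugi
    // conf.set("dfs.web.ugi", "webuser,group1,group2");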

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
new file mode 100644
index 0000000..e14e3b4
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
@@ -0,0 +1,92 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.hadoop.hbase.http.log;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.*;
+import java.net.*;
+
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.commons.logging.*;
+import org.apache.commons.logging.impl.*;
+import org.apache.log4j.*;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestLogLevel {
+  static final PrintStream out = System.out;
+
+  @Test (timeout=60000)
+  @SuppressWarnings("deprecation")
+  public void testDynamicLogLevel() throws Exception {
+    String logName = TestLogLevel.class.getName();
+    Log testlog = LogFactory.getLog(logName);
+
+    //only test Log4JLogger
+    if (testlog instanceof Log4JLogger) {
+      Logger log = ((Log4JLogger)testlog).getLogger();
+      log.debug("log.debug1");
+      log.info("log.info1");
+      log.error("log.error1");
+      assertTrue(!Level.ERROR.equals(log.getEffectiveLevel()));
+
+      HttpServer server = null;
+      try {
+        server = new HttpServer.Builder().setName("..")
+            .addEndpoint(new URI("http://localhost:0")).setFindPort(true)
+            .build();
+
+        server.start();
+        String authority = NetUtils.getHostPortString(server
+            .getConnectorAddress(0));
+
+        //servlet
+        URL url =
+            new URL("http://" + authority + "/logLevel?log=" + logName + "&level=" + Level.ERROR);
+        out.println("*** Connecting to " + url);
+        try (BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream()))) {
+          for(String line; (line = in.readLine()) != null; out.println(line));
+        }
+        log.debug("log.debug2");
+        log.info("log.info2");
+        log.error("log.error2");
+        assertTrue(Level.ERROR.equals(log.getEffectiveLevel()));
+
+        //command line
+        String[] args = {"-setlevel", authority, logName, Level.DEBUG.toString()};
+        LogLevel.main(args);
+        log.debug("log.debug3");
+        log.info("log.info3");
+        log.error("log.error3");
+        assertTrue(Level.DEBUG.equals(log.getEffectiveLevel()));
+      } finally {
+        if (server != null) {
+          server.stop();
+        }
+      }
+    }
+    else {
+      out.println(testlog.getClass() + " not tested.");
+    }
+  }
+}
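
For reference, the two ways the test drives the log-level change (the logger
name and host:port here are placeholders):

    # via the servlet
    http://<host:port>/logLevel?log=<logger-name>&level=ERROR

    # via the command line, same arguments the test passes to LogLevel.main()
    -setlevel <host:port> <logger-name> DEBUG
    -getlevel <host:port> <logger-name>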

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
new file mode 100644
index 0000000..bf0e609
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.resource;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.TreeMap;
+
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.eclipse.jetty.util.ajax.JSON;
+
+/**
+ * A simple Jersey resource class used by TestHttpServer.
+ * The resource simply puts the path and the op parameter in a map
+ * and returns the map in JSON format in the response.
+ */
+@Path("")
+public class JerseyResource {
+  private static final Log LOG = LogFactory.getLog(JerseyResource.class);
+
+  public static final String PATH = "path";
+  public static final String OP = "op";
+
+  @GET
+  @Path("{" + PATH + ":.*}")
+  @Produces({MediaType.APPLICATION_JSON})
+  public Response get(
+      @PathParam(PATH) @DefaultValue("UNKNOWN_" + PATH) final String path,
+      @QueryParam(OP) @DefaultValue("UNKNOWN_" + OP) final String op
+      ) throws IOException {
+    LOG.info("get: " + PATH + "=" + path + ", " + OP + "=" + op);
+
+    final Map<String, Object> m = new TreeMap<>();
+    m.put(PATH, path);
+    m.put(OP, op);
+    final String js = JSON.toString(m);
+    return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+  }
+}
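
For illustration, a GET against this resource echoes the two parameters back
as a JSON map, with keys in TreeMap (sorted) order; the path and op values
here are hypothetical:

    GET /foo/bar?op=GETFILESTATUS
    ->  {"op":"GETFILESTATUS","path":"foo/bar"}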


[10/13] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

Posted by bu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
deleted file mode 100644
index e23eecd..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.log;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.regex.Pattern;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Jdk14Logger;
-import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.util.ServletUtil;
-
-/**
- * Change log level in runtime.
- */
-@InterfaceStability.Evolving
-public class LogLevel {
-  public static final String USAGES = "\nUsage: General options are:\n"
-      + "\t[-getlevel <host:httpPort> <name>]\n"
-      + "\t[-setlevel <host:httpPort> <name> <level>]\n";
-
-  /**
-   * A command line implementation
-   */
-  public static void main(String[] args) {
-    if (args.length == 3 && "-getlevel".equals(args[0])) {
-      process("http://" + args[1] + "/logLevel?log=" + args[2]);
-      return;
-    }
-    else if (args.length == 4 && "-setlevel".equals(args[0])) {
-      process("http://" + args[1] + "/logLevel?log=" + args[2]
-              + "&level=" + args[3]);
-      return;
-    }
-
-    System.err.println(USAGES);
-    System.exit(-1);
-  }
-
-  private static void process(String urlstring) {
-    try {
-      URL url = new URL(urlstring);
-      System.out.println("Connecting to " + url);
-      URLConnection connection = url.openConnection();
-      connection.connect();
-      try (InputStreamReader streamReader = new InputStreamReader(connection.getInputStream());
-           BufferedReader bufferedReader = new BufferedReader(streamReader)) {
-        for(String line; (line = bufferedReader.readLine()) != null; ) {
-          if (line.startsWith(MARKER)) {
-            System.out.println(TAG.matcher(line).replaceAll(""));
-          }
-        }
-      }
-    } catch (IOException ioe) {
-      System.err.println("" + ioe);
-    }
-  }
-
-  static final String MARKER = "<!-- OUTPUT -->";
-  static final Pattern TAG = Pattern.compile("<[^>]*>");
-
-  /**
-   * A servlet implementation
-   */
-  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-  @InterfaceStability.Unstable
-  public static class Servlet extends HttpServlet {
-    private static final long serialVersionUID = 1L;
-
-    @Override
-    public void doGet(HttpServletRequest request, HttpServletResponse response
-        ) throws ServletException, IOException {
-
-      // Do the authorization
-      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
-          response)) {
-        return;
-      }
-
-      PrintWriter out = ServletUtil.initHTML(response, "Log Level");
-      String logName = ServletUtil.getParameter(request, "log");
-      String level = ServletUtil.getParameter(request, "level");
-
-      if (logName != null) {
-        out.println("<br /><hr /><h3>Results</h3>");
-        out.println(MARKER
-            + "Submitted Log Name: <b>" + logName + "</b><br />");
-
-        Log log = LogFactory.getLog(logName);
-        out.println(MARKER
-            + "Log Class: <b>" + log.getClass().getName() +"</b><br />");
-        if (level != null) {
-          out.println(MARKER + "Submitted Level: <b>" + level + "</b><br />");
-        }
-
-        if (log instanceof Log4JLogger) {
-          process(((Log4JLogger)log).getLogger(), level, out);
-        }
-        else if (log instanceof Jdk14Logger) {
-          process(((Jdk14Logger)log).getLogger(), level, out);
-        }
-        else {
-          out.println("Sorry, " + log.getClass() + " not supported.<br />");
-        }
-      }
-
-      out.println(FORMS);
-      out.println(ServletUtil.HTML_TAIL);
-    }
-
-    static final String FORMS = "\n<br /><hr /><h3>Get / Set</h3>"
-        + "\n<form>Log: <input type='text' size='50' name='log' /> "
-        + "<input type='submit' value='Get Log Level' />"
-        + "</form>"
-        + "\n<form>Log: <input type='text' size='50' name='log' /> "
-        + "Level: <input type='text' name='level' /> "
-        + "<input type='submit' value='Set Log Level' />"
-        + "</form>";
-
-    private static void process(org.apache.log4j.Logger log, String level,
-        PrintWriter out) throws IOException {
-      if (level != null) {
-        if (!level.equals(org.apache.log4j.Level.toLevel(level).toString())) {
-          out.println(MARKER + "Bad level : <b>" + level + "</b><br />");
-        } else {
-          log.setLevel(org.apache.log4j.Level.toLevel(level));
-          out.println(MARKER + "Setting Level to " + level + " ...<br />");
-        }
-      }
-      out.println(MARKER
-          + "Effective level: <b>" + log.getEffectiveLevel() + "</b><br />");
-    }
-
-    private static void process(java.util.logging.Logger log, String level,
-        PrintWriter out) throws IOException {
-      if (level != null) {
-        log.setLevel(java.util.logging.Level.parse(level));
-        out.println(MARKER + "Setting Level to " + level + " ...<br />");
-      }
-
-      java.util.logging.Level lev;
-      for(; (lev = log.getLevel()) == null; log = log.getParent());
-      out.println(MARKER + "Effective level: <b>" + lev + "</b><br />");
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/main/java/org/apache/hadoop/hbase/http/package-info.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/package-info.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/package-info.java
deleted file mode 100644
index f55e24b..0000000
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/package-info.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * <p>
- * Copied from hadoop source code.<br>
- * See https://issues.apache.org/jira/browse/HADOOP-10232 to know why.
- * </p>
- */
-@InterfaceStability.Unstable
-package org.apache.hadoop.hbase.http;
-
-import org.apache.yetus.audience.InterfaceStability;

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
deleted file mode 100644
index 69972a2..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
+++ /dev/null
@@ -1,272 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.authorize.AccessControlList;
-import org.junit.Assert;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.http.HttpServer.Builder;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.ServerSocket;
-import java.net.URI;
-import java.net.URL;
-import java.net.MalformedURLException;
-
-/**
- * This is a base class for functional tests of the {@link HttpServer}.
- * The methods are static for other classes to import statically.
- */
-public class HttpServerFunctionalTest extends Assert {
-  /** JVM property for the webapp test dir : {@value} */
-  public static final String TEST_BUILD_WEBAPPS = "test.build.webapps";
-  /** expected location of the test.build.webapps dir: {@value} */
-  private static final String BUILD_WEBAPPS_DIR = "src/main/resources/hbase-webapps";
-
-  /** name of the test webapp: {@value} */
-  private static final String TEST = "test";
-
-  /**
-   * Create but do not start the test webapp server. The test webapp dir is
-   * prepared/checked in advance.
-   *
-   * @return the server instance
-   *
-   * @throws IOException if a problem occurs
-   * @throws AssertionError if a condition was not met
-   */
-  public static HttpServer createTestServer() throws IOException {
-    prepareTestWebapp();
-    return createServer(TEST);
-  }
-
-  /**
-   * Create but do not start the test webapp server. The test webapp dir is
-   * prepared/checked in advance.
-   * @param conf the server configuration to use
-   * @return the server instance
-   *
-   * @throws IOException if a problem occurs
-   * @throws AssertionError if a condition was not met
-   */
-  public static HttpServer createTestServer(Configuration conf)
-      throws IOException {
-    prepareTestWebapp();
-    return createServer(TEST, conf);
-  }
-
-  public static HttpServer createTestServer(Configuration conf, AccessControlList adminsAcl)
-      throws IOException {
-    prepareTestWebapp();
-    return createServer(TEST, conf, adminsAcl);
-  }
-
-  /**
-   * Create but do not start the test webapp server. The test webapp dir is
-   * prepared/checked in advance.
-   * @param conf the server configuration to use
-   * @return the server instance
-   *
-   * @throws IOException if a problem occurs
-   * @throws AssertionError if a condition was not met
-   */
-  public static HttpServer createTestServer(Configuration conf,
-      String[] pathSpecs) throws IOException {
-    prepareTestWebapp();
-    return createServer(TEST, conf, pathSpecs);
-  }
-
-  public static HttpServer createTestServerWithSecurity(Configuration conf) throws IOException {
-    prepareTestWebapp();
-    return localServerBuilder(TEST).setFindPort(true).setConf(conf).setSecurityEnabled(true)
-        // InfoServer normally sets these for us
-        .setUsernameConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY)
-        .setKeytabConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY)
-        .build();
-  }
-
-  /**
-   * Prepare the test webapp by creating the directory from the test properties;
-   * fail if the directory cannot be created.
-   * @throws AssertionError if a condition was not met
-   */
-  protected static void prepareTestWebapp() {
-    String webapps = System.getProperty(TEST_BUILD_WEBAPPS, BUILD_WEBAPPS_DIR);
-    File testWebappDir = new File(webapps +
-        File.separatorChar + TEST);
-    try {
-      if (!testWebappDir.exists()) {
-        fail("Test webapp dir " + testWebappDir.getCanonicalPath() + " missing");
-      }
-    } catch (IOException e) {
-      // ignore: getCanonicalPath() failed, so there is no path to report
-    }
-  }
-
-  /**
-   * Create an HttpServer instance on the given address for the given webapp
-   * @param host to bind
-   * @param port to bind
-   * @return the server
-   * @throws IOException if it could not be created
-   */
-  public static HttpServer createServer(String host, int port)
-      throws IOException {
-    prepareTestWebapp();
-    return new HttpServer.Builder().setName(TEST)
-        .addEndpoint(URI.create("http://" + host + ":" + port))
-        .setFindPort(true).build();
-  }
-
-  /**
-   * Create an HttpServer instance for the given webapp
-   * @param webapp the webapp to work with
-   * @return the server
-   * @throws IOException if it could not be created
-   */
-  public static HttpServer createServer(String webapp) throws IOException {
-    return localServerBuilder(webapp).setFindPort(true).build();
-  }
-  /**
-   * Create an HttpServer instance for the given webapp
-   * @param webapp the webapp to work with
-   * @param conf the configuration to use for the server
-   * @return the server
-   * @throws IOException if it could not be created
-   */
-  public static HttpServer createServer(String webapp, Configuration conf)
-      throws IOException {
-    return localServerBuilder(webapp).setFindPort(true).setConf(conf).build();
-  }
-
-  public static HttpServer createServer(String webapp, Configuration conf, AccessControlList adminsAcl)
-      throws IOException {
-    return localServerBuilder(webapp).setFindPort(true).setConf(conf).setACL(adminsAcl).build();
-  }
-
-  private static Builder localServerBuilder(String webapp) {
-    return new HttpServer.Builder().setName(webapp).addEndpoint(
-        URI.create("http://localhost:0"));
-  }
-
-  /**
-   * Create an HttpServer instance for the given webapp
-   * @param webapp the webapp to work with
-   * @param conf the configuration to use for the server
-   * @param pathSpecs the paths specifications the server will service
-   * @return the server
-   * @throws IOException if it could not be created
-   */
-  public static HttpServer createServer(String webapp, Configuration conf,
-      String[] pathSpecs) throws IOException {
-    return localServerBuilder(webapp).setFindPort(true).setConf(conf).setPathSpec(pathSpecs).build();
-  }
-
-  /**
-   * Create and start a server with the test webapp
-   *
-   * @return the newly started server
-   *
-   * @throws IOException on any failure
-   * @throws AssertionError if a condition was not met
-   */
-  public static HttpServer createAndStartTestServer() throws IOException {
-    HttpServer server = createTestServer();
-    server.start();
-    return server;
-  }
-
-  /**
-   * If the server is non null, stop it
-   * @param server to stop
-   * @throws Exception on any failure
-   */
-  public static void stop(HttpServer server) throws Exception {
-    if (server != null) {
-      server.stop();
-    }
-  }
-
-  /**
-   * Pass in a server, return a URL bound to localhost and its port
-   * @param server server
-   * @return a URL bound to the base of the server
-   * @throws MalformedURLException if the URL cannot be created.
-   */
-  public static URL getServerURL(HttpServer server)
-      throws MalformedURLException {
-    assertNotNull("No server", server);
-    return new URL("http://"
-        + NetUtils.getHostPortString(server.getConnectorAddress(0)));
-  }
-
-  /**
-   * Read in the content from a URL
-   * @param url URL To read
-   * @return the text from the output
-   * @throws IOException if something went wrong
-   */
-  protected static String readOutput(URL url) throws IOException {
-    StringBuilder out = new StringBuilder();
-    InputStream in = url.openConnection().getInputStream();
-    byte[] buffer = new byte[64 * 1024];
-    int len = in.read(buffer);
-    while (len > 0) {
-      out.append(new String(buffer, 0, len));
-      len = in.read(buffer);
-    }
-    return out.toString();
-  }
-
-  /**
-   * Recursively deletes a {@link File}.
-   */
-  protected static void deleteRecursively(File d) {
-    if (d.isDirectory()) {
-      for (String name : d.list()) {
-        File child = new File(d, name);
-        if (child.isFile()) {
-          child.delete();
-        } else {
-          deleteRecursively(child);
-        }
-      }
-    }
-    d.delete();
-  }
-
-  /**
-   * Picks a free port on the host by binding a Socket to '0'.
-   */
-  protected static int getFreePort() throws IOException {
-    try (ServerSocket s = new ServerSocket(0)) {
-      s.setReuseAddress(true);
-      return s.getLocalPort();
-    }
-  }
-}
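
A typical use of these helpers in a test, sketched from the methods above
("/some/path" is a placeholder):

    HttpServer server = createAndStartTestServer();
    try {
      String body = readOutput(new URL(getServerURL(server), "/some/path"));
      // assertions on body ...
    } finally {
      stop(server);
    }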

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
deleted file mode 100644
index 729dd06..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.Set;
-import java.util.TreeSet;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestGlobalFilter extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-  static final Set<String> RECORDS = new TreeSet<>();
-
-  /** A very simple filter that records accessed URIs */
-  static public class RecordingFilter implements Filter {
-    private FilterConfig filterConfig = null;
-
-    @Override
-    public void init(FilterConfig filterConfig) {
-      this.filterConfig = filterConfig;
-    }
-
-    @Override
-    public void destroy() {
-      this.filterConfig = null;
-    }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain chain) throws IOException, ServletException {
-      if (filterConfig == null) {
-        return;
-      }
-
-      String uri = ((HttpServletRequest)request).getRequestURI();
-      LOG.info("filtering " + uri);
-      RECORDS.add(uri);
-      chain.doFilter(request, response);
-    }
-
-    /** Configuration for RecordingFilter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {}
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addGlobalFilter("recording", RecordingFilter.class.getName(), null);
-      }
-    }
-  }
-
-
-  /** Access a URL, ignoring IOExceptions such as "page does not exist". */
-  static void access(String urlstring) throws IOException {
-    LOG.warn("access " + urlstring);
-    URL url = new URL(urlstring);
-    URLConnection connection = url.openConnection();
-    connection.connect();
-
-    try {
-      BufferedReader in = new BufferedReader(new InputStreamReader(
-          connection.getInputStream()));
-      try {
-        for(; in.readLine() != null; );
-      } finally {
-        in.close();
-      }
-    } catch(IOException ioe) {
-      LOG.warn("urlstring=" + urlstring, ioe);
-    }
-  }
-
-  @Test
-  public void testServletFilter() throws Exception {
-    Configuration conf = new Configuration();
-
-    //start a http server with CountingFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        RecordingFilter.Initializer.class.getName());
-    HttpServer http = createTestServer(conf);
-    http.start();
-
-    final String fsckURL = "/fsck";
-    final String stacksURL = "/stacks";
-    final String ajspURL = "/a.jsp";
-    final String listPathsURL = "/listPaths";
-    final String dataURL = "/data";
-    final String streamFile = "/streamFile";
-    final String rootURL = "/";
-    final String allURL = "/*";
-    final String outURL = "/static/a.out";
-    final String logURL = "/logs/a.log";
-
-    final String[] urls = {fsckURL, stacksURL, ajspURL, listPathsURL,
-        dataURL, streamFile, rootURL, allURL, outURL, logURL};
-
-    //access the urls
-    final String prefix = "http://"
-        + NetUtils.getHostPortString(http.getConnectorAddress(0));
-    try {
-      for(int i = 0; i < urls.length; i++) {
-        access(prefix + urls[i]);
-      }
-    } finally {
-      http.stop();
-    }
-
-    LOG.info("RECORDS = " + RECORDS);
-
-    //verify records
-    for(int i = 0; i < urls.length; i++) {
-      assertTrue(RECORDS.remove(urls[i]));
-    }
-    assertTrue(RECORDS.isEmpty());
-  }
-}
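
Deployments hook in such filters the same way the test does, through the
filter-initializers property (a sketch; MyFilter is hypothetical):

    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
        MyFilter.Initializer.class.getName());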

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
deleted file mode 100644
index 5bc026c..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import static org.junit.Assert.*;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.Mockito;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHtmlQuoting {
-
-  @Test public void testNeedsQuoting() throws Exception {
-    assertTrue(HtmlQuoting.needsQuoting("abcde>"));
-    assertTrue(HtmlQuoting.needsQuoting("<abcde"));
-    assertTrue(HtmlQuoting.needsQuoting("abc'de"));
-    assertTrue(HtmlQuoting.needsQuoting("abcde\""));
-    assertTrue(HtmlQuoting.needsQuoting("&"));
-    assertFalse(HtmlQuoting.needsQuoting(""));
-    assertFalse(HtmlQuoting.needsQuoting("ab\ncdef"));
-    assertFalse(HtmlQuoting.needsQuoting(null));
-  }
-
-  @Test public void testQuoting() throws Exception {
-    assertEquals("ab&lt;cd", HtmlQuoting.quoteHtmlChars("ab<cd"));
-    assertEquals("ab&gt;", HtmlQuoting.quoteHtmlChars("ab>"));
-    assertEquals("&amp;&amp;&amp;", HtmlQuoting.quoteHtmlChars("&&&"));
-    assertEquals(" &apos;\n", HtmlQuoting.quoteHtmlChars(" '\n"));
-    assertEquals("&quot;", HtmlQuoting.quoteHtmlChars("\""));
-    assertEquals(null, HtmlQuoting.quoteHtmlChars(null));
-  }
-
-  private void runRoundTrip(String str) throws Exception {
-    assertEquals(str,
-                 HtmlQuoting.unquoteHtmlChars(HtmlQuoting.quoteHtmlChars(str)));
-  }
-
-  @Test public void testRoundtrip() throws Exception {
-    runRoundTrip("");
-    runRoundTrip("<>&'\"");
-    runRoundTrip("ab>cd<ef&ghi'\"");
-    runRoundTrip("A string\n with no quotable chars in it!");
-    runRoundTrip(null);
-    StringBuilder buffer = new StringBuilder();
-    for(char ch=0; ch < 127; ++ch) {
-      buffer.append(ch);
-    }
-    runRoundTrip(buffer.toString());
-  }
-
-
-  @Test
-  public void testRequestQuoting() throws Exception {
-    HttpServletRequest mockReq = Mockito.mock(HttpServletRequest.class);
-    HttpServer.QuotingInputFilter.RequestQuoter quoter =
-      new HttpServer.QuotingInputFilter.RequestQuoter(mockReq);
-
-    Mockito.doReturn("a<b").when(mockReq).getParameter("x");
-    assertEquals("Test simple param quoting",
-        "a&lt;b", quoter.getParameter("x"));
-
-    Mockito.doReturn(null).when(mockReq).getParameter("x");
-    assertEquals("Test that missing parameters dont cause NPE",
-        null, quoter.getParameter("x"));
-
-    Mockito.doReturn(new String[]{"a<b", "b"}).when(mockReq).getParameterValues("x");
-    assertArrayEquals("Test escaping of an array",
-        new String[]{"a&lt;b", "b"}, quoter.getParameterValues("x"));
-
-    Mockito.doReturn(null).when(mockReq).getParameterValues("x");
-    assertArrayEquals("Test that missing parameters dont cause NPE for array",
-        null, quoter.getParameterValues("x"));
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
deleted file mode 100644
index b8d21d1..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Logger;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import org.eclipse.jetty.server.RequestLog;
-import org.eclipse.jetty.server.NCSARequestLog;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpRequestLog {
-
-  @Test
-  public void testAppenderUndefined() {
-    RequestLog requestLog = HttpRequestLog.getRequestLog("test");
-    assertNull("RequestLog should be null", requestLog);
-  }
-
-  @Test
-  public void testAppenderDefined() {
-    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
-    requestLogAppender.setName("testrequestlog");
-    Logger.getLogger("http.requests.test").addAppender(requestLogAppender);
-    RequestLog requestLog = HttpRequestLog.getRequestLog("test");
-    Logger.getLogger("http.requests.test").removeAppender(requestLogAppender);
-    assertNotNull("RequestLog should not be null", requestLog);
-    assertEquals("Class mismatch", NCSARequestLog.class, requestLog.getClass());
-  }
-}
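
HttpRequestLog.getRequestLog("test") resolves its appender from the logger
"http.requests.test", so the equivalent log4j.properties wiring would look
roughly like this (a sketch; the appender and file names are arbitrary, and
the setters match those exercised in TestHttpRequestLogAppender below):

    log4j.logger.http.requests.test=INFO,testrequestlog
    log4j.appender.testrequestlog=org.apache.hadoop.hbase.http.HttpRequestLogAppender
    log4j.appender.testrequestlog.Filename=jetty-test-yyyy_mm_dd.log
    log4j.appender.testrequestlog.RetainDays=17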

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
deleted file mode 100644
index a17b9e9..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import static org.junit.Assert.assertEquals;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpRequestLogAppender {
-
-  @Test
-  public void testParameterPropagation() {
-
-    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
-    requestLogAppender.setFilename("jetty-namenode-yyyy_mm_dd.log");
-    requestLogAppender.setRetainDays(17);
-    assertEquals("Filename mismatch", "jetty-namenode-yyyy_mm_dd.log",
-        requestLogAppender.getFilename());
-    assertEquals("Retain days mismatch", 17,
-        requestLogAppender.getRetainDays());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
deleted file mode 100644
index fddb2a4..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ /dev/null
@@ -1,617 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.HttpURLConnection;
-import java.net.URI;
-import java.net.URL;
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedSet;
-import java.util.TreeSet;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.Executor;
-import java.util.concurrent.Executors;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter;
-import org.apache.hadoop.hbase.http.resource.JerseyResource;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.Groups;
-import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AccessControlList;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.util.ajax.JSON;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.Mockito;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpServer extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(TestHttpServer.class);
-  private static HttpServer server;
-  private static URL baseUrl;
-  // jetty 9.4.x needs this many threads to start, even for a small test server.
-  static final int MAX_THREADS = 16;
-
-  @SuppressWarnings("serial")
-  public static class EchoMapServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request,
-                      HttpServletResponse response
-                      ) throws ServletException, IOException {
-      PrintWriter out = response.getWriter();
-      Map<String, String[]> params = request.getParameterMap();
-      SortedSet<String> keys = new TreeSet<>(params.keySet());
-      for(String key: keys) {
-        out.print(key);
-        out.print(':');
-        String[] values = params.get(key);
-        if (values.length > 0) {
-          out.print(values[0]);
-          for(int i=1; i < values.length; ++i) {
-            out.print(',');
-            out.print(values[i]);
-          }
-        }
-        out.print('\n');
-      }
-      out.close();
-    }
-  }
-
-  @SuppressWarnings("serial")
-  public static class EchoServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request,
-                      HttpServletResponse response
-                      ) throws ServletException, IOException {
-      PrintWriter out = response.getWriter();
-      SortedSet<String> sortedKeys = new TreeSet<>();
-      Enumeration<String> keys = request.getParameterNames();
-      while(keys.hasMoreElements()) {
-        sortedKeys.add(keys.nextElement());
-      }
-      for(String key: sortedKeys) {
-        out.print(key);
-        out.print(':');
-        out.print(request.getParameter(key));
-        out.print('\n');
-      }
-      out.close();
-    }
-  }
-
-  @SuppressWarnings("serial")
-  public static class LongHeaderServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request,
-                      HttpServletResponse response
-    ) throws ServletException, IOException {
-      Assert.assertEquals(63 * 1024, request.getHeader("longheader").length());
-      response.setStatus(HttpServletResponse.SC_OK);
-    }
-  }
-
-  @SuppressWarnings("serial")
-  public static class HtmlContentServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request,
-                      HttpServletResponse response
-                      ) throws ServletException, IOException {
-      response.setContentType("text/html");
-      PrintWriter out = response.getWriter();
-      out.print("hello world");
-      out.close();
-    }
-  }
-
-  @BeforeClass public static void setup() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, MAX_THREADS);
-    server = createTestServer(conf);
-    server.addServlet("echo", "/echo", EchoServlet.class);
-    server.addServlet("echomap", "/echomap", EchoMapServlet.class);
-    server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
-    server.addServlet("longheader", "/longheader", LongHeaderServlet.class);
-    server.addJerseyResourcePackage(
-        JerseyResource.class.getPackage().getName(), "/jersey/*");
-    server.start();
-    baseUrl = getServerURL(server);
-    LOG.info("HTTP server started: "+ baseUrl);
-  }
-
-  @AfterClass public static void cleanup() throws Exception {
-    server.stop();
-  }
-
-  /** Test that the maximum number of server threads cannot be exceeded. */
-  @Test public void testMaxThreads() throws Exception {
-    int clientThreads = MAX_THREADS * 10;
-    Executor executor = Executors.newFixedThreadPool(clientThreads);
-    // Run many clients to make the server reach its maximum number of threads
-    final CountDownLatch ready = new CountDownLatch(clientThreads);
-    final CountDownLatch start = new CountDownLatch(1);
-    for (int i = 0; i < clientThreads; i++) {
-      executor.execute(new Runnable() {
-        @Override
-        public void run() {
-          ready.countDown();
-          try {
-            start.await();
-            assertEquals("a:b\nc:d\n",
-                         readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
-            int serverThreads = server.webServer.getThreadPool().getThreads();
-            assertTrue("More threads are started than expected, Server Threads count: "
-                    + serverThreads, serverThreads <= MAX_THREADS);
-            System.out.println("Number of threads = " + serverThreads +
-                " which is less or equal than the max = " + MAX_THREADS);
-          } catch (Exception e) {
-            // do nothing
-          }
-        }
-      });
-    }
-    // Start the client threads when they are all ready
-    ready.await();
-    start.countDown();
-  }
-
-  @Test public void testEcho() throws Exception {
-    assertEquals("a:b\nc:d\n",
-                 readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
-    assertEquals("a:b\nc&lt;:d\ne:&gt;\n",
-                 readOutput(new URL(baseUrl, "/echo?a=b&c<=d&e=>")));
-  }
-
-  /** Test the echo map servlet that uses getParameterMap. */
-  @Test public void testEchoMap() throws Exception {
-    assertEquals("a:b\nc:d\n",
-                 readOutput(new URL(baseUrl, "/echomap?a=b&c=d")));
-    assertEquals("a:b,&gt;\nc&lt;:d\n",
-                 readOutput(new URL(baseUrl, "/echomap?a=b&c<=d&a=>")));
-  }
-
-  /**
-   *  Test that verifies headers can be up to 64K long.
-   *  The test adds a 63K header leaving 1K for other headers.
-   *  This is because the header buffer setting is for ALL headers,
-   *  names and values included. */
-  @Test public void testLongHeader() throws Exception {
-    URL url = new URL(baseUrl, "/longheader");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    StringBuilder sb = new StringBuilder();
-    for (int i = 0 ; i < 63 * 1024; i++) {
-      sb.append("a");
-    }
-    conn.setRequestProperty("longheader", sb.toString());
-    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-  }
-
-  @Test
-  public void testContentTypes() throws Exception {
-    // Static CSS files should have text/css
-    URL cssUrl = new URL(baseUrl, "/static/test.css");
-    HttpURLConnection conn = (HttpURLConnection)cssUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/css", conn.getContentType());
-
-    // Servlets should have text/plain with proper encoding by default
-    URL servletUrl = new URL(baseUrl, "/echo?a=b");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/plain;charset=utf-8", conn.getContentType());
-
-    // We should ignore parameters for mime types; i.e., a parameter
-    // ending in .css should not change the mime type
-    servletUrl = new URL(baseUrl, "/echo?a=b.css");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/plain;charset=utf-8", conn.getContentType());
-
-    // Servlets that specify text/html should get that content type
-    servletUrl = new URL(baseUrl, "/htmlcontent");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/html;charset=utf-8", conn.getContentType());
-
-    // JSPs should default to text/html with utf8
-    // JSPs do not work from unit tests
-    // servletUrl = new URL(baseUrl, "/testjsp.jsp");
-    // conn = (HttpURLConnection)servletUrl.openConnection();
-    // conn.connect();
-    // assertEquals(200, conn.getResponseCode());
-    // assertEquals("text/html; charset=utf-8", conn.getContentType());
-  }
-
-  /**
-   * Dummy filter that mimics an authentication filter. It obtains the user
-   * identity from the request parameter user.name and wraps the request so
-   * that request.getRemoteUser() returns that identity.
-   *
-   */
-  public static class DummyServletFilter implements Filter {
-    @Override
-    public void destroy() { }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain filterChain) throws IOException, ServletException {
-      final String userName = request.getParameter("user.name");
-      ServletRequest requestModified =
-        new HttpServletRequestWrapper((HttpServletRequest) request) {
-        @Override
-        public String getRemoteUser() {
-          return userName;
-        }
-      };
-      filterChain.doFilter(requestModified, response);
-    }
-
-    @Override
-    public void init(FilterConfig arg0) throws ServletException { }
-  }
-
-  /**
-   * FilterInitializer that initializes the DummyFilter.
-   *
-   */
-  public static class DummyFilterInitializer extends FilterInitializer {
-    public DummyFilterInitializer() {
-    }
-
-    @Override
-    public void initFilter(FilterContainer container, Configuration conf) {
-      container.addFilter("DummyFilter", DummyServletFilter.class.getName(), null);
-    }
-  }
-
-  /**
-   * Access a URL and get the corresponding HTTP status code. The URL is
-   * accessed as the given user, by sending the user.name request parameter.
-   *
-   * @param urlstring the URL to access
-   * @param userName the user to access the URL as
-   * @return the HTTP status code of the response
-   * @throws IOException if the connection fails
-   */
-  static int getHttpStatusCode(String urlstring, String userName)
-      throws IOException {
-    URL url = new URL(urlstring + "?user.name=" + userName);
-    System.out.println("Accessing " + url + " as user " + userName);
-    HttpURLConnection connection = (HttpURLConnection)url.openConnection();
-    connection.connect();
-    return connection.getResponseCode();
-  }
-
-  /**
-   * Custom user->group mapping service.
-   */
-  public static class MyGroupsProvider extends ShellBasedUnixGroupsMapping {
-    static Map<String, List<String>> mapping = new HashMap<>();
-
-    static void clearMapping() {
-      mapping.clear();
-    }
-
-    @Override
-    public List<String> getGroups(String user) throws IOException {
-      return mapping.get(user);
-    }
-  }
-
-  /**
-   * Verify access to the /logs, /stacks, /conf, /logLevel and /metrics
-   * servlets when authentication filters are set but authorization is not
-   * enabled.
-   * @throws Exception
-   */
-  @Test
-  @Ignore
-  public void testDisabledAuthorizationOfDefaultServlets() throws Exception {
-
-    Configuration conf = new Configuration();
-
-    // Authorization is disabled by default
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        DummyFilterInitializer.class.getName());
-    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
-        MyGroupsProvider.class.getName());
-    Groups.getUserToGroupsMappingService(conf);
-    MyGroupsProvider.clearMapping();
-    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
-    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
-
-    HttpServer myServer = new HttpServer.Builder().setName("test")
-        .addEndpoint(new URI("http://localhost:0")).setFindPort(true).build();
-    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
-    myServer.start();
-    String serverURL = "http://" + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
-    for (String servlet : new String[] { "conf", "logs", "stacks",
-        "logLevel", "metrics" }) {
-      for (String user : new String[] { "userA", "userB" }) {
-        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
-            + servlet, user));
-      }
-    }
-    myServer.stop();
-  }
-
-  /**
-   * Verify administrator access to the /logs, /stacks, /conf, /logLevel and
-   * /metrics servlets.
-   *
-   * @throws Exception
-   */
-  @Test
-  @Ignore
-  public void testAuthorizationOfDefaultServlets() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
-        true);
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
-        true);
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        DummyFilterInitializer.class.getName());
-
-    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
-        MyGroupsProvider.class.getName());
-    Groups.getUserToGroupsMappingService(conf);
-    MyGroupsProvider.clearMapping();
-    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
-    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
-    MyGroupsProvider.mapping.put("userC", Arrays.asList("groupC"));
-    MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
-    MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
-
-    HttpServer myServer = new HttpServer.Builder().setName("test")
-        .addEndpoint(new URI("http://localhost:0")).setFindPort(true).setConf(conf)
-        .setACL(new AccessControlList("userA,userB groupC,groupD")).build();
-    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
-    myServer.start();
-
-    String serverURL = "http://"
-        + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
-    for (String servlet : new String[] { "conf", "logs", "stacks",
-        "logLevel", "metrics" }) {
-      for (String user : new String[] { "userA", "userB", "userC", "userD" }) {
-        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
-            + servlet, user));
-      }
-      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(
-          serverURL + servlet, "userE"));
-    }
-    myServer.stop();
-  }
-
-  @Test
-  public void testRequestQuoterWithNull() throws Exception {
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    Mockito.doReturn(null).when(request).getParameterValues("dummy");
-    RequestQuoter requestQuoter = new RequestQuoter(request);
-    String[] parameterValues = requestQuoter.getParameterValues("dummy");
-    Assert.assertEquals("It should return null "
-        + "when there are no values for the parameter", null, parameterValues);
-  }
-
-  @Test
-  public void testRequestQuoterWithNotNull() throws Exception {
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    String[] values = new String[] { "abc", "def" };
-    Mockito.doReturn(values).when(request).getParameterValues("dummy");
-    RequestQuoter requestQuoter = new RequestQuoter(request);
-    String[] parameterValues = requestQuoter.getParameterValues("dummy");
-    Assert.assertTrue("It should return Parameter Values", Arrays.equals(
-        values, parameterValues));
-  }
-
-  @SuppressWarnings("unchecked")
-  private static Map<String, Object> parse(String jsonString) {
-    return (Map<String, Object>)JSON.parse(jsonString);
-  }
-
-  @Test public void testJersey() throws Exception {
-    LOG.info("BEGIN testJersey()");
-    final String js = readOutput(new URL(baseUrl, "/jersey/foo?op=bar"));
-    final Map<String, Object> m = parse(js);
-    LOG.info("m=" + m);
-    assertEquals("foo", m.get(JerseyResource.PATH));
-    assertEquals("bar", m.get(JerseyResource.OP));
-    LOG.info("END testJersey()");
-  }
-
-  @Test
-  public void testHasAdministratorAccess() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false);
-    ServletContext context = Mockito.mock(ServletContext.class);
-    Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(null);
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    Mockito.when(request.getRemoteUser()).thenReturn(null);
-    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
-
-    //authorization OFF
-    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
-
-    //authorization ON & user NULL
-    response = Mockito.mock(HttpServletResponse.class);
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
-    Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response));
-    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
-
-    //authorization ON & user NOT NULL & ACLs NULL
-    response = Mockito.mock(HttpServletResponse.class);
-    Mockito.when(request.getRemoteUser()).thenReturn("foo");
-    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
-
-    //authorization ON & user NOT NULL & ACLs NOT NULL & user not in ACLs
-    response = Mockito.mock(HttpServletResponse.class);
-    AccessControlList acls = Mockito.mock(AccessControlList.class);
-    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
-    Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response));
-    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
-
-    //authorization ON & user NOT NULL & ACLs NOT NULL & user in in ACLs
-    response = Mockito.mock(HttpServletResponse.class);
-    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(true);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
-    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response));
-
-  }
-
-  @Test
-  public void testRequiresAuthorizationAccess() throws Exception {
-    Configuration conf = new Configuration();
-    ServletContext context = Mockito.mock(ServletContext.class);
-    Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
-
-    //requires admin access to instrumentation, FALSE by default
-    Assert.assertTrue(HttpServer.isInstrumentationAccessAllowed(context, request, response));
-
-    //requires admin access to instrumentation, TRUE
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN, true);
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
-    AccessControlList acls = Mockito.mock(AccessControlList.class);
-    Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
-    Assert.assertFalse(HttpServer.isInstrumentationAccessAllowed(context, request, response));
-  }
-
-  @Test public void testBindAddress() throws Exception {
-    checkBindAddress("localhost", 0, false).stop();
-    // hang onto this one for a bit more testing
-    HttpServer myServer = checkBindAddress("localhost", 0, false);
-    HttpServer myServer2 = null;
-    try {
-      int port = myServer.getConnectorAddress(0).getPort();
-      // it's already in use, true = expect a higher port
-      myServer2 = checkBindAddress("localhost", port, true);
-      // try to reuse the port
-      port = myServer2.getConnectorAddress(0).getPort();
-      myServer2.stop();
-      assertNull(myServer2.getConnectorAddress(0)); // not bound
-      myServer2.openListeners();
-      assertEquals(port, myServer2.getConnectorAddress(0).getPort()); // expect same port
-    } finally {
-      myServer.stop();
-      if (myServer2 != null) {
-        myServer2.stop();
-      }
-    }
-  }
-
-  private HttpServer checkBindAddress(String host, int port, boolean findPort)
-      throws Exception {
-    HttpServer server = createServer(host, port);
-    try {
-      // not bound, ephemeral should return requested port (0 for ephemeral)
-      ServerConnector listener = server.getServerConnectors().get(0);
-
-      assertEquals(port, listener.getPort());
-      // verify hostname is what was given
-      server.openListeners();
-      assertEquals(host, server.getConnectorAddress(0).getHostName());
-
-      int boundPort = server.getConnectorAddress(0).getPort();
-      if (port == 0) {
-        assertTrue(boundPort != 0); // ephemeral should now return bound port
-      } else if (findPort) {
-        assertTrue(boundPort > port);
-        // allow a little wiggle room to prevent random test failures if
-        // some consecutive ports are already in use
-        assertTrue(boundPort - port < 8);
-      }
-    } catch (Exception e) {
-      server.stop();
-      throw e;
-    }
-    return server;
-  }
-
-  @Test
-  public void testXFrameHeaderSameOrigin() throws Exception {
-    Configuration conf = new Configuration();
-    conf.set("hbase.http.filter.xframeoptions.mode", "SAMEORIGIN");
-
-    HttpServer myServer = new HttpServer.Builder().setName("test")
-            .addEndpoint(new URI("http://localhost:0"))
-            .setFindPort(true).setConf(conf).build();
-    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
-    myServer.addServlet("echo", "/echo", EchoServlet.class);
-    myServer.start();
-
-    String serverURL = "http://"
-            + NetUtils.getHostPortString(myServer.getConnectorAddress(0));
-    URL url = new URL(new URL(serverURL), "/echo?a=b&c=d");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    assertEquals("SAMEORIGIN", conn.getHeaderField("X-Frame-Options"));
-    myServer.stop();
-  }
-
-
-
-  @Test
-  public void testNoCacheHeader() throws Exception {
-    URL url = new URL(baseUrl, "/echo?a=b&c=d");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    assertEquals("no-cache", conn.getHeaderField("Cache-Control"));
-    assertEquals("no-cache", conn.getHeaderField("Pragma"));
-    assertNotNull(conn.getHeaderField("Expires"));
-    assertNotNull(conn.getHeaderField("Date"));
-    assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
-    assertEquals("DENY", conn.getHeaderField("X-Frame-Options"));
-  }
-}

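For orientation, the builder-and-servlet pattern this whole test class
exercises, condensed into a runnable sketch; the HttpServer calls mirror the
ones above, while the class, servlet and path names are illustrative:

import java.io.IOException;
import java.io.PrintWriter;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;

import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.HttpServer;
import org.apache.hadoop.net.NetUtils;

public class HttpServerSketch {
  /** Stands in for the EchoServlet above; replies with a fixed body. */
  public static class PingServlet extends HttpServlet {
    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws IOException {
      PrintWriter out = response.getWriter();
      out.print("pong");
      out.close();
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Port 0 requests an ephemeral port; setFindPort(true) lets the server
    // probe upward when the requested port is already bound.
    HttpServer server = new HttpServer.Builder().setName("sketch")
        .addEndpoint(new URI("http://localhost:0"))
        .setFindPort(true).setConf(conf).build();
    server.addServlet("ping", "/ping", PingServlet.class);
    server.start();
    try {
      URL url = new URL("http://"
          + NetUtils.getHostPortString(server.getConnectorAddress(0)) + "/ping");
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      System.out.println("status=" + conn.getResponseCode()); // expect 200
    } finally {
      server.stop();
    }
  }
}
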
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
deleted file mode 100644
index d0f2825..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
-
-  /**
-   * Check that a server is alive by probing the {@link HttpServer#isAlive()} method
-   * and the text of its toString() description
-   * @param server server
-   */
-  private void assertAlive(HttpServer server) {
-    assertTrue("Server is not alive", server.isAlive());
-    assertToStringContains(server, HttpServer.STATE_DESCRIPTION_ALIVE);
-  }
-
-  private void assertNotLive(HttpServer server) {
-    assertTrue("Server should not be live", !server.isAlive());
-    assertToStringContains(server, HttpServer.STATE_DESCRIPTION_NOT_LIVE);
-  }
-
-  /**
-   * Test that a newly created server is not alive until started
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testCreatedServerIsNotAlive() throws Throwable {
-    HttpServer server = createTestServer();
-    assertNotLive(server);
-  }
-
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStopUnstartedServer() throws Throwable {
-    HttpServer server = createTestServer();
-    stop(server);
-  }
-
-  /**
-   * Test that the server is alive once started
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStartedServerIsAlive() throws Throwable {
-    HttpServer server = null;
-    server = createTestServer();
-    assertNotLive(server);
-    server.start();
-    assertAlive(server);
-    stop(server);
-  }
-
-  /**
-   * Assert that the result of {@link HttpServer#toString()} contains the specific text
-   * @param server server to examine
-   * @param text text to search for
-   */
-  private void assertToStringContains(HttpServer server, String text) {
-    String description = server.toString();
-    assertTrue("Did not find \"" + text + "\" in \"" + description + "\"",
-               description.contains(text));
-  }
-
-  /**
-   * Test that the server is not alive once stopped
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStoppedServerIsNotAlive() throws Throwable {
-    HttpServer server = createAndStartTestServer();
-    assertAlive(server);
-    stop(server);
-    assertNotLive(server);
-  }
-
-  /**
-   * Test that stopping an already-stopped server is allowed
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStoppingTwiceServerIsAllowed() throws Throwable {
-    HttpServer server = createAndStartTestServer();
-    assertAlive(server);
-    stop(server);
-    assertNotLive(server);
-    stop(server);
-    assertNotLive(server);
-  }
-
-  /**
-   * Test that webapp context attributes are cleared once the server stops
-   *
-   * @throws Throwable
-   *           on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testWepAppContextAfterServerStop() throws Throwable {
-    HttpServer server = null;
-    String key = "test.attribute.key";
-    String value = "test.attribute.value";
-    server = createTestServer();
-    assertNotLive(server);
-    server.start();
-    server.setAttribute(key, value);
-    assertAlive(server);
-    assertEquals(value, server.getAttribute(key));
-    stop(server);
-    assertNull("Server context should have cleared", server.getAttribute(key));
-  }
-}

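Condensed, the lifecycle contract these tests pin down looks like the sketch
below, assuming only the Builder and isAlive()/start()/stop() calls already
shown; the server name is illustrative:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.HttpServer;

public class LifecycleSketch {
  public static void main(String[] args) throws Exception {
    HttpServer server = new HttpServer.Builder().setName("lifecycle-sketch")
        .addEndpoint(new URI("http://localhost:0"))
        .setFindPort(true).setConf(new Configuration()).build();
    System.out.println("created, alive=" + server.isAlive());  // false
    server.start();
    System.out.println("started, alive=" + server.isAlive());  // true
    server.stop();
    server.stop();  // stopping twice is allowed, per the test above
    System.out.println("stopped, alive=" + server.isAlive());  // false
  }
}
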
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
deleted file mode 100644
index db394a8..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
-
-import java.io.FileNotFoundException;
-
-/**
- * Test webapp loading
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpServerWebapps extends HttpServerFunctionalTest {
-  private static final Log log = LogFactory.getLog(TestHttpServerWebapps.class);
-
-  /**
-   * Test that the test webapp is loadable from the classpath
-   * @throws Throwable if something went wrong
-   */
-  @Test
-  public void testValidServerResource() throws Throwable {
-    HttpServer server = null;
-    try {
-      server = createServer("test");
-    } finally {
-      stop(server);
-    }
-  }
-
-  /**
-   * Test that an invalid webapp triggers an exception
-   * @throws Throwable if something went wrong
-   */
-  @Test
-  public void testMissingServerResource() throws Throwable {
-    try {
-      HttpServer server = createServer("NoSuchWebapp");
-      // should not have gotten here
-      // close the server before failing
-      String serverDescription = server.toString();
-      stop(server);
-      fail("Expected an exception, got " + serverDescription);
-    } catch (FileNotFoundException expected) {
-      log.debug("Expected exception " + expected, expected);
-    }
-  }
-
-}

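The load-or-fail behavior under test reduces to the pattern below. It leans
on the createServer(String)/stop() helpers and the webapps/test resource from
the HttpServerFunctionalTest base class, so treat it as a sketch rather than
standalone code:

package org.apache.hadoop.hbase.http;

import java.io.FileNotFoundException;

public class WebappResolutionSketch extends HttpServerFunctionalTest {
  public void demo() throws Exception {
    // "test" resolves because webapps/test sits on the test classpath.
    HttpServer ok = createServer("test");
    stop(ok);
    try {
      HttpServer bad = createServer("NoSuchWebapp");
      stop(bad);
      fail("expected a FileNotFoundException for the missing webapp");
    } catch (FileNotFoundException expected) {
      // A missing webapp directory is detected while building the server,
      // before start() is ever called.
    }
  }
}
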
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
deleted file mode 100644
index 5eff2b4..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.Set;
-import java.util.TreeSet;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestPathFilter extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-  static final Set<String> RECORDS = new TreeSet<>();
-
-  /** A very simple filter that records accessed URIs. */
-  static public class RecordingFilter implements Filter {
-    private FilterConfig filterConfig = null;
-
-    @Override
-    public void init(FilterConfig filterConfig) {
-      this.filterConfig = filterConfig;
-    }
-
-    @Override
-    public void destroy() {
-      this.filterConfig = null;
-    }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain chain) throws IOException, ServletException {
-      if (filterConfig == null)
-         return;
-
-      String uri = ((HttpServletRequest)request).getRequestURI();
-      LOG.info("filtering " + uri);
-      RECORDS.add(uri);
-      chain.doFilter(request, response);
-    }
-
-    /** Configuration for RecordingFilter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {}
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addFilter("recording", RecordingFilter.class.getName(), null);
-      }
-    }
-  }
-
-
-  /** Access a URL, ignoring IOExceptions such as page-not-found. */
-  static void access(String urlstring) throws IOException {
-    LOG.warn("access " + urlstring);
-    URL url = new URL(urlstring);
-
-    URLConnection connection = url.openConnection();
-    connection.connect();
-
-    try {
-      BufferedReader in = new BufferedReader(new InputStreamReader(
-          connection.getInputStream()));
-      try {
-        for(; in.readLine() != null; );
-      } finally {
-        in.close();
-      }
-    } catch(IOException ioe) {
-      LOG.warn("urlstring=" + urlstring, ioe);
-    }
-  }
-
-  @Test
-  public void testPathSpecFilters() throws Exception {
-    Configuration conf = new Configuration();
-
-    //start a http server with CountingFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        RecordingFilter.Initializer.class.getName());
-    String[] pathSpecs = { "/path", "/path/*" };
-    HttpServer http = createTestServer(conf, pathSpecs);
-    http.start();
-
-    final String baseURL = "/path";
-    final String baseSlashURL = "/path/";
-    final String addedURL = "/path/nodes";
-    final String addedSlashURL = "/path/nodes/";
-    final String longURL = "/path/nodes/foo/job";
-    final String rootURL = "/";
-    final String allURL = "/*";
-
-    final String[] filteredUrls = {baseURL, baseSlashURL, addedURL,
-        addedSlashURL, longURL};
-    final String[] notFilteredUrls = {rootURL, allURL};
-
-    // access the urls and verify our paths specs got added to the
-    // filters
-    final String prefix = "http://"
-        + NetUtils.getHostPortString(http.getConnectorAddress(0));
-    try {
-      for(int i = 0; i < filteredUrls.length; i++) {
-        access(prefix + filteredUrls[i]);
-      }
-      for(int i = 0; i < notFilteredUrls.length; i++) {
-        access(prefix + notFilteredUrls[i]);
-      }
-    } finally {
-      http.stop();
-    }
-
-    LOG.info("RECORDS = " + RECORDS);
-
-    //verify records
-    for(int i = 0; i < filteredUrls.length; i++) {
-      assertTrue(RECORDS.remove(filteredUrls[i]));
-    }
-    assertTrue(RECORDS.isEmpty());
-  }
-}

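The FilterInitializer plumbing this test leans on comes down to a filter
class plus a nested Initializer that registers it. A sketch with an
illustrative counting filter in place of the recording one:

import java.io.IOException;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.FilterContainer;
import org.apache.hadoop.hbase.http.FilterInitializer;

public class CountingFilter implements Filter {
  static volatile int hits; // bumped on every request the filter sees

  @Override
  public void init(FilterConfig filterConfig) { }

  @Override
  public void destroy() { }

  @Override
  public void doFilter(ServletRequest request, ServletResponse response,
      FilterChain chain) throws IOException, ServletException {
    hits++;
    chain.doFilter(request, response);
  }

  /** Registers the filter, mirroring RecordingFilter.Initializer above. */
  public static class Initializer extends FilterInitializer {
    @Override
    public void initFilter(FilterContainer container, Configuration conf) {
      container.addFilter("counting", CountingFilter.class.getName(), null);
    }
  }
}

It is wired in the same way the test does it: set
HttpServer.FILTER_INITIALIZERS_PROPERTY to CountingFilter.Initializer before
building the server, and pass pathSpecs at server creation to restrict which
paths get filtered.
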
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
deleted file mode 100644
index b599350..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URL;
-
-import javax.net.ssl.HttpsURLConnection;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.ssl.SSLFactory;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-/**
- * This test case issues SSL certificates, configures the HttpServer to serve
- * HTTPS using the created certificates, and calls an echo servlet via the
- * corresponding HTTPS URL.
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestSSLHttpServer extends HttpServerFunctionalTest {
-  private static final String BASEDIR = System.getProperty("test.build.dir",
-      "target/test-dir") + "/" + TestSSLHttpServer.class.getSimpleName();
-
-  private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class);
-  private static Configuration conf;
-  private static HttpServer server;
-  private static URL baseUrl;
-  private static String keystoresDir;
-  private static String sslConfDir;
-  private static SSLFactory clientSslFactory;
-
-  @BeforeClass
-  public static void setup() throws Exception {
-    conf = new Configuration();
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
-
-    File base = new File(BASEDIR);
-    FileUtil.fullyDelete(base);
-    base.mkdirs();
-    keystoresDir = new File(BASEDIR).getAbsolutePath();
-    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
-
-    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
-    Configuration sslConf = new Configuration(false);
-    sslConf.addResource("ssl-server.xml");
-    sslConf.addResource("ssl-client.xml");
-
-    clientSslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, sslConf);
-    clientSslFactory.init();
-
-    server = new HttpServer.Builder()
-        .setName("test")
-        .addEndpoint(new URI("https://localhost"))
-        .setConf(conf)
-        .keyPassword(HBaseConfiguration.getPassword(sslConf, "ssl.server.keystore.keypassword",
-            null))
-        .keyStore(sslConf.get("ssl.server.keystore.location"),
-            HBaseConfiguration.getPassword(sslConf, "ssl.server.keystore.password", null),
-            sslConf.get("ssl.server.keystore.type", "jks"))
-        .trustStore(sslConf.get("ssl.server.truststore.location"),
-            HBaseConfiguration.getPassword(sslConf, "ssl.server.truststore.password", null),
-            sslConf.get("ssl.server.truststore.type", "jks")).build();
-    server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
-    server.start();
-    baseUrl = new URL("https://"
-        + NetUtils.getHostPortString(server.getConnectorAddress(0)));
-    LOG.info("HTTP server started: " + baseUrl);
-  }
-
-  @AfterClass
-  public static void cleanup() throws Exception {
-    server.stop();
-    FileUtil.fullyDelete(new File(BASEDIR));
-    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
-    clientSslFactory.destroy();
-  }
-
-  @Test
-  public void testEcho() throws Exception {
-    assertEquals("a:b\nc:d\n", readOut(new URL(baseUrl, "/echo?a=b&c=d")));
-    assertEquals("a:b\nc&lt;:d\ne:&gt;\n", readOut(new URL(baseUrl,
-        "/echo?a=b&c<=d&e=>")));
-  }
-
-  private static String readOut(URL url) throws Exception {
-    HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
-    conn.setSSLSocketFactory(clientSslFactory.createSSLSocketFactory());
-    InputStream in = conn.getInputStream();
-    ByteArrayOutputStream out = new ByteArrayOutputStream();
-    IOUtils.copyBytes(in, out, 1024);
-    return out.toString();
-  }
-
-}

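For context, the HTTPS setup above reduces to the builder calls sketched
here; the helper reads the same ssl-server.xml keys the test uses, while the
server name and the explicit :0 port are illustrative (the test omits the
port):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.http.HttpServer;

public class SslServerSketch {
  /** Builds, but does not start, an HTTPS-only server; the caller owns it. */
  public static HttpServer buildHttpsServer(Configuration conf) throws Exception {
    // Server-side SSL material, resolved from the classpath as in setup().
    Configuration sslConf = new Configuration(false);
    sslConf.addResource("ssl-server.xml");
    return new HttpServer.Builder()
        .setName("ssl-sketch")
        .addEndpoint(new URI("https://localhost:0"))
        .setConf(conf)
        .keyPassword(HBaseConfiguration.getPassword(sslConf,
            "ssl.server.keystore.keypassword", null))
        .keyStore(sslConf.get("ssl.server.keystore.location"),
            HBaseConfiguration.getPassword(sslConf, "ssl.server.keystore.password", null),
            sslConf.get("ssl.server.keystore.type", "jks"))
        .trustStore(sslConf.get("ssl.server.truststore.location"),
            HBaseConfiguration.getPassword(sslConf, "ssl.server.truststore.password", null),
            sslConf.get("ssl.server.truststore.type", "jks"))
        .build();
  }
}
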
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
deleted file mode 100644
index 32bc03e..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.Random;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.util.StringUtils;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestServletFilter extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-  static volatile String uri = null;
-
-  /** A very simple filter which records the filtered URI. */
-  static public class SimpleFilter implements Filter {
-    private FilterConfig filterConfig = null;
-
-    @Override
-    public void init(FilterConfig filterConfig) throws ServletException {
-      this.filterConfig = filterConfig;
-    }
-
-    @Override
-    public void destroy() {
-      this.filterConfig = null;
-    }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain chain) throws IOException, ServletException {
-      if (filterConfig == null)
-         return;
-
-      uri = ((HttpServletRequest)request).getRequestURI();
-      LOG.info("filtering " + uri);
-      chain.doFilter(request, response);
-    }
-
-    /** Configuration for the filter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {}
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addFilter("simple", SimpleFilter.class.getName(), null);
-      }
-    }
-  }
-
-  public static void assertExceptionContains(String string, Throwable t) {
-    String msg = t.getMessage();
-    Assert.assertTrue(
-        "Expected to find '" + string + "' but got unexpected exception:"
-        + StringUtils.stringifyException(t), msg.contains(string));
-  }
-
-  /** Access a URL, ignoring IOExceptions such as page-not-found. */
-  static void access(String urlstring) throws IOException {
-    LOG.warn("access " + urlstring);
-    URL url = new URL(urlstring);
-    URLConnection connection = url.openConnection();
-    connection.connect();
-
-    try {
-      BufferedReader in = new BufferedReader(new InputStreamReader(
-          connection.getInputStream()));
-      try {
-        for(; in.readLine() != null; );
-      } finally {
-        in.close();
-      }
-    } catch(IOException ioe) {
-      LOG.warn("urlstring=" + urlstring, ioe);
-    }
-  }
-
-  @Test
-  @Ignore
-  // From stack:
-  // It's a 'foreign' test, one that came in from Hadoop when we copy/pasted http.
-  // It's second class; we could comment it out if it's the only failing test (as per @nkeywal, sort of).
-  public void testServletFilter() throws Exception {
-    Configuration conf = new Configuration();
-
-    //start a http server with CountingFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        SimpleFilter.Initializer.class.getName());
-    HttpServer http = createTestServer(conf);
-    http.start();
-
-    final String fsckURL = "/fsck";
-    final String stacksURL = "/stacks";
-    final String ajspURL = "/a.jsp";
-    final String logURL = "/logs/a.log";
-    final String hadooplogoURL = "/static/hadoop-logo.jpg";
-
-    final String[] urls = {fsckURL, stacksURL, ajspURL, logURL, hadooplogoURL};
-    final Random ran = new Random();
-    final int[] sequence = new int[50];
-
-    //generate a random sequence and update counts
-    for(int i = 0; i < sequence.length; i++) {
-      sequence[i] = ran.nextInt(urls.length);
-    }
-
-    //access the urls as the sequence
-    final String prefix = "http://"
-        + NetUtils.getHostPortString(http.getConnectorAddress(0));
-    try {
-      for(int i = 0; i < sequence.length; i++) {
-        access(prefix + urls[sequence[i]]);
-
-        //make sure everything except fsck get filtered
-        if (sequence[i] == 0) {
-          assertEquals(null, uri);
-        } else {
-          assertEquals(urls[sequence[i]], uri);
-          uri = null;
-        }
-      }
-    } finally {
-      http.stop();
-    }
-  }
-
-  static public class ErrorFilter extends SimpleFilter {
-    @Override
-    public void init(FilterConfig arg0) throws ServletException {
-      throw new ServletException("Throwing the exception from Filter init");
-    }
-
-    /** Configuration for the filter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {
-      }
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addFilter("simple", ErrorFilter.class.getName(), null);
-      }
-    }
-  }
-
-  @Test
-  public void testServletFilterWhenInitThrowsException() throws Exception {
-    Configuration conf = new Configuration();
-    // start a http server with ErrorFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        ErrorFilter.Initializer.class.getName());
-    HttpServer http = createTestServer(conf);
-    try {
-      http.start();
-      fail("expecting exception");
-    } catch (IOException e) {
-      assertExceptionContains("Problem starting http server", e);
-    }
-  }
-
-  /**
-   * Similar to the above test case, except that it uses a different API to add the
-   * filter. Regression test for HADOOP-8786.
-   */
-  @Test
-  public void testContextSpecificServletFilterWhenInitThrowsException()
-      throws Exception {
-    Configuration conf = new Configuration();
-    HttpServer http = createTestServer(conf);
-    HttpServer.defineFilter(http.webAppContext,
-        "ErrorFilter", ErrorFilter.class.getName(),
-        null, null);
-    try {
-      http.start();
-      fail("expecting exception");
-    } catch (IOException e) {
-      assertExceptionContains("Unable to initialize WebAppContext", e);
-    }
-  }
-
-}

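The failure mode asserted by the last two tests can be reproduced with a
deliberately broken filter. In this sketch only HttpServer, FilterInitializer
and FILTER_INITIALIZERS_PROPERTY come from the code above; everything else is
illustrative:

import java.io.IOException;
import java.net.URI;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.FilterContainer;
import org.apache.hadoop.hbase.http.FilterInitializer;
import org.apache.hadoop.hbase.http.HttpServer;

public class BrokenFilterSketch implements Filter {
  @Override
  public void init(FilterConfig config) throws ServletException {
    // Any exception thrown here aborts webapp initialization ...
    throw new ServletException("boom from init()");
  }

  @Override
  public void destroy() { }

  @Override
  public void doFilter(ServletRequest request, ServletResponse response,
      FilterChain chain) throws IOException, ServletException {
    chain.doFilter(request, response);
  }

  public static class Initializer extends FilterInitializer {
    @Override
    public void initFilter(FilterContainer container, Configuration conf) {
      container.addFilter("broken", BrokenFilterSketch.class.getName(), null);
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY, Initializer.class.getName());
    HttpServer http = new HttpServer.Builder().setName("broken-sketch")
        .addEndpoint(new URI("http://localhost:0"))
        .setFindPort(true).setConf(conf).build();
    try {
      http.start();
    } catch (IOException e) {
      // ... and surfaces here, wrapped as "Problem starting http server".
      System.out.println("start failed as expected: " + e.getMessage());
    }
  }
}
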

[09/13] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

Posted by bu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
deleted file mode 100644
index 4fad031..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
+++ /dev/null
@@ -1,258 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.security.Principal;
-import java.security.PrivilegedExceptionAction;
-import java.util.Set;
-
-import javax.security.auth.Subject;
-import javax.security.auth.kerberos.KerberosTicket;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.http.TestHttpServer.EchoServlet;
-import org.apache.hadoop.hbase.http.resource.JerseyResource;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.http.HttpHost;
-import org.apache.http.HttpResponse;
-import org.apache.http.auth.AuthSchemeProvider;
-import org.apache.http.auth.AuthScope;
-import org.apache.http.auth.KerberosCredentials;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.config.AuthSchemes;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.protocol.HttpClientContext;
-import org.apache.http.config.Lookup;
-import org.apache.http.config.RegistryBuilder;
-import org.apache.http.entity.ByteArrayEntity;
-import org.apache.http.entity.ContentType;
-import org.apache.http.impl.auth.SPNegoSchemeFactory;
-import org.apache.http.impl.client.BasicCredentialsProvider;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.util.EntityUtils;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.client.JaasKrbUtil;
-import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
-import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSManager;
-import org.ietf.jgss.GSSName;
-import org.ietf.jgss.Oid;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-/**
- * Test class for SPNEGO authentication on the HttpServer. Uses Kerby's MiniKDC and Apache
- * HttpComponents to verify that a simple Servlet is reachable via SPNEGO and unreachable without it.
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestSpnegoHttpServer extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(TestSpnegoHttpServer.class);
-  private static final String KDC_SERVER_HOST = "localhost";
-  private static final String CLIENT_PRINCIPAL = "client";
-
-  private static HttpServer server;
-  private static URL baseUrl;
-  private static SimpleKdcServer kdc;
-  private static File infoServerKeytab;
-  private static File clientKeytab;
-
-  @BeforeClass
-  public static void setupServer() throws Exception {
-    final String serverPrincipal = "HTTP/" + KDC_SERVER_HOST;
-    final File target = new File(System.getProperty("user.dir"), "target");
-    assertTrue(target.exists());
-
-    kdc = buildMiniKdc();
-    kdc.start();
-
-    File keytabDir = new File(target, TestSpnegoHttpServer.class.getSimpleName()
-        + "_keytabs");
-    if (keytabDir.exists()) {
-      deleteRecursively(keytabDir);
-    }
-    keytabDir.mkdirs();
-
-    infoServerKeytab = new File(keytabDir, serverPrincipal.replace('/', '_') + ".keytab");
-    clientKeytab = new File(keytabDir, CLIENT_PRINCIPAL + ".keytab");
-
-    setupUser(kdc, clientKeytab, CLIENT_PRINCIPAL);
-    setupUser(kdc, infoServerKeytab, serverPrincipal);
-
-    Configuration conf = buildSpnegoConfiguration(serverPrincipal, infoServerKeytab);
-
-    server = createTestServerWithSecurity(conf);
-    server.addServlet("echo", "/echo", EchoServlet.class);
-    server.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
-    server.start();
-    baseUrl = getServerURL(server);
-
-    LOG.info("HTTP server started: "+ baseUrl);
-  }
-
-  @AfterClass
-  public static void stopServer() throws Exception {
-    try {
-      if (null != server) {
-        server.stop();
-      }
-    } catch (Exception e) {
-      LOG.info("Failed to stop info server", e);
-    }
-    try {
-      if (null != kdc) {
-        kdc.stop();
-      }
-    } catch (Exception e) {
-      LOG.info("Failed to stop mini KDC", e);
-    }
-  }
-
-  private static void setupUser(SimpleKdcServer kdc, File keytab, String principal)
-      throws KrbException {
-    kdc.createPrincipal(principal);
-    kdc.exportPrincipal(principal, keytab);
-  }
-
-  private static SimpleKdcServer buildMiniKdc() throws Exception {
-    SimpleKdcServer kdc = new SimpleKdcServer();
-
-    final File target = new File(System.getProperty("user.dir"), "target");
-    File kdcDir = new File(target, TestSpnegoHttpServer.class.getSimpleName());
-    if (kdcDir.exists()) {
-      deleteRecursively(kdcDir);
-    }
-    kdcDir.mkdirs();
-    kdc.setWorkDir(kdcDir);
-
-    kdc.setKdcHost(KDC_SERVER_HOST);
-    int kdcPort = getFreePort();
-    kdc.setAllowTcp(true);
-    kdc.setAllowUdp(false);
-    kdc.setKdcTcpPort(kdcPort);
-
-    LOG.info("Starting KDC server at " + KDC_SERVER_HOST + ":" + kdcPort);
-
-    kdc.init();
-
-    return kdc;
-  }
-
-  private static Configuration buildSpnegoConfiguration(String serverPrincipal, File
-      serverKeytab) {
-    Configuration conf = new Configuration();
-    KerberosName.setRules("DEFAULT");
-
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
-
-    // Enable Kerberos (pre-req)
-    conf.set("hbase.security.authentication", "kerberos");
-    conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "kerberos");
-    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY, serverPrincipal);
-    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY, serverKeytab.getAbsolutePath());
-
-    return conf;
-  }
-
-  @Test
-  public void testUnauthorizedClientsDisallowed() throws IOException {
-    URL url = new URL(getServerURL(server), "/echo?a=b");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
-  }
-
-  @Test
-  public void testAllowedClient() throws Exception {
-    // Create the subject for the client
-    final Subject clientSubject = JaasKrbUtil.loginUsingKeytab(CLIENT_PRINCIPAL, clientKeytab);
-    final Set<Principal> clientPrincipals = clientSubject.getPrincipals();
-    // Make sure the subject has a principal
-    assertFalse(clientPrincipals.isEmpty());
-
-    // Get a TGT for the subject (might have many, different encryption types). The first should
-    // be the default encryption type.
-    Set<KerberosTicket> privateCredentials =
-            clientSubject.getPrivateCredentials(KerberosTicket.class);
-    assertFalse(privateCredentials.isEmpty());
-    KerberosTicket tgt = privateCredentials.iterator().next();
-    assertNotNull(tgt);
-
-    // The name of the principal
-    final String principalName = clientPrincipals.iterator().next().getName();
-
-    // Run this code, logged in as the subject (the client)
-    HttpResponse resp = Subject.doAs(clientSubject,
-        new PrivilegedExceptionAction<HttpResponse>() {
-      @Override
-      public HttpResponse run() throws Exception {
-        // Logs in with Kerberos via GSS
-        GSSManager gssManager = GSSManager.getInstance();
-        // jGSS Kerberos login constant
-        Oid oid = new Oid("1.2.840.113554.1.2.2");
-        GSSName gssClient = gssManager.createName(principalName, GSSName.NT_USER_NAME);
-        GSSCredential credential = gssManager.createCredential(gssClient,
-            GSSCredential.DEFAULT_LIFETIME, oid, GSSCredential.INITIATE_ONLY);
-
-        HttpClientContext context = HttpClientContext.create();
-        Lookup<AuthSchemeProvider> authRegistry = RegistryBuilder.<AuthSchemeProvider>create()
-            .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true))
-            .build();
-
-        HttpClient client = HttpClients.custom().setDefaultAuthSchemeRegistry(authRegistry).build();
-        BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
-        credentialsProvider.setCredentials(AuthScope.ANY, new KerberosCredentials(credential));
-
-        URL url = new URL(getServerURL(server), "/echo?a=b");
-        context.setTargetHost(new HttpHost(url.getHost(), url.getPort()));
-        context.setCredentialsProvider(credentialsProvider);
-        context.setAuthSchemeRegistry(authRegistry);
-
-        HttpGet get = new HttpGet(url.toURI());
-        return client.execute(get, context);
-      }
-    });
-
-    assertNotNull(resp);
-    assertEquals(HttpURLConnection.HTTP_OK, resp.getStatusLine().getStatusCode());
-    assertEquals("a:b", EntityUtils.toString(resp.getEntity()).trim());
-  }
-
-  @Test(expected = IllegalArgumentException.class)
-  public void testMissingConfigurationThrowsException() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
-    // Enable Kerberos (pre-req)
-    conf.set("hbase.security.authentication", "kerberos");
-    // Intentionally skip keytab and principal
-
-    HttpServer customServer = createTestServerWithSecurity(conf);
-    customServer.addServlet("echo", "/echo", EchoServlet.class);
-    customServer.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
-    customServer.start();
-  }
-}
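
For readers tracing the flow above, here is a minimal stand-alone sketch of the unauthenticated probe that testUnauthorizedClientsDisallowed performs. The URL is a placeholder for a SPNEGO-protected endpoint such as the /echo servlet registered in setupServer; the port is illustrative, since the test server binds an ephemeral port and resolves it via getServerURL(server).

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class SpnegoProbe {
      public static void main(String[] args) throws Exception {
        // Placeholder address; the tests use getServerURL(server) instead.
        URL url = new URL("http://localhost:16010/echo?a=b");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        // With no Kerberos credentials attached, a SPNEGO-protected server
        // answers 401 and advertises Negotiate in WWW-Authenticate.
        System.out.println("status: " + conn.getResponseCode());
        System.out.println("WWW-Authenticate: " + conn.getHeaderField("WWW-Authenticate"));
      }
    }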

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
deleted file mode 100644
index 8bd1e6d..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.conf;
-
-import java.io.StringReader;
-import java.io.StringWriter;
-import java.util.Map;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.eclipse.jetty.util.ajax.JSON;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-import org.xml.sax.InputSource;
-
-/**
- * Basic test case that the ConfServlet can write configuration
- * to its output in XML and JSON format.
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestConfServlet extends TestCase {
-  private static final String TEST_KEY = "testconfservlet.key";
-  private static final String TEST_VAL = "testval";
-
-  private Configuration getTestConf() {
-    Configuration testConf = new Configuration();
-    testConf.set(TEST_KEY, TEST_VAL);
-    return testConf;
-  }
-
-  @Test
-  @SuppressWarnings("unchecked")
-  public void testWriteJson() throws Exception {
-    StringWriter sw = new StringWriter();
-    ConfServlet.writeResponse(getTestConf(), sw, "json");
-    String json = sw.toString();
-    boolean foundSetting = false;
-    Object parsed = JSON.parse(json);
-    Object[] properties = ((Map<String, Object[]>)parsed).get("properties");
-    for (Object o : properties) {
-      Map<String, Object> propertyInfo = (Map<String, Object>)o;
-      String key = (String)propertyInfo.get("key");
-      String val = (String)propertyInfo.get("value");
-      String resource = (String)propertyInfo.get("resource");
-      System.err.println("k: " + key + " v: " + val + " r: " + resource);
-      if (TEST_KEY.equals(key) && TEST_VAL.equals(val)
-          && "programatically".equals(resource)) {
-        foundSetting = true;
-      }
-    }
-    assertTrue(foundSetting);
-  }
-
-  @Test
-  public void testWriteXml() throws Exception {
-    StringWriter sw = new StringWriter();
-    ConfServlet.writeResponse(getTestConf(), sw, "xml");
-    String xml = sw.toString();
-
-    DocumentBuilderFactory docBuilderFactory
-      = DocumentBuilderFactory.newInstance();
-    DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
-    Document doc = builder.parse(new InputSource(new StringReader(xml)));
-    NodeList nameNodes = doc.getElementsByTagName("name");
-    boolean foundSetting = false;
-    for (int i = 0; i < nameNodes.getLength(); i++) {
-      Node nameNode = nameNodes.item(i);
-      String key = nameNode.getTextContent();
-      System.err.println("xml key: " + key);
-      if (TEST_KEY.equals(key)) {
-        foundSetting = true;
-        Element propertyElem = (Element)nameNode.getParentNode();
-        String val = propertyElem.getElementsByTagName("value").item(0).getTextContent();
-        assertEquals(TEST_VAL, val);
-      }
-    }
-    assertTrue(foundSetting);
-  }
-
-  @Test
-  public void testBadFormat() throws Exception {
-    StringWriter sw = new StringWriter();
-    try {
-      ConfServlet.writeResponse(getTestConf(), sw, "not a format");
-      fail("writeResponse with bad format didn't throw!");
-    } catch (ConfServlet.BadFormatException bfe) {
-      // expected
-    }
-    assertEquals("", sw.toString());
-  }
-}
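
The tests above drive ConfServlet.writeResponse directly; against a live server the same serializations are reachable over HTTP. A hedged sketch, assuming the servlet is mounted at /conf and honors a format query parameter in the Hadoop convention (both are assumptions here, and the address is a placeholder):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.URL;

    public class ConfFetch {
      public static void main(String[] args) throws Exception {
        // "format=json" vs "format=xml" mirrors the "json"/"xml" arguments
        // the tests pass to writeResponse.
        URL url = new URL("http://localhost:16010/conf?format=json");
        try (BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream()))) {
          for (String line; (line = in.readLine()) != null; ) {
            System.out.println(line);
          }
        }
      }
    }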

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
deleted file mode 100644
index 484162a..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http.jmx;
-
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.net.URLEncoder;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.hbase.http.HttpServerFunctionalTest;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestJMXJsonServlet extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(TestJMXJsonServlet.class);
-  private static HttpServer server;
-  private static URL baseUrl;
-
-  @BeforeClass public static void setup() throws Exception {
-    // Eclipse doesn't pick this up correctly from the plugin
-    // configuration in the pom.
-    System.setProperty(HttpServerFunctionalTest.TEST_BUILD_WEBAPPS, "target/test-classes/webapps");
-    server = createTestServer();
-    server.start();
-    baseUrl = getServerURL(server);
-  }
-
-  @AfterClass public static void cleanup() throws Exception {
-    server.stop();
-  }
-
-  public static void assertReFind(String re, String value) {
-    Pattern p = Pattern.compile(re);
-    Matcher m = p.matcher(value);
-    assertTrue("'"+p+"' does not match "+value, m.find());
-  }
-
-  @Test public void testQuery() throws Exception {
-    String result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Runtime"));
-    LOG.info("/jmx?qry=java.lang:type=Runtime RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Runtime\"", result);
-    assertReFind("\"modelerType\"", result);
-
-    result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory"));
-    LOG.info("/jmx?qry=java.lang:type=Memory RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-    assertReFind("\"modelerType\"", result);
-
-    result = readOutput(new URL(baseUrl, "/jmx"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-
-    // test to get an attribute of an MBean
-    result = readOutput(new URL(baseUrl,
-        "/jmx?get=java.lang:type=Memory::HeapMemoryUsage"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-    assertReFind("\"committed\"\\s*:", result);
-
-    // negative test to get an attribute of an MBean
-    result = readOutput(new URL(baseUrl,
-        "/jmx?get=java.lang:type=Memory::"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("\"ERROR\"", result);
-
-    // test to get JSONP result
-    result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory&callback=mycallback1"));
-    LOG.info("/jmx?qry=java.lang:type=Memory&callback=mycallback RESULT: "+result);
-    assertReFind("^mycallback1\\(\\{", result);
-    assertReFind("\\}\\);$", result);
-
-    // negative test to get an attribute of an MBean as JSONP
-    result = readOutput(new URL(baseUrl,
-        "/jmx?get=java.lang:type=Memory::&callback=mycallback2"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("^mycallback2\\(\\{", result);
-    assertReFind("\"ERROR\"", result);
-    assertReFind("\\}\\);$", result);
-
-    // test to get an attribute of an MBean as JSONP
-    result = readOutput(new URL(baseUrl,
-        "/jmx?get=java.lang:type=Memory::HeapMemoryUsage&callback=mycallback3"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("^mycallback3\\(\\{", result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-    assertReFind("\"committed\"\\s*:", result);
-    assertReFind("\\}\\);$", result);
-
-  }
-
-  @Test
-  public void testDisallowedJSONPCallback() throws Exception {
-    String callback = "function(){alert('bigproblems!')};foo";
-    URL url = new URL(
-        baseUrl, "/jmx?qry=java.lang:type=Memory&callback="+URLEncoder.encode(callback, "UTF-8"));
-    HttpURLConnection cnxn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, cnxn.getResponseCode());
-  }
-
-  @Test
-  public void testUnderscoresInJSONPCallback() throws Exception {
-    String callback = "my_function";
-    URL url = new URL(
-        baseUrl, "/jmx?qry=java.lang:type=Memory&callback="+URLEncoder.encode(callback, "UTF-8"));
-    HttpURLConnection cnxn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpServletResponse.SC_OK, cnxn.getResponseCode());
-  }
-}
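
testQuery exercises three shapes of the /jmx endpoint: a qry= object-name query, a get=bean::attribute lookup, and the JSONP callback= wrapper. A stand-alone sketch of the same calls (the base address is a placeholder; the tests resolve it from the running server):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.URL;

    public class JmxQueries {
      // Reads an endpoint fully into a String, like readOutput in the test.
      static String fetch(String spec) throws Exception {
        StringBuilder sb = new StringBuilder();
        try (BufferedReader in =
            new BufferedReader(new InputStreamReader(new URL(spec).openStream()))) {
          for (String line; (line = in.readLine()) != null; ) {
            sb.append(line).append('\n');
          }
        }
        return sb.toString();
      }

      public static void main(String[] args) throws Exception {
        String base = "http://localhost:16010";  // placeholder address
        System.out.println(fetch(base + "/jmx?qry=java.lang:type=Runtime"));
        System.out.println(fetch(base + "/jmx?get=java.lang:type=Memory::HeapMemoryUsage"));
        System.out.println(fetch(base + "/jmx?qry=java.lang:type=Memory&callback=mycallback1"));
      }
    }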

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
deleted file mode 100644
index 3adca50..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.lib;
-
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.mock;
-
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.ServerConfigurationKeys;
-import org.apache.hadoop.hbase.http.lib.StaticUserWebFilter.StaticUserFilter;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestStaticUserWebFilter {
-  private FilterConfig mockConfig(String username) {
-    FilterConfig mock = Mockito.mock(FilterConfig.class);
-    Mockito.doReturn(username).when(mock).getInitParameter(
-            ServerConfigurationKeys.HBASE_HTTP_STATIC_USER);
-    return mock;
-  }
-
-  @Test
-  public void testFilter() throws Exception {
-    FilterConfig config = mockConfig("myuser");
-    StaticUserFilter suf = new StaticUserFilter();
-    suf.init(config);
-
-    ArgumentCaptor<HttpServletRequestWrapper> wrapperArg =
-      ArgumentCaptor.forClass(HttpServletRequestWrapper.class);
-
-    FilterChain chain = mock(FilterChain.class);
-
-    suf.doFilter(mock(HttpServletRequest.class), mock(ServletResponse.class),
-        chain);
-
-    Mockito.verify(chain).doFilter(wrapperArg.capture(), Mockito.<ServletResponse>anyObject());
-
-    HttpServletRequestWrapper wrapper = wrapperArg.getValue();
-    assertEquals("myuser", wrapper.getUserPrincipal().getName());
-    assertEquals("myuser", wrapper.getRemoteUser());
-
-    suf.destroy();
-  }
-
-  @Test
-  public void testOldStyleConfiguration() {
-    Configuration conf = new Configuration();
-    conf.set("dfs.web.ugi", "joe,group1,group2");
-    assertEquals("joe", StaticUserWebFilter.getUsernameFromConf(conf));
-  }
-
-  @Test
-  public void testConfiguration() {
-    Configuration conf = new Configuration();
-    conf.set(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, "dr.stack");
-    assertEquals("dr.stack", StaticUserWebFilter.getUsernameFromConf(conf));
-  }
-
-}
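
testFilter captures the HttpServletRequestWrapper that StaticUserFilter hands down the chain and asserts on its principal. The filter's real wrapper is internal to StaticUserWebFilter; the class below only illustrates the contract those assertions pin down, namely a fixed remote user and principal name:

    import java.security.Principal;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletRequestWrapper;

    // Illustrative only: wraps a request so getRemoteUser() and
    // getUserPrincipal() report a fixed user, which is what the
    // captured wrapper in testFilter is asserted against.
    class FixedUserRequest extends HttpServletRequestWrapper {
      private final String username;

      FixedUserRequest(HttpServletRequest request, String username) {
        super(request);
        this.username = username;
      }

      @Override
      public String getRemoteUser() {
        return username;
      }

      @Override
      public Principal getUserPrincipal() {
        // java.security.Principal has a single abstract method (getName),
        // so a lambda suffices for this illustration.
        return () -> username;
      }
    }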

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
deleted file mode 100644
index e14e3b4..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-package org.apache.hadoop.hbase.http.log;
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.*;
-import java.net.*;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.commons.logging.*;
-import org.apache.commons.logging.impl.*;
-import org.apache.log4j.*;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestLogLevel {
-  static final PrintStream out = System.out;
-
-  @Test (timeout=60000)
-  @SuppressWarnings("deprecation")
-  public void testDynamicLogLevel() throws Exception {
-    String logName = TestLogLevel.class.getName();
-    Log testlog = LogFactory.getLog(logName);
-
-    //only test Log4JLogger
-    if (testlog instanceof Log4JLogger) {
-      Logger log = ((Log4JLogger)testlog).getLogger();
-      log.debug("log.debug1");
-      log.info("log.info1");
-      log.error("log.error1");
-      assertTrue(!Level.ERROR.equals(log.getEffectiveLevel()));
-
-      HttpServer server = null;
-      try {
-        server = new HttpServer.Builder().setName("..")
-            .addEndpoint(new URI("http://localhost:0")).setFindPort(true)
-            .build();
-
-        server.start();
-        String authority = NetUtils.getHostPortString(server
-            .getConnectorAddress(0));
-
-        //servlet
-        URL url =
-            new URL("http://" + authority + "/logLevel?log=" + logName + "&level=" + Level.ERROR);
-        out.println("*** Connecting to " + url);
-        try (BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream()))) {
-          for(String line; (line = in.readLine()) != null; out.println(line));
-        }
-        log.debug("log.debug2");
-        log.info("log.info2");
-        log.error("log.error2");
-        assertTrue(Level.ERROR.equals(log.getEffectiveLevel()));
-
-        //command line
-        String[] args = {"-setlevel", authority, logName, Level.DEBUG.toString()};
-        LogLevel.main(args);
-        log.debug("log.debug3");
-        log.info("log.info3");
-        log.error("log.error3");
-        assertTrue(Level.DEBUG.equals(log.getEffectiveLevel()));
-      } finally {
-        if (server != null) {
-          server.stop();
-        }
-      }
-    }
-    else {
-      out.println(testlog.getClass() + " not tested.");
-    }
-  }
-}
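
The servlet leg of that round trip can be driven outside JUnit as well. A sketch under the same assumption the test relies on, namely that the caller runs in the same JVM as the HttpServer so the log4j level change is visible locally (logger name and address are placeholders):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.URL;

    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;

    public class LogLevelRoundTrip {
      public static void main(String[] args) throws Exception {
        String logName = "org.example.SomeClass";  // placeholder logger
        URL url = new URL("http://localhost:16010/logLevel?log=" + logName
            + "&level=" + Level.DEBUG);
        // Drain the response; the level change is the side effect.
        try (BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream()))) {
          while (in.readLine() != null) {
            // ignore the HTML body
          }
        }
        // Visible here only because client and server share a JVM.
        System.out.println(Logger.getLogger(logName).getEffectiveLevel());
      }
    }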

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
deleted file mode 100644
index bf0e609..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.resource;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.eclipse.jetty.util.ajax.JSON;
-
-/**
- * A simple Jersey resource class used by TestHttpServer.
- * The servlet simply puts the path and the op parameter in a map
- * and returns it in JSON format in the response.
- */
-@Path("")
-public class JerseyResource {
-  private static final Log LOG = LogFactory.getLog(JerseyResource.class);
-
-  public static final String PATH = "path";
-  public static final String OP = "op";
-
-  @GET
-  @Path("{" + PATH + ":.*}")
-  @Produces({MediaType.APPLICATION_JSON})
-  public Response get(
-      @PathParam(PATH) @DefaultValue("UNKNOWN_" + PATH) final String path,
-      @QueryParam(OP) @DefaultValue("UNKNOWN_" + OP) final String op
-      ) throws IOException {
-    LOG.info("get: " + PATH + "=" + path + ", " + OP + "=" + op);
-
-    final Map<String, Object> m = new TreeMap<>();
-    m.put(PATH, path);
-    m.put(OP, op);
-    final String js = JSON.toString(m);
-    return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
-  }
-}
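
A quick usage sketch: against a server that mounts this resource under /jersey/* (as the SPNEGO test setup above does), a GET echoes the path and op back as JSON. The address is a placeholder, and the expected body shown in the comment follows from the TreeMap key ordering above:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.URL;

    public class JerseyEcho {
      public static void main(String[] args) throws Exception {
        // Expected body: {"op":"GET","path":"foo/bar"}
        URL url = new URL("http://localhost:16010/jersey/foo/bar?op=GET");
        try (BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream()))) {
          for (String line; (line = in.readLine()) != null; ) {
            System.out.println(line);
          }
        }
      }
    }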

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
deleted file mode 100644
index 35d6236..0000000
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
+++ /dev/null
@@ -1,342 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http.ssl;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.Writer;
-import java.math.BigInteger;
-import java.net.URL;
-import java.security.GeneralSecurityException;
-import java.security.InvalidKeyException;
-import java.security.Key;
-import java.security.KeyPair;
-import java.security.KeyPairGenerator;
-import java.security.KeyStore;
-import java.security.NoSuchAlgorithmException;
-import java.security.NoSuchProviderException;
-import java.security.SecureRandom;
-import java.security.SignatureException;
-import java.security.cert.Certificate;
-import java.security.cert.CertificateEncodingException;
-import java.security.cert.X509Certificate;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.security.auth.x500.X500Principal;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
-import org.apache.hadoop.security.ssl.SSLFactory;
-import org.bouncycastle.x509.X509V1CertificateGenerator;
-
-public class KeyStoreTestUtil {
-
-  public static String getClasspathDir(Class<?> klass) throws Exception {
-    String file = klass.getName();
-    file = file.replace('.', '/') + ".class";
-    URL url = Thread.currentThread().getContextClassLoader().getResource(file);
-    String baseDir = url.toURI().getPath();
-    baseDir = baseDir.substring(0, baseDir.length() - file.length() - 1);
-    return baseDir;
-  }
-
-  /**
-   * Create a self-signed X.509 Certificate.
-   *
-   * @param dn the X.509 Distinguished Name, e.g. "CN=Test, L=London, C=GB"
-   * @param pair the KeyPair
-   * @param days how many days from now the Certificate is valid for
-   * @param algorithm the signing algorithm, e.g. "SHA1withRSA"
-   * @return the self-signed certificate
-   */
-  public static X509Certificate generateCertificate(String dn, KeyPair pair, int days, String algorithm)
-      throws CertificateEncodingException, InvalidKeyException, IllegalStateException,
-      NoSuchProviderException, NoSuchAlgorithmException, SignatureException {
-    Date from = new Date();
-    Date to = new Date(from.getTime() + days * 86400000L);
-    BigInteger sn = new BigInteger(64, new SecureRandom());
-    KeyPair keyPair = pair;
-    X509V1CertificateGenerator certGen = new X509V1CertificateGenerator();
-    X500Principal  dnName = new X500Principal(dn);
-
-    certGen.setSerialNumber(sn);
-    certGen.setIssuerDN(dnName);
-    certGen.setNotBefore(from);
-    certGen.setNotAfter(to);
-    certGen.setSubjectDN(dnName);
-    certGen.setPublicKey(keyPair.getPublic());
-    certGen.setSignatureAlgorithm(algorithm);
-    X509Certificate cert = certGen.generate(pair.getPrivate());
-    return cert;
-  }
-
-  public static KeyPair generateKeyPair(String algorithm)
-    throws NoSuchAlgorithmException {
-    KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
-    keyGen.initialize(1024);
-    return keyGen.genKeyPair();
-  }
-
-  private static KeyStore createEmptyKeyStore()
-    throws GeneralSecurityException, IOException {
-    KeyStore ks = KeyStore.getInstance("JKS");
-    ks.load(null, null); // initialize
-    return ks;
-  }
-
-  private static void saveKeyStore(KeyStore ks, String filename,
-                                   String password)
-    throws GeneralSecurityException, IOException {
-    FileOutputStream out = new FileOutputStream(filename);
-    try {
-      ks.store(out, password.toCharArray());
-    } finally {
-      out.close();
-    }
-  }
-
-  public static void createKeyStore(String filename,
-                                    String password, String alias,
-                                    Key privateKey, Certificate cert)
-    throws GeneralSecurityException, IOException {
-    KeyStore ks = createEmptyKeyStore();
-    ks.setKeyEntry(alias, privateKey, password.toCharArray(),
-                   new Certificate[]{cert});
-    saveKeyStore(ks, filename, password);
-  }
-
-  /**
-   * Creates a keystore with a single key and saves it to a file.
-   *
-   * @param filename String file to save
-   * @param password String store password to set on keystore
-   * @param keyPassword String key password to set on key
-   * @param alias String alias to use for the key
-   * @param privateKey Key to save in keystore
-   * @param cert Certificate to use as certificate chain associated to key
-   * @throws GeneralSecurityException for any error with the security APIs
-   * @throws IOException if there is an I/O error saving the file
-   */
-  public static void createKeyStore(String filename,
-                                    String password, String keyPassword, String alias,
-                                    Key privateKey, Certificate cert)
-    throws GeneralSecurityException, IOException {
-    KeyStore ks = createEmptyKeyStore();
-    ks.setKeyEntry(alias, privateKey, keyPassword.toCharArray(),
-                   new Certificate[]{cert});
-    saveKeyStore(ks, filename, password);
-  }
-
-  public static void createTrustStore(String filename,
-                                      String password, String alias,
-                                      Certificate cert)
-    throws GeneralSecurityException, IOException {
-    KeyStore ks = createEmptyKeyStore();
-    ks.setCertificateEntry(alias, cert);
-    saveKeyStore(ks, filename, password);
-  }
-
-  public static <T extends Certificate> void createTrustStore(
-    String filename, String password, Map<String, T> certs)
-    throws GeneralSecurityException, IOException {
-    KeyStore ks = createEmptyKeyStore();
-    for (Map.Entry<String, T> cert : certs.entrySet()) {
-      ks.setCertificateEntry(cert.getKey(), cert.getValue());
-    }
-    saveKeyStore(ks, filename, password);
-  }
-
-  public static void cleanupSSLConfig(String keystoresDir, String sslConfDir)
-    throws Exception {
-    File f = new File(keystoresDir + "/clientKS.jks");
-    f.delete();
-    f = new File(keystoresDir + "/serverKS.jks");
-    f.delete();
-    f = new File(keystoresDir + "/trustKS.jks");
-    f.delete();
-    f = new File(sslConfDir + "/ssl-client.xml");
-    f.delete();
-    f = new File(sslConfDir +  "/ssl-server.xml");
-    f.delete();
-  }
-
-  /**
-   * Performs complete setup of SSL configuration in preparation for testing an
-   * SSLFactory.  This includes keys, certs, keystores, truststores, the server
-   * SSL configuration file, the client SSL configuration file, and the master
-   * configuration file read by the SSLFactory.
-   *
-   * @param keystoresDir String directory to save keystores
-   * @param sslConfDir String directory to save SSL configuration files
-   * @param conf Configuration master configuration to be used by an SSLFactory,
-   *   which will be mutated by this method
-   * @param useClientCert boolean true to make the client present a cert in the
-   *   SSL handshake
-   */
-  public static void setupSSLConfig(String keystoresDir, String sslConfDir,
-                                    Configuration conf, boolean useClientCert)
-    throws Exception {
-    String clientKS = keystoresDir + "/clientKS.jks";
-    String clientPassword = "clientP";
-    String serverKS = keystoresDir + "/serverKS.jks";
-    String serverPassword = "serverP";
-    String trustKS = keystoresDir + "/trustKS.jks";
-    String trustPassword = "trustP";
-
-    File sslClientConfFile = new File(sslConfDir + "/ssl-client.xml");
-    File sslServerConfFile = new File(sslConfDir + "/ssl-server.xml");
-
-    Map<String, X509Certificate> certs = new HashMap<>();
-
-    if (useClientCert) {
-      KeyPair cKP = KeyStoreTestUtil.generateKeyPair("RSA");
-      X509Certificate cCert =
-        KeyStoreTestUtil.generateCertificate("CN=localhost, O=client", cKP, 30,
-                                             "SHA1withRSA");
-      KeyStoreTestUtil.createKeyStore(clientKS, clientPassword, "client",
-                                      cKP.getPrivate(), cCert);
-      certs.put("client", cCert);
-    }
-
-    KeyPair sKP = KeyStoreTestUtil.generateKeyPair("RSA");
-    X509Certificate sCert =
-      KeyStoreTestUtil.generateCertificate("CN=localhost, O=server", sKP, 30,
-                                           "SHA1withRSA");
-    KeyStoreTestUtil.createKeyStore(serverKS, serverPassword, "server",
-                                    sKP.getPrivate(), sCert);
-    certs.put("server", sCert);
-
-    KeyStoreTestUtil.createTrustStore(trustKS, trustPassword, certs);
-
-    Configuration clientSSLConf = createClientSSLConfig(clientKS, clientPassword,
-      clientPassword, trustKS);
-    Configuration serverSSLConf = createServerSSLConfig(serverKS, serverPassword,
-      serverPassword, trustKS);
-
-    saveConfig(sslClientConfFile, clientSSLConf);
-    saveConfig(sslServerConfFile, serverSSLConf);
-
-    conf.set(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
-    conf.set(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfFile.getName());
-    conf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile.getName());
-    conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, useClientCert);
-  }
-
-  /**
-   * Creates SSL configuration for a client.
-   *
-   * @param clientKS String client keystore file
-   * @param password String store password, or null to avoid setting store
-   *   password
-   * @param keyPassword String key password, or null to avoid setting key
-   *   password
-   * @param trustKS String truststore file
-   * @return Configuration for client SSL
-   */
-  public static Configuration createClientSSLConfig(String clientKS,
-      String password, String keyPassword, String trustKS) {
-    Configuration clientSSLConf = createSSLConfig(SSLFactory.Mode.CLIENT,
-      clientKS, password, keyPassword, trustKS);
-    return clientSSLConf;
-  }
-
-  /**
-   * Creates SSL configuration for a server.
-   *
-   * @param serverKS String server keystore file
-   * @param password String store password, or null to avoid setting store
-   *   password
-   * @param keyPassword String key password, or null to avoid setting key
-   *   password
-   * @param trustKS String truststore file
-   * @return Configuration for server SSL
-   */
-  public static Configuration createServerSSLConfig(String serverKS,
-      String password, String keyPassword, String trustKS) throws IOException {
-    Configuration serverSSLConf = createSSLConfig(SSLFactory.Mode.SERVER,
-      serverKS, password, keyPassword, trustKS);
-    return serverSSLConf;
-  }
-
-  /**
-   * Creates SSL configuration.
-   *
-   * @param mode SSLFactory.Mode mode to configure
-   * @param keystore String keystore file
-   * @param password String store password, or null to avoid setting store
-   *   password
-   * @param keyPassword String key password, or null to avoid setting key
-   *   password
-   * @param trustKS String truststore file
-   * @return Configuration for SSL
-   */
-  private static Configuration createSSLConfig(SSLFactory.Mode mode,
-      String keystore, String password, String keyPassword, String trustKS) {
-    String trustPassword = "trustP";
-
-    Configuration sslConf = new Configuration(false);
-    if (keystore != null) {
-      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY), keystore);
-    }
-    if (password != null) {
-      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY), password);
-    }
-    if (keyPassword != null) {
-      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
-        keyPassword);
-    }
-    if (trustKS != null) {
-      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY), trustKS);
-    }
-    if (trustPassword != null) {
-      sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY),
-        trustPassword);
-    }
-    sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-      FileBasedKeyStoresFactory.SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY), "1000");
-
-    return sslConf;
-  }
-
-  /**
-   * Saves configuration to a file.
-   *
-   * @param file File to save
-   * @param conf Configuration contents to write to file
-   * @throws IOException if there is an I/O error saving the file
-   */
-  public static void saveConfig(File file, Configuration conf)
-      throws IOException {
-    Writer writer = new FileWriter(file);
-    try {
-      conf.writeXml(writer);
-    } finally {
-      writer.close();
-    }
-  }
-}
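
Typical use of the utility in a test body, sketched under the assumption that both directories live under target/ and are on the test classpath (the class and directory names are placeholders):

    import java.io.File;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;

    public class SslFixture {
      public static void main(String[] args) throws Exception {
        String keystoresDir = new File("target/keystores").getAbsolutePath();
        new File(keystoresDir).mkdirs();
        // Write ssl-client.xml / ssl-server.xml next to this class so an
        // SSLFactory can find them on the classpath.
        String sslConfDir = KeyStoreTestUtil.getClasspathDir(SslFixture.class);

        Configuration conf = new Configuration();
        // Generates key pairs, self-signed certs, JKS stores, and the two
        // SSL config files, then points conf at all of them.
        KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, true);
        try {
          // ... exercise an HTTPS endpoint configured from conf ...
        } finally {
          KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
        }
      }
    }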

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/resources/log4j.properties b/hbase-http/src/test/resources/log4j.properties
deleted file mode 100644
index c322699..0000000
--- a/hbase-http/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollover at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-# Custom Logging levels
-
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-
-log4j.logger.org.apache.hadoop=WARN
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase=DEBUG
-
-#These settings are workarounds against spurious logs from the minicluster.
-#See HBASE-4709
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/resources/webapps/static/test.css
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/resources/webapps/static/test.css b/hbase-http/src/test/resources/webapps/static/test.css
deleted file mode 100644
index ae43828..0000000
--- a/hbase-http/src/test/resources/webapps/static/test.css
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * Test CSS file for content type handling - empty, since we just check
- * returned content type!
- */

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-http/src/test/resources/webapps/test/testjsp.jsp
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/resources/webapps/test/testjsp.jsp b/hbase-http/src/test/resources/webapps/test/testjsp.jsp
deleted file mode 100644
index 540adf9..0000000
--- a/hbase-http/src/test/resources/webapps/test/testjsp.jsp
+++ /dev/null
@@ -1,21 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?><%!
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-%>
-<%@ page contentType="text/html; charset=UTF-8" %>
-Hello world!

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index 360ab9d..c2f6cf6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -20,8 +20,10 @@ package org.apache.hadoop.hbase.rest;
 
 import java.lang.management.ManagementFactory;
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.EnumSet;
 import java.util.concurrent.ArrayBlockingQueue;
 
@@ -44,7 +46,7 @@ import org.apache.hadoop.hbase.rest.filter.GzipFilter;
 import org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.DNS;
-import org.apache.hadoop.hbase.http.HttpServerUtil;
+import org.apache.hadoop.hbase.util.HttpServerUtil;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.util.VersionInfo;

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
index 4c977fd..0714c7b 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
@@ -24,7 +24,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.http.HttpServerUtil;
+import org.apache.hadoop.hbase.util.HttpServerUtil;
 import org.apache.hadoop.util.StringUtils;
 
 import org.eclipse.jetty.server.HttpConfiguration;
@@ -99,7 +99,7 @@ public class HBaseRESTTestingUtility {
     // get the port
     testServletPort = ((ServerConnector)server.getConnectors()[0]).getLocalPort();
 
-    LOG.info("started " + server.getClass().getName() + " on port " +
+    LOG.info("started " + server.getClass().getName() + " on port " + 
       testServletPort);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index d665538..218812c 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -360,16 +360,6 @@
       <artifactId>hbase-common</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-http</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-http</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
      <!--Needed by the visibility tags and ACL CPEP things
           in here in hbase-server (which should be out in hbase-endpoints
           or integrated). -->
@@ -470,13 +460,48 @@
     </dependency>
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-util</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-util-ajax</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-jsp</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-webapp</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-http</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-security</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.containers</groupId>
+      <artifactId>jersey-container-servlet-core</artifactId>
+    </dependency>
+    <dependency>
       <!--For JspC used in ant task-->
       <groupId>org.glassfish.web</groupId>
       <artifactId>javax.servlet.jsp</artifactId>
     </dependency>
+    <!-- Specifically needed for jetty-jsp, included
+         to bypass version scanning that hits a bad repo
+         see HBASE-18831 -->
+    <dependency>
+      <groupId>org.glassfish</groupId>
+      <artifactId>javax.el</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.codehaus.jettison</groupId>
       <artifactId>jettison</artifactId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
new file mode 100644
index 0000000..bd8570e
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedServlet.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.eclipse.jetty.servlet.DefaultServlet;
+
+/**
+ * General servlet which is admin-authorized.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class AdminAuthorizedServlet extends DefaultServlet {
+
+  private static final long serialVersionUID = 1L;
+
+  @Override
+  protected void doGet(HttpServletRequest request, HttpServletResponse response)
+    throws ServletException, IOException {
+    // Do the authorization
+    if (HttpServer.hasAdministratorAccess(getServletContext(), request,
+        response)) {
+      // Authorization is done. Just call super.
+      super.doGet(request, response);
+    }
+  }
+}
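
Wiring follows the addServlet pattern used throughout the tests above; a minimal sketch (name, path, and endpoint are illustrative):

    import java.net.URI;

    import org.apache.hadoop.hbase.http.AdminAuthorizedServlet;
    import org.apache.hadoop.hbase.http.HttpServer;

    public class AdminServletWiring {
      public static void main(String[] args) throws Exception {
        HttpServer server = new HttpServer.Builder().setName("admin-demo")
            .addEndpoint(new URI("http://localhost:0")).setFindPort(true)
            .build();
        // GETs reach DefaultServlet only after hasAdministratorAccess
        // approves the caller, per doGet above.
        server.addServlet("static", "/static/*", AdminAuthorizedServlet.class);
        server.start();
      }
    }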

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
new file mode 100644
index 0000000..9944d29
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
+public class ClickjackingPreventionFilter implements Filter {
+
+  private FilterConfig filterConfig;
+
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+    this.filterConfig = filterConfig;
+  }
+
+  @Override
+  public void doFilter(ServletRequest req, ServletResponse res,
+      FilterChain chain)
+      throws IOException, ServletException {
+    HttpServletResponse httpRes = (HttpServletResponse) res;
+    httpRes.addHeader("X-Frame-Options", filterConfig.getInitParameter("xframeoptions"));
+    chain.doFilter(req, res);
+  }
+
+  @Override
+  public void destroy() {
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
new file mode 100644
index 0000000..7a79acc
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterContainer.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.util.Map;
+
+/**
+ * A container interface to add javax.servlet.Filter.
+ */
+public interface FilterContainer {
+  /**
+   * Add a filter to the container.
+   * @param name Filter name
+   * @param classname Filter class name
+   * @param parameters a map from parameter names to initial values
+   */
+  void addFilter(String name, String classname, Map<String, String> parameters);
+  /**
+   * Add a global filter to the container - This global filter will be
+   * applied to all available web contexts.
+   * @param name filter name
+   * @param classname filter class name
+   * @param parameters a map from parameter names to initial values
+   */
+  void addGlobalFilter(String name, String classname, Map<String, String> parameters);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
new file mode 100644
index 0000000..e033c10
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/FilterInitializer.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Initialize a javax.servlet.Filter. 
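+ *
+ * <p>A minimal sketch of a concrete initializer; the filter name and class
+ * shown are illustrative:
+ * <pre>
+ *   public class NoCacheFilterInitializer extends FilterInitializer {
+ *     public void initFilter(FilterContainer container, Configuration conf) {
+ *       container.addGlobalFilter("NoCacheFilter", NoCacheFilter.class.getName(),
+ *           java.util.Collections.&lt;String, String&gt;emptyMap());
+ *     }
+ *   }
+ * </pre>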
+ */
+public abstract class FilterInitializer {
+  /**
+   * Initialize a Filter to a FilterContainer.
+   * @param container The filter container
+   * @param conf Configuration for run-time parameters
+   */
+  public abstract void initFilter(FilterContainer container, Configuration conf);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
new file mode 100644
index 0000000..60a74b7
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HtmlQuoting.java
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * This class is responsible for quoting HTML characters.
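+ *
+ * <p>A minimal round-trip sketch (shown as javadoc-escaped source):
+ * <pre>
+ *   String quoted = HtmlQuoting.quoteHtmlChars("a&lt;b");    // yields "a&amp;lt;b"
+ *   String original = HtmlQuoting.unquoteHtmlChars(quoted);  // back to "a&lt;b"
+ * </pre>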
+ */
+public class HtmlQuoting {
+  private static final byte[] ampBytes = "&amp;".getBytes();
+  private static final byte[] aposBytes = "&apos;".getBytes();
+  private static final byte[] gtBytes = "&gt;".getBytes();
+  private static final byte[] ltBytes = "&lt;".getBytes();
+  private static final byte[] quotBytes = "&quot;".getBytes();
+
+  /**
+   * Does the given string need to be quoted?
+   * @param data the string to check
+   * @param off the starting position
+   * @param len the number of bytes to check
+   * @return does the string contain any of the active html characters?
+   */
+  public static boolean needsQuoting(byte[] data, int off, int len) {
+    if (off + len > data.length) {
+      throw new IllegalStateException("off+len=" + (off + len)
+          + " must not exceed data length=" + data.length);
+    }
+    for (int i = off; i < off + len; ++i) {
+      switch(data[i]) {
+      case '&':
+      case '<':
+      case '>':
+      case '\'':
+      case '"':
+        return true;
+      default:
+        break;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Does the given string need to be quoted?
+   * @param str the string to check
+   * @return does the string contain any of the active html characters?
+   */
+  public static boolean needsQuoting(String str) {
+    if (str == null) {
+      return false;
+    }
+    byte[] bytes = str.getBytes();
+    return needsQuoting(bytes, 0, bytes.length);
+  }
+
+  /**
+   * Quote all of the active HTML characters in the given string as they
+   * are added to the buffer.
+   * @param output the stream to write the output to
+   * @param buffer the byte array to take the characters from
+   * @param off the index of the first byte to quote
+   * @param len the number of bytes to quote
+   */
+  public static void quoteHtmlChars(OutputStream output, byte[] buffer,
+                                    int off, int len) throws IOException {
+    for (int i = off; i < off + len; i++) {
+      switch (buffer[i]) {
+      case '&': output.write(ampBytes); break;
+      case '<': output.write(ltBytes); break;
+      case '>': output.write(gtBytes); break;
+      case '\'': output.write(aposBytes); break;
+      case '"': output.write(quotBytes); break;
+      default: output.write(buffer, i, 1);
+      }
+    }
+  }
+  
+  /**
+   * Quote the given item to make it html-safe.
+   * @param item the string to quote
+   * @return the quoted string
+   */
+  public static String quoteHtmlChars(String item) {
+    if (item == null) {
+      return null;
+    }
+    byte[] bytes = item.getBytes();
+    if (needsQuoting(bytes, 0, bytes.length)) {
+      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+      try {
+        quoteHtmlChars(buffer, bytes, 0, bytes.length);
+      } catch (IOException ioe) {
+        // Won't happen, since it is a bytearrayoutputstream
+      }
+      return buffer.toString();
+    } else {
+      return item;
+    }
+  }
+
+  /**
+   * Return an output stream that quotes all of the output.
+   * @param out the stream to write the quoted output to
+   * @return a new stream that the application should write to
+   * @throws IOException if the underlying output fails
+   */
+  public static OutputStream quoteOutputStream(final OutputStream out)
+      throws IOException {
+    return new OutputStream() {
+      private byte[] data = new byte[1];
+      @Override
+      public void write(byte[] data, int off, int len) throws IOException {
+        quoteHtmlChars(out, data, off, len);
+      }
+      
+      @Override
+      public void write(int b) throws IOException {
+        data[0] = (byte) b;
+        quoteHtmlChars(out, data, 0, 1);
+      }
+      
+      @Override
+      public void flush() throws IOException {
+        out.flush();
+      }
+      
+      @Override
+      public void close() throws IOException {
+        out.close();
+      }
+    };
+  }
+
+  /**
+   * Remove HTML quoting from a string.
+   * @param item the string to unquote
+   * @return the unquoted string
+   */
+  public static String unquoteHtmlChars(String item) {
+    if (item == null) {
+      return null;
+    }
+    int next = item.indexOf('&');
+    // nothing was quoted
+    if (next == -1) {
+      return item;
+    }
+    int len = item.length();
+    int posn = 0;
+    StringBuilder buffer = new StringBuilder();
+    while (next != -1) {
+      buffer.append(item.substring(posn, next));
+      if (item.startsWith("&amp;", next)) {
+        buffer.append('&');
+        next += 5;
+      } else if (item.startsWith("&apos;", next)) {
+        buffer.append('\'');
+        next += 6;
+      } else if (item.startsWith("&gt;", next)) {
+        buffer.append('>');
+        next += 4;
+      } else if (item.startsWith("&lt;", next)) {
+        buffer.append('<');
+        next += 4;
+      } else if (item.startsWith("&quot;", next)) {
+        buffer.append('"');
+        next += 6;
+      } else {
+        int end = item.indexOf(';', next)+1;
+        if (end == 0) {
+          end = len;
+        }
+        throw new IllegalArgumentException("Bad HTML quoting for " + 
+                                           item.substring(next,end));
+      }
+      posn = next;
+      next = item.indexOf('&', posn);
+    }
+    buffer.append(item.substring(posn, len));
+    return buffer.toString();
+  }
+  
+  public static void main(String[] args) throws Exception {
+    if (args.length == 0) {
+      throw new IllegalArgumentException("Please provide some arguments");
+    }
+    for (String arg : args) {
+      System.out.println("Original: " + arg);
+      String quoted = quoteHtmlChars(arg);
+      System.out.println("Quoted: "+ quoted);
+      String unquoted = unquoteHtmlChars(quoted);
+      System.out.println("Unquoted: " + unquoted);
+      System.out.println();
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
new file mode 100644
index 0000000..b9dde23
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpConfig.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Provides access to HTTP-related configuration, such as whether SSL is enabled.
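+ *
+ * <p>A minimal usage sketch (the host and port are illustrative):
+ * <pre>
+ *   HttpConfig httpConfig = new HttpConfig(conf);
+ *   String url = httpConfig.getSchemePrefix() + "example.host:16010";
+ * </pre>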
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class HttpConfig {
+  private Policy policy;
+  public enum Policy {
+    HTTP_ONLY,
+    HTTPS_ONLY,
+    HTTP_AND_HTTPS;
+
+    public static Policy fromString(String value) {
+      if (HTTPS_ONLY.name().equalsIgnoreCase(value)) {
+        return HTTPS_ONLY;
+      } else if (HTTP_AND_HTTPS.name().equalsIgnoreCase(value)) {
+        return HTTP_AND_HTTPS;
+      }
+      return HTTP_ONLY;
+    }
+
+    public boolean isHttpEnabled() {
+      return this == HTTP_ONLY || this == HTTP_AND_HTTPS;
+    }
+
+    public boolean isHttpsEnabled() {
+      return this == HTTPS_ONLY || this == HTTP_AND_HTTPS;
+    }
+  }
+
+  public HttpConfig(final Configuration conf) {
+    boolean sslEnabled = conf.getBoolean(
+      ServerConfigurationKeys.HBASE_SSL_ENABLED_KEY,
+      ServerConfigurationKeys.HBASE_SSL_ENABLED_DEFAULT);
+    policy = sslEnabled ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY;
+    if (sslEnabled) {
+      conf.addResource("ssl-server.xml");
+      conf.addResource("ssl-client.xml");
+    }
+  }
+
+  public void setPolicy(Policy policy) {
+    this.policy = policy;
+  }
+
+  public boolean isSecure() {
+    return policy == Policy.HTTPS_ONLY;
+  }
+
+  public String getSchemePrefix() {
+    return (isSecure()) ? "https://" : "http://";
+  }
+
+  public String getScheme(Policy policy) {
+    return policy == Policy.HTTPS_ONLY ? "https://" : "http://";
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
new file mode 100644
index 0000000..cfc0640
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.util.HashMap;
+
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogConfigurationException;
+import org.apache.commons.logging.LogFactory;
+import org.apache.log4j.Appender;
+import org.apache.log4j.Logger;
+
+import org.eclipse.jetty.server.RequestLog;
+import org.eclipse.jetty.server.NCSARequestLog;
+
+/**
+ * RequestLog object for use with Http
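+ *
+ * <p>{@link #getRequestLog(String)} returns a Jetty NCSARequestLog only when
+ * log4j has an HttpRequestLogAppender named "&lt;component&gt;requestlog"
+ * attached to the logger "http.requests.&lt;component&gt;". A programmatic
+ * wiring sketch (the file name and retention period are illustrative):
+ * <pre>
+ *   HttpRequestLogAppender appender = new HttpRequestLogAppender();
+ *   appender.setName("masterrequestlog");
+ *   appender.setFilename("/var/log/hbase/access_yyyy_mm_dd.log");
+ *   appender.setRetainDays(3);
+ *   org.apache.log4j.Logger.getLogger("http.requests.master").addAppender(appender);
+ *   RequestLog requestLog = HttpRequestLog.getRequestLog("master");
+ * </pre>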
+ */
+public class HttpRequestLog {
+
+  private static final Log LOG = LogFactory.getLog(HttpRequestLog.class);
+  private static final HashMap<String, String> serverToComponent;
+
+  static {
+    serverToComponent = new HashMap<>();
+    serverToComponent.put("master", "master");
+    serverToComponent.put("region", "regionserver");
+  }
+
+  public static RequestLog getRequestLog(String name) {
+
+    String lookup = serverToComponent.get(name);
+    if (lookup != null) {
+      name = lookup;
+    }
+    String loggerName = "http.requests." + name;
+    String appenderName = name + "requestlog";
+    Log logger = LogFactory.getLog(loggerName);
+
+    if (logger instanceof Log4JLogger) {
+      Log4JLogger httpLog4JLog = (Log4JLogger)logger;
+      Logger httpLogger = httpLog4JLog.getLogger();
+      Appender appender = null;
+
+      try {
+        appender = httpLogger.getAppender(appenderName);
+      } catch (LogConfigurationException e) {
+        LOG.warn("Http request log for " + loggerName
+            + " could not be created");
+        throw e;
+      }
+
+      if (appender == null) {
+        LOG.info("Http request log for " + loggerName
+            + " is not defined");
+        return null;
+      }
+
+      if (appender instanceof HttpRequestLogAppender) {
+        HttpRequestLogAppender requestLogAppender
+          = (HttpRequestLogAppender)appender;
+        NCSARequestLog requestLog = new NCSARequestLog();
+        requestLog.setFilename(requestLogAppender.getFilename());
+        requestLog.setRetainDays(requestLogAppender.getRetainDays());
+        return requestLog;
+      } else {
+        LOG.warn("Jetty request log for " + loggerName
+            + " was of the wrong class");
+        return null;
+      }
+    } else {
+      LOG.warn("Jetty request log can only be enabled using Log4j");
+      return null;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
new file mode 100644
index 0000000..8039b34
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLogAppender.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.log4j.spi.LoggingEvent;
+import org.apache.log4j.AppenderSkeleton;
+
+/**
+ * Log4j Appender adapter for HttpRequestLog
+ */
+public class HttpRequestLogAppender extends AppenderSkeleton {
+
+  private String filename;
+  private int retainDays;
+
+  public HttpRequestLogAppender() {
+  }
+
+  public void setRetainDays(int retainDays) {
+    this.retainDays = retainDays;
+  }
+
+  public int getRetainDays() {
+    return retainDays;
+  }
+
+  public void setFilename(String filename) {
+    this.filename = filename;
+  }
+
+  public String getFilename() {
+    return filename;
+  }
+
+  @Override
+  public void append(LoggingEvent event) {
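+    // Intentional no-op: this appender only carries request-log configuration.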
+  }
+
+  @Override
+  public void close() {
+    // Nothing to close; close() is required by the Appender contract.
+  }
+
+  @Override
+  public boolean requiresLayout() {
+    return false;
+  }
+}


[08/13] hbase git commit: Revert "HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module"

Posted by bu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
new file mode 100644
index 0000000..c2b5944
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
@@ -0,0 +1,1387 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import com.google.common.annotations.VisibleForTesting;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.io.PrintStream;
+import java.net.BindException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.http.conf.ConfServlet;
+import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
+import org.apache.hadoop.hbase.http.log.LogLevel;
+import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.util.ReflectionUtils;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.Shell;
+
+import org.eclipse.jetty.http.HttpVersion;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.SecureRequestCustomizer;
+import org.eclipse.jetty.server.SslConnectionFactory;
+import org.eclipse.jetty.server.handler.ContextHandlerCollection;
+import org.eclipse.jetty.server.handler.HandlerCollection;
+import org.eclipse.jetty.server.RequestLog;
+import org.eclipse.jetty.server.handler.RequestLogHandler;
+import org.eclipse.jetty.servlet.FilterMapping;
+import org.eclipse.jetty.servlet.ServletHandler;
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.MultiException;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.eclipse.jetty.webapp.WebAppContext;
+
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.servlet.ServletContainer;
+
+/**
+ * Create a Jetty embedded server to answer http requests. The primary goal
+ * is to serve up status information for the server.
+ * There are three contexts:
+ *   "/logs/" -&gt; points to the log directory
+ *   "/static/" -&gt; points to common static files (src/webapps/static)
+ *   "/" -&gt; the jsp server code from (src/webapps/&lt;name&gt;)
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class HttpServer implements FilterContainer {
+  private static final Log LOG = LogFactory.getLog(HttpServer.class);
+  private static final String EMPTY_STRING = "";
+
+  private static final int DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K
+
+  static final String FILTER_INITIALIZERS_PROPERTY
+      = "hbase.http.filter.initializers";
+  static final String HTTP_MAX_THREADS = "hbase.http.max.threads";
+
+  public static final String HTTP_UI_AUTHENTICATION = "hbase.security.authentication.ui";
+  static final String HTTP_AUTHENTICATION_PREFIX = "hbase.security.authentication.";
+  static final String HTTP_SPNEGO_AUTHENTICATION_PREFIX = HTTP_AUTHENTICATION_PREFIX
+      + "spnego.";
+  static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX = "kerberos.principal";
+  public static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY =
+      HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX;
+  static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX = "kerberos.keytab";
+  public static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY =
+      HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX;
+  static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX = "kerberos.name.rules";
+  public static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_KEY =
+      HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX;
+  static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX =
+      "signature.secret.file";
+  public static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY =
+      HTTP_AUTHENTICATION_PREFIX + HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX;
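+
+  // An illustrative hbase-site.xml sketch enabling SPNEGO for the Web UI;
+  // the principal and keytab values below are examples only:
+  //   hbase.security.authentication.spnego.kerberos.principal = HTTP/_HOST@EXAMPLE.COM
+  //   hbase.security.authentication.spnego.kerberos.keytab = /etc/security/keytabs/spnego.keytab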
+
+  // The ServletContext attribute where the daemon Configuration
+  // gets stored.
+  public static final String CONF_CONTEXT_ATTRIBUTE = "hbase.conf";
+  public static final String ADMINS_ACL = "admins.acl";
+  public static final String BIND_ADDRESS = "bind.address";
+  public static final String SPNEGO_FILTER = "SpnegoFilter";
+  public static final String NO_CACHE_FILTER = "NoCacheFilter";
+  public static final String APP_DIR = "webapps";
+
+  private final AccessControlList adminsAcl;
+
+  protected final Server webServer;
+  protected String appDir;
+  protected String logDir;
+
+  private static class ListenerInfo {
+    /**
+     * Boolean flag to determine whether the HTTP server should clean up the
+     * listener in stop().
+     */
+    private final boolean isManaged;
+    private final ServerConnector listener;
+    private ListenerInfo(boolean isManaged, ServerConnector listener) {
+      this.isManaged = isManaged;
+      this.listener = listener;
+    }
+  }
+
+  private final List<ListenerInfo> listeners = Lists.newArrayList();
+
+  @VisibleForTesting
+  public List<ServerConnector> getServerConnectors() {
+    return listeners.stream().map(info -> info.listener).collect(Collectors.toList());
+  }
+
+  protected final WebAppContext webAppContext;
+  protected final boolean findPort;
+  protected final Map<ServletContextHandler, Boolean> defaultContexts = new HashMap<>();
+  protected final List<String> filterNames = new ArrayList<>();
+  static final String STATE_DESCRIPTION_ALIVE = " - alive";
+  static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
+
+  /**
+   * Class to construct instances of HTTP server with specific options.
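+   *
+   * <p>A minimal usage sketch (the name, address and port are illustrative;
+   * setName() is deprecated but build() still requires a name):
+   * <pre>
+   *   HttpServer server = new HttpServer.Builder()
+   *       .setName("master")
+   *       .addEndpoint(URI.create("http://0.0.0.0:16010"))
+   *       .setConf(conf)
+   *       .setFindPort(true)
+   *       .build();
+   *   server.start();
+   * </pre>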
+   */
+  public static class Builder {
+    private ArrayList<URI> endpoints = Lists.newArrayList();
+    private Configuration conf;
+    private String[] pathSpecs;
+    private AccessControlList adminsAcl;
+    private boolean securityEnabled = false;
+    private String usernameConfKey;
+    private String keytabConfKey;
+    private boolean needsClientAuth;
+
+    private String hostName;
+    private String appDir = APP_DIR;
+    private String logDir;
+    private boolean findPort;
+
+    private String trustStore;
+    private String trustStorePassword;
+    private String trustStoreType;
+
+    private String keyStore;
+    private String keyStorePassword;
+    private String keyStoreType;
+
+    // The -keypass option in keytool
+    private String keyPassword;
+
+    private String kerberosNameRulesKey;
+    private String signatureSecretFileKey;
+
+    @Deprecated
+    private String name;
+    @Deprecated
+    private String bindAddress;
+    @Deprecated
+    private int port = -1;
+
+    /**
+     * Add an endpoint that the HTTP server should listen to.
+     *
+     * @param endpoint
+     *          the endpoint that the HTTP server should listen to. The
+     *          scheme specifies the protocol (i.e. HTTP / HTTPS), the host
+     *          specifies the binding address, and the port specifies the
+     *          listening port. Unspecified or zero port means that the server
+     *          can listen to any port.
+     */
+    public Builder addEndpoint(URI endpoint) {
+      endpoints.add(endpoint);
+      return this;
+    }
+
+    /**
+     * Set the hostname of the http server. The host name is used to resolve the
+     * _HOST field in Kerberos principals. The hostname of the first listener
+     * will be used if the name is unspecified.
+     */
+    public Builder hostName(String hostName) {
+      this.hostName = hostName;
+      return this;
+    }
+
+    public Builder trustStore(String location, String password, String type) {
+      this.trustStore = location;
+      this.trustStorePassword = password;
+      this.trustStoreType = type;
+      return this;
+    }
+
+    public Builder keyStore(String location, String password, String type) {
+      this.keyStore = location;
+      this.keyStorePassword = password;
+      this.keyStoreType = type;
+      return this;
+    }
+
+    public Builder keyPassword(String password) {
+      this.keyPassword = password;
+      return this;
+    }
+
+    /**
+     * Specify whether the server should authorize the client in SSL
+     * connections.
+     */
+    public Builder needsClientAuth(boolean value) {
+      this.needsClientAuth = value;
+      return this;
+    }
+
+    /**
+     * Use setAppDir() instead.
+     */
+    @Deprecated
+    public Builder setName(String name){
+      this.name = name;
+      return this;
+    }
+
+    /**
+     * Use addEndpoint() instead.
+     */
+    @Deprecated
+    public Builder setBindAddress(String bindAddress){
+      this.bindAddress = bindAddress;
+      return this;
+    }
+
+    /**
+     * Use addEndpoint() instead.
+     */
+    @Deprecated
+    public Builder setPort(int port) {
+      this.port = port;
+      return this;
+    }
+
+    public Builder setFindPort(boolean findPort) {
+      this.findPort = findPort;
+      return this;
+    }
+
+    public Builder setConf(Configuration conf) {
+      this.conf = conf;
+      return this;
+    }
+
+    public Builder setPathSpec(String[] pathSpec) {
+      this.pathSpecs = pathSpec;
+      return this;
+    }
+
+    public Builder setACL(AccessControlList acl) {
+      this.adminsAcl = acl;
+      return this;
+    }
+
+    public Builder setSecurityEnabled(boolean securityEnabled) {
+      this.securityEnabled = securityEnabled;
+      return this;
+    }
+
+    public Builder setUsernameConfKey(String usernameConfKey) {
+      this.usernameConfKey = usernameConfKey;
+      return this;
+    }
+
+    public Builder setKeytabConfKey(String keytabConfKey) {
+      this.keytabConfKey = keytabConfKey;
+      return this;
+    }
+
+    public Builder setKerberosNameRulesKey(String kerberosNameRulesKey) {
+      this.kerberosNameRulesKey = kerberosNameRulesKey;
+      return this;
+    }
+
+    public Builder setSignatureSecretFileKey(String signatureSecretFileKey) {
+      this.signatureSecretFileKey = signatureSecretFileKey;
+      return this;
+    }
+
+    public Builder setAppDir(String appDir) {
+      this.appDir = appDir;
+      return this;
+    }
+
+    public Builder setLogDir(String logDir) {
+      this.logDir = logDir;
+      return this;
+    }
+
+    public HttpServer build() throws IOException {
+
+      // The name is deprecated, but build() still requires it to locate the webapp directory.
+      if (this.name == null) {
+        throw new HadoopIllegalArgumentException("name is not set");
+      }
+
+      // Make the behavior compatible with deprecated interfaces
+      if (bindAddress != null && port != -1) {
+        try {
+          endpoints.add(0, new URI("http", "", bindAddress, port, "", "", ""));
+        } catch (URISyntaxException e) {
+          throw new HadoopIllegalArgumentException("Invalid endpoint: " + e);
+        }
+      }
+
+      if (endpoints.isEmpty()) {
+        throw new HadoopIllegalArgumentException("No endpoints specified");
+      }
+
+      if (hostName == null) {
+        hostName = endpoints.get(0).getHost();
+      }
+
+      if (this.conf == null) {
+        conf = new Configuration();
+      }
+
+      HttpServer server = new HttpServer(this);
+
+      if (this.securityEnabled) {
+        server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey, kerberosNameRulesKey,
+            signatureSecretFileKey);
+      }
+
+      for (URI ep : endpoints) {
+        ServerConnector listener = null;
+        String scheme = ep.getScheme();
+        HttpConfiguration httpConfig = new HttpConfiguration();
+        httpConfig.setSecureScheme("https");
+        httpConfig.setHeaderCacheSize(DEFAULT_MAX_HEADER_SIZE);
+        httpConfig.setResponseHeaderSize(DEFAULT_MAX_HEADER_SIZE);
+        httpConfig.setRequestHeaderSize(DEFAULT_MAX_HEADER_SIZE);
+
+        if ("http".equals(scheme)) {
+          listener = new ServerConnector(server.webServer, new HttpConnectionFactory(httpConfig));
+        } else if ("https".equals(scheme)) {
+          HttpConfiguration httpsConfig = new HttpConfiguration(httpConfig);
+          httpsConfig.addCustomizer(new SecureRequestCustomizer());
+          SslContextFactory sslCtxFactory = new SslContextFactory();
+          sslCtxFactory.setNeedClientAuth(needsClientAuth);
+          sslCtxFactory.setKeyManagerPassword(keyPassword);
+
+          if (keyStore != null) {
+            sslCtxFactory.setKeyStorePath(keyStore);
+            sslCtxFactory.setKeyStoreType(keyStoreType);
+            sslCtxFactory.setKeyStorePassword(keyStorePassword);
+          }
+
+          if (trustStore != null) {
+            sslCtxFactory.setTrustStorePath(trustStore);
+            sslCtxFactory.setTrustStoreType(trustStoreType);
+            sslCtxFactory.setTrustStorePassword(trustStorePassword);
+
+          }
+          listener = new ServerConnector(server.webServer, new SslConnectionFactory(sslCtxFactory,
+              HttpVersion.HTTP_1_1.toString()), new HttpConnectionFactory(httpsConfig));
+        } else {
+          throw new HadoopIllegalArgumentException(
+              "unknown scheme for endpoint:" + ep);
+        }
+
+        // default settings for connector
+        listener.setAcceptQueueSize(128);
+        if (Shell.WINDOWS) {
+          // result of setting the SO_REUSEADDR flag is different on Windows
+          // http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
+          // without this, two server processes can start on the same machine and
+          // listen on the same port, with indeterminate routing of incoming requests
+          listener.setReuseAddress(false);
+        }
+
+        listener.setHost(ep.getHost());
+        listener.setPort(ep.getPort() == -1 ? 0 : ep.getPort());
+        server.addManagedListener(listener);
+      }
+
+      server.loadListeners();
+      return server;
+
+    }
+
+  }
+
+  /** Same as this(name, bindAddress, port, findPort, new Configuration()); */
+  @Deprecated
+  public HttpServer(String name, String bindAddress, int port, boolean findPort
+      ) throws IOException {
+    this(name, bindAddress, port, findPort, new Configuration());
+  }
+
+  /**
+   * Create a status server on the given port. Allows you to specify the
+   * path specifications that this server will be serving so that they will be
+   * added to the filters properly.
+   *
+   * @param name The name of the server
+   * @param bindAddress The address for this server
+   * @param port The port to use on the server
+   * @param findPort whether the server should start at the given port and
+   *        increment by 1 until it finds a free port.
+   * @param conf Configuration
+   * @param pathSpecs Path specifications that this httpserver will be serving.
+   *        These will be added to any filters.
+   */
+  @Deprecated
+  public HttpServer(String name, String bindAddress, int port,
+      boolean findPort, Configuration conf, String[] pathSpecs) throws IOException {
+    this(name, bindAddress, port, findPort, conf, null, pathSpecs);
+  }
+
+  /**
+   * Create a status server on the given port.
+   * The jsp scripts are taken from src/webapps/&lt;name&gt;.
+   * @param name The name of the server
+   * @param bindAddress The address for this server
+   * @param port The port to use on the server
+   * @param findPort whether the server should start at the given port and
+   *        increment by 1 until it finds a free port.
+   * @param conf Configuration
+   */
+  @Deprecated
+  public HttpServer(String name, String bindAddress, int port,
+      boolean findPort, Configuration conf) throws IOException {
+    this(name, bindAddress, port, findPort, conf, null, null);
+  }
+
+  @Deprecated
+  public HttpServer(String name, String bindAddress, int port,
+      boolean findPort, Configuration conf, AccessControlList adminsAcl)
+      throws IOException {
+    this(name, bindAddress, port, findPort, conf, adminsAcl, null);
+  }
+
+  /**
+   * Create a status server on the given port.
+   * The jsp scripts are taken from src/webapps/&lt;name&gt;.
+   * @param name The name of the server
+   * @param bindAddress The address for this server
+   * @param port The port to use on the server
+   * @param findPort whether the server should start at the given port and
+   *        increment by 1 until it finds a free port.
+   * @param conf Configuration
+   * @param adminsAcl {@link AccessControlList} of the admins
+   * @param pathSpecs Path specifications that this httpserver will be serving.
+   *        These will be added to any filters.
+   */
+  @Deprecated
+  public HttpServer(String name, String bindAddress, int port,
+      boolean findPort, Configuration conf, AccessControlList adminsAcl,
+      String[] pathSpecs) throws IOException {
+    this(new Builder().setName(name)
+        .addEndpoint(URI.create("http://" + bindAddress + ":" + port))
+        .setFindPort(findPort).setConf(conf).setACL(adminsAcl)
+        .setPathSpec(pathSpecs));
+  }
+
+  private HttpServer(final Builder b) throws IOException {
+    this.appDir = b.appDir;
+    this.logDir = b.logDir;
+    final String appDir = getWebAppsPath(b.name);
+
+    int maxThreads = b.conf.getInt(HTTP_MAX_THREADS, 16);
+    // If HTTP_MAX_THREADS is less than or equal to 0, QueuedThreadPool() will use the
+    // default value (currently 200).
+    QueuedThreadPool threadPool = maxThreads <= 0 ? new QueuedThreadPool()
+        : new QueuedThreadPool(maxThreads);
+    threadPool.setDaemon(true);
+    this.webServer = new Server(threadPool);
+
+    this.adminsAcl = b.adminsAcl;
+    this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir);
+    this.findPort = b.findPort;
+    initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs);
+  }
+
+  private void initializeWebServer(String name, String hostName,
+      Configuration conf, String[] pathSpecs)
+      throws FileNotFoundException, IOException {
+
+    Preconditions.checkNotNull(webAppContext);
+
+    HandlerCollection handlerCollection = new HandlerCollection();
+
+    ContextHandlerCollection contexts = new ContextHandlerCollection();
+    RequestLog requestLog = HttpRequestLog.getRequestLog(name);
+
+    if (requestLog != null) {
+      RequestLogHandler requestLogHandler = new RequestLogHandler();
+      requestLogHandler.setRequestLog(requestLog);
+      handlerCollection.addHandler(requestLogHandler);
+    }
+
+    final String appDir = getWebAppsPath(name);
+
+    handlerCollection.addHandler(contexts);
+    handlerCollection.addHandler(webAppContext);
+
+    webServer.setHandler(handlerCollection);
+
+    addDefaultApps(contexts, appDir, conf);
+
+    addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
+    Map<String, String> params = new HashMap<>();
+    params.put("xframeoptions", conf.get("hbase.http.filter.xframeoptions.mode", "DENY"));
+    addGlobalFilter("clickjackingprevention",
+            ClickjackingPreventionFilter.class.getName(), params);
+    final FilterInitializer[] initializers = getFilterInitializers(conf);
+    if (initializers != null) {
+      conf = new Configuration(conf);
+      conf.set(BIND_ADDRESS, hostName);
+      for (FilterInitializer c : initializers) {
+        c.initFilter(this, conf);
+      }
+    }
+
+    addDefaultServlets();
+
+    if (pathSpecs != null) {
+      for (String path : pathSpecs) {
+        LOG.info("adding path spec: " + path);
+        addFilterPathMapping(path, webAppContext);
+      }
+    }
+  }
+
+  private void addManagedListener(ServerConnector connector) {
+    listeners.add(new ListenerInfo(true, connector));
+  }
+
+  private static WebAppContext createWebAppContext(String name,
+      Configuration conf, AccessControlList adminsAcl, final String appDir) {
+    WebAppContext ctx = new WebAppContext();
+    ctx.setDisplayName(name);
+    ctx.setContextPath("/");
+    ctx.setWar(appDir + "/" + name);
+    ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
+    ctx.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
+    addNoCacheFilter(ctx);
+    return ctx;
+  }
+
+  private static void addNoCacheFilter(WebAppContext ctxt) {
+    defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(),
+        Collections.<String, String> emptyMap(), new String[] { "/*" });
+  }
+
+  /** Get an array of FilterInitializer instances specified in the conf */
+  private static FilterInitializer[] getFilterInitializers(Configuration conf) {
+    if (conf == null) {
+      return null;
+    }
+
+    Class<?>[] classes = conf.getClasses(FILTER_INITIALIZERS_PROPERTY);
+    if (classes == null) {
+      return null;
+    }
+
+    FilterInitializer[] initializers = new FilterInitializer[classes.length];
+    for(int i = 0; i < classes.length; i++) {
+      initializers[i] = (FilterInitializer)ReflectionUtils.newInstance(classes[i]);
+    }
+    return initializers;
+  }
+
+  /**
+   * Add default apps.
+   * @param appDir The application directory
+   * @throws IOException
+   */
+  protected void addDefaultApps(ContextHandlerCollection parent,
+      final String appDir, Configuration conf) throws IOException {
+    // set up the context for "/logs/" if "hadoop.log.dir" property is defined.
+    String logDir = this.logDir;
+    if (logDir == null) {
+      logDir = System.getProperty("hadoop.log.dir");
+    }
+    if (logDir != null) {
+      ServletContextHandler logContext = new ServletContextHandler(parent, "/logs");
+      logContext.addServlet(AdminAuthorizedServlet.class, "/*");
+      logContext.setResourceBase(logDir);
+
+      if (conf.getBoolean(
+          ServerConfigurationKeys.HBASE_JETTY_LOGS_SERVE_ALIASES,
+          ServerConfigurationKeys.DEFAULT_HBASE_JETTY_LOGS_SERVE_ALIASES)) {
+        Map<String, String> params = logContext.getInitParams();
+        params.put(
+            "org.mortbay.jetty.servlet.Default.aliases", "true");
+      }
+      logContext.setDisplayName("logs");
+      setContextAttributes(logContext, conf);
+      addNoCacheFilter(webAppContext);
+      defaultContexts.put(logContext, true);
+    }
+    // set up the context for "/static/*"
+    ServletContextHandler staticContext = new ServletContextHandler(parent, "/static");
+    staticContext.setResourceBase(appDir + "/static");
+    staticContext.addServlet(DefaultServlet.class, "/*");
+    staticContext.setDisplayName("static");
+    setContextAttributes(staticContext, conf);
+    defaultContexts.put(staticContext, true);
+  }
+
+  private void setContextAttributes(ServletContextHandler context, Configuration conf) {
+    context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
+    context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
+  }
+
+  /**
+   * Add default servlets.
+   */
+  protected void addDefaultServlets() {
+    // set up default servlets
+    addServlet("stacks", "/stacks", StackServlet.class);
+    addServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
+
+    // Hadoop3 has moved completely to metrics2, and dropped support for Metrics v1's
+    // MetricsServlet (see HADOOP-12504). We use reflection to load it when running
+    // against hadoop2.
+    // Remove when we drop support for hbase on hadoop2.x.
+    try {
+      Class clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet");
+      addServlet("metrics", "/metrics", clz);
+    } catch (Exception e) {
+      // do nothing
+    }
+
+    addServlet("jmx", "/jmx", JMXJsonServlet.class);
+    addServlet("conf", "/conf", ConfServlet.class);
+  }
+
+  /**
+   * Set a value in the webapp context. These values are available to the jsp
+   * pages as "application.getAttribute(name)".
+   * @param name The name of the attribute
+   * @param value The value of the attribute
+   */
+  public void setAttribute(String name, Object value) {
+    webAppContext.setAttribute(name, value);
+  }
+
+  /**
+   * Add a Jersey resource package.
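+   * <p>e.g. {@code addJerseyResourcePackage("org.example.rest", "/rest/*")};
+   * the package and path spec here are illustrative.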
+   * @param packageName The Java package name containing the Jersey resource.
+   * @param pathSpec The path spec for the servlet
+   */
+  public void addJerseyResourcePackage(final String packageName,
+      final String pathSpec) {
+    LOG.info("addJerseyResourcePackage: packageName=" + packageName
+        + ", pathSpec=" + pathSpec);
+
+    ResourceConfig application = new ResourceConfig().packages(packageName);
+    final ServletHolder sh = new ServletHolder(new ServletContainer(application));
+    webAppContext.addServlet(sh, pathSpec);
+  }
+
+  /**
+   * Add a servlet in the server.
+   * @param name The name of the servlet (can be passed as null)
+   * @param pathSpec The path spec for the servlet
+   * @param clazz The servlet class
+   */
+  public void addServlet(String name, String pathSpec,
+      Class<? extends HttpServlet> clazz) {
+    addInternalServlet(name, pathSpec, clazz, false);
+    addFilterPathMapping(pathSpec, webAppContext);
+  }
+
+  /**
+   * Add an internal servlet in the server.
+   * Note: This method is to be used for adding servlets that facilitate
+   * internal communication and not for user facing functionality. For
+   * servlets added using this method, filters are not enabled.
+   *
+   * @param name The name of the servlet (can be passed as null)
+   * @param pathSpec The path spec for the servlet
+   * @param clazz The servlet class
+   */
+  public void addInternalServlet(String name, String pathSpec,
+      Class<? extends HttpServlet> clazz) {
+    addInternalServlet(name, pathSpec, clazz, false);
+  }
+
+  /**
+   * Add an internal servlet in the server, specifying whether or not to
+   * protect with Kerberos authentication.
+   * Note: This method is to be used for adding servlets that facilitate
+   * internal communication and not for user facing functionality. For
+   * servlets added using this method, filters (except internal Kerberos
+   * filters) are not enabled.
+   *
+   * @param name The name of the servlet (can be passed as null)
+   * @param pathSpec The path spec for the servlet
+   * @param clazz The servlet class
+   * @param requireAuth Require Kerberos authentication to access the servlet
+   */
+  public void addInternalServlet(String name, String pathSpec,
+      Class<? extends HttpServlet> clazz, boolean requireAuth) {
+    ServletHolder holder = new ServletHolder(clazz);
+    if (name != null) {
+      holder.setName(name);
+    }
+    webAppContext.addServlet(holder, pathSpec);
+
+    if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
+       LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
+       ServletHandler handler = webAppContext.getServletHandler();
+       FilterMapping fmap = new FilterMapping();
+       fmap.setPathSpec(pathSpec);
+       fmap.setFilterName(SPNEGO_FILTER);
+       fmap.setDispatches(FilterMapping.ALL);
+       handler.addFilterMapping(fmap);
+    }
+  }
+
+  @Override
+  public void addFilter(String name, String classname,
+      Map<String, String> parameters) {
+
+    final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
+    defineFilter(webAppContext, name, classname, parameters, USER_FACING_URLS);
+    LOG.info("Added filter " + name + " (class=" + classname
+        + ") to context " + webAppContext.getDisplayName());
+    final String[] ALL_URLS = { "/*" };
+    for (Map.Entry<ServletContextHandler, Boolean> e : defaultContexts.entrySet()) {
+      if (e.getValue()) {
+        ServletContextHandler handler = e.getKey();
+        defineFilter(handler, name, classname, parameters, ALL_URLS);
+        LOG.info("Added filter " + name + " (class=" + classname
+            + ") to context " + handler.getDisplayName());
+      }
+    }
+    filterNames.add(name);
+  }
+
+  @Override
+  public void addGlobalFilter(String name, String classname,
+      Map<String, String> parameters) {
+    final String[] ALL_URLS = { "/*" };
+    defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
+    for (ServletContextHandler ctx : defaultContexts.keySet()) {
+      defineFilter(ctx, name, classname, parameters, ALL_URLS);
+    }
+    LOG.info("Added global filter '" + name + "' (class=" + classname + ")");
+  }
+
+  /**
+   * Define a filter for a context and set up default url mappings.
+   */
+  public static void defineFilter(ServletContextHandler handler, String name,
+      String classname, Map<String,String> parameters, String[] urls) {
+
+    FilterHolder holder = new FilterHolder();
+    holder.setName(name);
+    holder.setClassName(classname);
+    if (parameters != null) {
+      holder.setInitParameters(parameters);
+    }
+    FilterMapping fmap = new FilterMapping();
+    fmap.setPathSpecs(urls);
+    fmap.setDispatches(FilterMapping.ALL);
+    fmap.setFilterName(name);
+    handler.getServletHandler().addFilter(holder, fmap);
+  }
+
+  /**
+   * Add the path spec to the filter path mapping.
+   * @param pathSpec The path spec
+   * @param webAppCtx The WebApplicationContext to add to
+   */
+  protected void addFilterPathMapping(String pathSpec,
+      WebAppContext webAppCtx) {
+    for(String name : filterNames) {
+      FilterMapping fmap = new FilterMapping();
+      fmap.setPathSpec(pathSpec);
+      fmap.setFilterName(name);
+      fmap.setDispatches(FilterMapping.ALL);
+      webAppCtx.getServletHandler().addFilterMapping(fmap);
+    }
+  }
+
+  /**
+   * Get the value in the webapp context.
+   * @param name The name of the attribute
+   * @return The value of the attribute
+   */
+  public Object getAttribute(String name) {
+    return webAppContext.getAttribute(name);
+  }
+
+  public WebAppContext getWebAppContext(){
+    return this.webAppContext;
+  }
+
+  public String getWebAppsPath(String appName) throws FileNotFoundException {
+    return getWebAppsPath(this.appDir, appName);
+  }
+
+  /**
+   * Get the pathname to the webapps files.
+   * @param appName eg "secondary" or "datanode"
+   * @return the pathname as a URL
+   * @throws FileNotFoundException if 'webapps' directory cannot be found on CLASSPATH.
+   */
+  protected String getWebAppsPath(String webapps, String appName) throws FileNotFoundException {
+    URL url = getClass().getClassLoader().getResource(webapps + "/" + appName);
+    if (url == null) {
+      throw new FileNotFoundException(webapps + "/" + appName
+          + " not found in CLASSPATH");
+    }
+    String urlString = url.toString();
+    return urlString.substring(0, urlString.lastIndexOf('/'));
+  }
+
+  /**
+   * Get the port that the server is on
+   * @return the port
+   */
+  @Deprecated
+  public int getPort() {
+    return ((ServerConnector)webServer.getConnectors()[0]).getLocalPort();
+  }
+
+  /**
+   * Get the address that corresponds to a particular connector.
+   *
+   * @return the corresponding address for the connector, or null if there is no
+   *         such connector or the connector is not bound.
+   */
+  public InetSocketAddress getConnectorAddress(int index) {
+    Preconditions.checkArgument(index >= 0);
+    if (index >= webServer.getConnectors().length) {
+      return null;
+    }
+
+    ServerConnector c = (ServerConnector)webServer.getConnectors()[index];
+    if (c.getLocalPort() == -1 || c.getLocalPort() == -2) {
+      // -1 if the connector has not been opened
+      // -2 if it has been closed
+      return null;
+    }
+
+    return new InetSocketAddress(c.getHost(), c.getLocalPort());
+  }
+
+  /**
+   * Set the min, max number of worker threads (simultaneous connections).
+   */
+  public void setThreads(int min, int max) {
+    QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool();
+    pool.setMinThreads(min);
+    pool.setMaxThreads(max);
+  }
+
+  private void initSpnego(Configuration conf, String hostName,
+      String usernameConfKey, String keytabConfKey, String kerberosNameRuleKey,
+      String signatureSecretKeyFileKey) throws IOException {
+    Map<String, String> params = new HashMap<>();
+    String principalInConf = getOrEmptyString(conf, usernameConfKey);
+    if (!principalInConf.isEmpty()) {
+      params.put(HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX, SecurityUtil.getServerPrincipal(
+          principalInConf, hostName));
+    }
+    String httpKeytab = getOrEmptyString(conf, keytabConfKey);
+    if (!httpKeytab.isEmpty()) {
+      params.put(HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX, httpKeytab);
+    }
+    String kerberosNameRule = getOrEmptyString(conf, kerberosNameRuleKey);
+    if (!kerberosNameRule.isEmpty()) {
+      params.put(HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX, kerberosNameRule);
+    }
+    String signatureSecretKeyFile = getOrEmptyString(conf, signatureSecretKeyFileKey);
+    if (!signatureSecretKeyFile.isEmpty()) {
+      params.put(HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX,
+          signatureSecretKeyFile);
+    }
+    params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
+
+    // Verify that the required options were provided
+    if (isMissing(params.get(HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX)) ||
+            isMissing(params.get(HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX))) {
+      throw new IllegalArgumentException(usernameConfKey + " and "
+          + keytabConfKey + " are both required in the configuration "
+          + "to enable SPNEGO/Kerberos authentication for the Web UI");
+    }
+
+    addGlobalFilter(SPNEGO_FILTER, AuthenticationFilter.class.getName(), params);
+  }
+
+  /**
+   * Returns true if the argument is null or consists only of whitespace.
+   */
+  private boolean isMissing(String value) {
+    if (null == value) {
+      return true;
+    }
+    return value.trim().isEmpty();
+  }
+
+  /**
+   * Extracts the value for the given key from the configuration, or returns a
+   * zero-length string if the key is null or unset.
+   */
+  private String getOrEmptyString(Configuration conf, String key) {
+    if (null == key) {
+      return EMPTY_STRING;
+    }
+    final String value = conf.get(key.trim());
+    return null == value ? EMPTY_STRING : value;
+  }
+
+  /**
+   * Start the server. Does not wait for the server to start.
+   */
+  public void start() throws IOException {
+    try {
+      try {
+        openListeners();
+        webServer.start();
+      } catch (IOException ex) {
+        LOG.info("HttpServer.start() threw a non Bind IOException", ex);
+        throw ex;
+      } catch (MultiException ex) {
+        LOG.info("HttpServer.start() threw a MultiException", ex);
+        throw ex;
+      }
+      // Make sure there are no handler failures.
+      Handler[] handlers = webServer.getHandlers();
+      for (int i = 0; i < handlers.length; i++) {
+        if (handlers[i].isFailed()) {
+          throw new IOException(
+              "Problem in starting http server. Server handlers failed");
+        }
+      }
+      // Make sure there are no errors initializing the context.
+      Throwable unavailableException = webAppContext.getUnavailableException();
+      if (unavailableException != null) {
+        // Have to stop the webserver, or else its non-daemon threads
+        // will hang forever.
+        webServer.stop();
+        throw new IOException("Unable to initialize WebAppContext",
+            unavailableException);
+      }
+    } catch (IOException e) {
+      throw e;
+    } catch (InterruptedException e) {
+      throw (IOException) new InterruptedIOException(
+          "Interrupted while starting HTTP server").initCause(e);
+    } catch (Exception e) {
+      throw new IOException("Problem starting http server", e);
+    }
+  }
+
+  private void loadListeners() {
+    for (ListenerInfo li : listeners) {
+      webServer.addConnector(li.listener);
+    }
+  }
+
+  /**
+   * Open the managed listeners for the server.
+   * @throws Exception if a listener fails to open
+   */
+  @VisibleForTesting
+  void openListeners() throws Exception {
+    for (ListenerInfo li : listeners) {
+      ServerConnector listener = li.listener;
+      if (!li.isManaged || (li.listener.getLocalPort() != -1 && li.listener.getLocalPort() != -2)) {
+        // This listener is either started externally or has already been opened.
+        // (A local port of -1 means not yet opened, -2 means closed; see getConnectorAddress.)
+        continue;
+      }
+      int port = listener.getPort();
+      while (true) {
+        // jetty has a bug where you can't reopen a listener that previously
+        // failed to open w/o issuing a close first, even if the port is changed
+        try {
+          listener.close();
+          listener.open();
+          LOG.info("Jetty bound to port " + listener.getLocalPort());
+          break;
+        } catch (BindException ex) {
+          if (port == 0 || !findPort) {
+            BindException be = new BindException("Port in use: "
+                + listener.getHost() + ":" + listener.getPort());
+            be.initCause(ex);
+            throw be;
+          }
+        }
+        // try the next port number
+        listener.setPort(++port);
+        Thread.sleep(100);
+      }
+    }
+  }
+
+  /**
+   * Stop the server.
+   */
+  public void stop() throws Exception {
+    MultiException exception = null;
+    for (ListenerInfo li : listeners) {
+      if (!li.isManaged) {
+        continue;
+      }
+
+      try {
+        li.listener.close();
+      } catch (Exception e) {
+        LOG.error(
+            "Error while stopping listener for webapp"
+                + webAppContext.getDisplayName(), e);
+        exception = addMultiException(exception, e);
+      }
+    }
+
+    try {
+      // clear & stop webAppContext attributes to avoid memory leaks.
+      webAppContext.clearAttributes();
+      webAppContext.stop();
+    } catch (Exception e) {
+      LOG.error("Error while stopping web app context for webapp "
+          + webAppContext.getDisplayName(), e);
+      exception = addMultiException(exception, e);
+    }
+
+    try {
+      webServer.stop();
+    } catch (Exception e) {
+      LOG.error("Error while stopping web server for webapp "
+          + webAppContext.getDisplayName(), e);
+      exception = addMultiException(exception, e);
+    }
+
+    if (exception != null) {
+      exception.ifExceptionThrow();
+    }
+
+  }
+
+  private MultiException addMultiException(MultiException exception, Exception e) {
+    if (exception == null) {
+      exception = new MultiException();
+    }
+    exception.add(e);
+    return exception;
+  }
+
+  public void join() throws InterruptedException {
+    webServer.join();
+  }
+
+  /**
+   * Test for the availability of the web server
+   * @return true if the web server is started, false otherwise
+   */
+  public boolean isAlive() {
+    return webServer != null && webServer.isStarted();
+  }
+
+  /**
+   * Return a description of the HttpServer's state and the hosts and ports
+   * it is listening on, if live.
+   * @return the description string
+   */
+  @Override
+  public String toString() {
+    if (listeners.isEmpty()) {
+      return "Inactive HttpServer";
+    } else {
+      StringBuilder sb = new StringBuilder("HttpServer (")
+        .append(isAlive() ? STATE_DESCRIPTION_ALIVE : STATE_DESCRIPTION_NOT_LIVE).append("), listening at:");
+      for (ListenerInfo li : listeners) {
+        ServerConnector l = li.listener;
+        sb.append(l.getHost()).append(":").append(l.getPort()).append("/,");
+      }
+      return sb.toString();
+    }
+  }
+
+  /**
+   * Checks whether the user has privileges to access the instrumentation servlets.
+   * <p>
+   * If <code>hadoop.security.instrumentation.requires.admin</code> is set to FALSE
+   * (the default value) it always returns TRUE.
+   * </p><p>
+   * If <code>hadoop.security.instrumentation.requires.admin</code> is set to TRUE
+   * it checks whether the current user is in the admin ACL. If the user is
+   * in the admin ACL it returns TRUE, otherwise it returns FALSE.
+   * </p>
+   *
+   * @param servletContext the servlet context.
+   * @param request the servlet request.
+   * @param response the servlet response.
+   * @return TRUE/FALSE based on the logic described above.
+   */
+  public static boolean isInstrumentationAccessAllowed(
+    ServletContext servletContext, HttpServletRequest request,
+    HttpServletResponse response) throws IOException {
+    Configuration conf =
+      (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
+
+    boolean access = true;
+    boolean adminAccess = conf.getBoolean(
+      CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
+      false);
+    if (adminAccess) {
+      access = hasAdministratorAccess(servletContext, request, response);
+    }
+    return access;
+  }
+
+  /**
+   * Does the user sending the HttpServletRequest have administrator access? If
+   * not, the response is modified to send an error to the user.
+   *
+   * @param servletContext the servlet context.
+   * @param request the servlet request.
+   * @param response used to send the error response if user does not have admin access.
+   * @return true if admin-authorized, false otherwise
+   * @throws IOException if sending the error response fails
+   */
+  public static boolean hasAdministratorAccess(
+      ServletContext servletContext, HttpServletRequest request,
+      HttpServletResponse response) throws IOException {
+    Configuration conf =
+        (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
+    // If there is no authorization, anybody has administrator access.
+    if (!conf.getBoolean(
+        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
+      return true;
+    }
+
+    String remoteUser = request.getRemoteUser();
+    if (remoteUser == null) {
+      response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
+                         "Unauthenticated users are not " +
+                         "authorized to access this page.");
+      return false;
+    }
+
+    if (servletContext.getAttribute(ADMINS_ACL) != null &&
+        !userHasAdministratorAccess(servletContext, remoteUser)) {
+      response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
+          + remoteUser + " is unauthorized to access this page.");
+      return false;
+    }
+
+    return true;
+  }
+
+  /**
+   * Get the admin ACLs from the given ServletContext and check if the given
+   * user is in the ACL.
+   *
+   * @param servletContext the context containing the admin ACL.
+   * @param remoteUser the remote user to check for.
+   * @return true if the user is present in the ACL, false if no ACL is set or
+   *         the user is not present
+   */
+  public static boolean userHasAdministratorAccess(ServletContext servletContext,
+      String remoteUser) {
+    AccessControlList adminsAcl = (AccessControlList) servletContext
+        .getAttribute(ADMINS_ACL);
+    UserGroupInformation remoteUserUGI =
+        UserGroupInformation.createRemoteUser(remoteUser);
+    return adminsAcl != null && adminsAcl.isUserAllowed(remoteUserUGI);
+  }
+
+  /**
+   * A very simple servlet to serve up a text representation of the current
+   * stack traces. It both returns the stacks to the caller and logs them.
+   * Currently the two stack dumps are captured sequentially, so they may not
+   * reflect exactly the same point in time.
+   */
+  public static class StackServlet extends HttpServlet {
+    private static final long serialVersionUID = -6284183679759467039L;
+
+    @Override
+    public void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+      if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+                                                     request, response)) {
+        return;
+      }
+      response.setContentType("text/plain; charset=UTF-8");
+      try (PrintStream out = new PrintStream(
+        response.getOutputStream(), false, "UTF-8")) {
+        Threads.printThreadInfo(out, "");
+        out.flush();
+      }
+      ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
+    }
+  }
+
+  /**
+   * A Servlet input filter that quotes all HTML active characters in the
+   * parameter names and values. The goal is to quote the characters to make
+   * all of the servlets resistant to cross-site scripting attacks.
+   */
+  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
+  public static class QuotingInputFilter implements Filter {
+    private FilterConfig config;
+
+    public static class RequestQuoter extends HttpServletRequestWrapper {
+      private final HttpServletRequest rawRequest;
+      public RequestQuoter(HttpServletRequest rawRequest) {
+        super(rawRequest);
+        this.rawRequest = rawRequest;
+      }
+
+      /**
+       * Return the set of parameter names, quoting each name.
+       */
+      @Override
+      public Enumeration<String> getParameterNames() {
+        return new Enumeration<String>() {
+          private Enumeration<String> rawIterator =
+            rawRequest.getParameterNames();
+          @Override
+          public boolean hasMoreElements() {
+            return rawIterator.hasMoreElements();
+          }
+
+          @Override
+          public String nextElement() {
+            return HtmlQuoting.quoteHtmlChars(rawIterator.nextElement());
+          }
+        };
+      }
+
+      /**
+       * Unquote the name and quote the value.
+       */
+      @Override
+      public String getParameter(String name) {
+        return HtmlQuoting.quoteHtmlChars(
+            rawRequest.getParameter(HtmlQuoting.unquoteHtmlChars(name)));
+      }
+
+      @Override
+      public String[] getParameterValues(String name) {
+        String unquoteName = HtmlQuoting.unquoteHtmlChars(name);
+        String[] unquoteValue = rawRequest.getParameterValues(unquoteName);
+        if (unquoteValue == null) {
+          return null;
+        }
+        String[] result = new String[unquoteValue.length];
+        for (int i = 0; i < result.length; ++i) {
+          result[i] = HtmlQuoting.quoteHtmlChars(unquoteValue[i]);
+        }
+        return result;
+      }
+
+      @Override
+      public Map<String, String[]> getParameterMap() {
+        Map<String, String[]> result = new HashMap<>();
+        Map<String, String[]> raw = rawRequest.getParameterMap();
+        for (Map.Entry<String,String[]> item: raw.entrySet()) {
+          String[] rawValue = item.getValue();
+          String[] cookedValue = new String[rawValue.length];
+          for (int i = 0; i < rawValue.length; ++i) {
+            cookedValue[i] = HtmlQuoting.quoteHtmlChars(rawValue[i]);
+          }
+          result.put(HtmlQuoting.quoteHtmlChars(item.getKey()), cookedValue);
+        }
+        return result;
+      }
+
+      /**
+       * Quote the url so that users specifying the HOST HTTP header
+       * can't inject attacks.
+       */
+      @Override
+      public StringBuffer getRequestURL() {
+        String url = rawRequest.getRequestURL().toString();
+        return new StringBuffer(HtmlQuoting.quoteHtmlChars(url));
+      }
+
+      /**
+       * Quote the server name so that users specifying the HOST HTTP header
+       * can't inject attacks.
+       */
+      @Override
+      public String getServerName() {
+        return HtmlQuoting.quoteHtmlChars(rawRequest.getServerName());
+      }
+    }
+
+    @Override
+    public void init(FilterConfig config) throws ServletException {
+      this.config = config;
+    }
+
+    @Override
+    public void destroy() {
+    }
+
+    @Override
+    public void doFilter(ServletRequest request,
+                         ServletResponse response,
+                         FilterChain chain
+                         ) throws IOException, ServletException {
+      HttpServletRequestWrapper quoted =
+        new RequestQuoter((HttpServletRequest) request);
+      HttpServletResponse httpResponse = (HttpServletResponse) response;
+
+      String mime = inferMimeType(request);
+      if (mime == null) {
+        httpResponse.setContentType("text/plain; charset=utf-8");
+      } else if (mime.startsWith("text/html")) {
+        // HTML with unspecified encoding, we want to
+        // force HTML with utf-8 encoding
+        // This is to avoid the following security issue:
+        // http://openmya.hacker.jp/hasegawa/security/utf7cs.html
+        httpResponse.setContentType("text/html; charset=utf-8");
+      } else if (mime.startsWith("application/xml")) {
+        httpResponse.setContentType("text/xml; charset=utf-8");
+      }
+      chain.doFilter(quoted, httpResponse);
+    }
+
+    /**
+     * Infer the mime type for the response based on the extension of the request
+     * URI. Returns null if unknown.
+     */
+    private String inferMimeType(ServletRequest request) {
+      String path = ((HttpServletRequest)request).getRequestURI();
+      ServletContext context = config.getServletContext();
+      return context.getMimeType(path);
+    }
+  }
+}
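
For context, a minimal sketch of standing this server up and discovering its
bound address. The builder calls mirror the InfoServer constructor below; the
"example" webapp name is a placeholder and assumes a matching
hbase-webapps/example directory on the classpath, so treat this as an
illustration rather than working code for any particular daemon:

    import java.net.InetSocketAddress;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.http.HttpServer;

    public class HttpServerSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Port 0 asks for an ephemeral port; alternatively setFindPort(true)
        // probes upward from a fixed port on BindException (see openListeners).
        HttpServer server = new HttpServer.Builder()
            .setName("example")  // hypothetical webapp name
            .addEndpoint(URI.create("http://localhost:0"))
            .setConf(conf)
            .build();
        server.start();
        // Connector 0 is the main listener; null would mean it is not open.
        InetSocketAddress addr = server.getConnectorAddress(0);
        System.out.println("Web UI at " + addr);
        server.stop();
      }
    }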

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
new file mode 100644
index 0000000..b5f4183
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
@@ -0,0 +1,112 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.http;
+
+import java.io.IOException;
+import java.net.URI;
+
+import javax.servlet.http.HttpServlet;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Create a Jetty embedded server to answer http requests. The primary goal
+ * is to serve up status information for the server.
+ * There are three contexts:
+ *   "/stacks/" -&gt; points to stack trace
+ *   "/static/" -&gt; points to common static files (src/hbase-webapps/static)
+ *   "/" -&gt; the jsp server code from (src/hbase-webapps/&lt;name&gt;)
+ */
+@InterfaceAudience.Private
+public class InfoServer {
+  
+  private static final String HBASE_APP_DIR = "hbase-webapps";
+  private final org.apache.hadoop.hbase.http.HttpServer httpServer;
+
+  /**
+   * Create a status server on the given port.
+   * The jsp scripts are taken from src/hbase-webapps/<code>name</code>.
+   * @param name The name of the server
+   * @param bindAddress address to bind to
+   * @param port The port to use on the server
+   * @param findPort whether the server should start at the given port and
+   * increment by 1 until it finds a free port.
+   * @throws IOException e
+   */
+  public InfoServer(String name, String bindAddress, int port, boolean findPort,
+      final Configuration c)
+  throws IOException {
+    HttpConfig httpConfig = new HttpConfig(c);
+    HttpServer.Builder builder =
+      new org.apache.hadoop.hbase.http.HttpServer.Builder();
+
+    builder.setName(name)
+      .addEndpoint(URI.create(httpConfig.getSchemePrefix() + bindAddress + ":" + port))
+      .setAppDir(HBASE_APP_DIR).setFindPort(findPort).setConf(c);
+    String logDir = System.getProperty("hbase.log.dir");
+    if (logDir != null) {
+      builder.setLogDir(logDir);
+    }
+    if (httpConfig.isSecure()) {
+      builder.keyPassword(HBaseConfiguration.getPassword(c, "ssl.server.keystore.keypassword", null))
+        .keyStore(c.get("ssl.server.keystore.location"),
+          HBaseConfiguration.getPassword(c, "ssl.server.keystore.password", null),
+          c.get("ssl.server.keystore.type", "jks"))
+        .trustStore(c.get("ssl.server.truststore.location"),
+          HBaseConfiguration.getPassword(c, "ssl.server.truststore.password", null),
+          c.get("ssl.server.truststore.type", "jks"));
+    }
+    // Enable SPNEGO authentication
+    if ("kerberos".equalsIgnoreCase(c.get(HttpServer.HTTP_UI_AUTHENTICATION, null))) {
+      builder.setUsernameConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY)
+        .setKeytabConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY)
+        .setKerberosNameRulesKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_KEY)
+        .setSignatureSecretFileKey(
+            HttpServer.HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY)
+        .setSecurityEnabled(true);
+    }
+    this.httpServer = builder.build();
+  }
+
+  public void addServlet(String name, String pathSpec,
+          Class<? extends HttpServlet> clazz) {
+      this.httpServer.addServlet(name, pathSpec, clazz);
+  }
+
+  public void setAttribute(String name, Object value) {
+    this.httpServer.setAttribute(name, value);
+  }
+
+  public void start() throws IOException {
+    this.httpServer.start();
+  }
+
+  @Deprecated
+  public int getPort() {
+    return this.httpServer.getPort();
+  }
+
+  public void stop() throws Exception {
+    this.httpServer.stop();
+  }
+
+}
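
As a usage sketch (the "master" name and 16010 port are illustrative; the
constructor resolves the webapp under hbase-webapps/&lt;name&gt;):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.http.InfoServer;

    public class InfoServerSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // findPort=true keeps incrementing from 16010 until a free port is found.
        InfoServer info = new InfoServer("master", "0.0.0.0", 16010, true, conf);
        info.start();
        // ... serve status pages until shutdown ...
        info.stop();
      }
    }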

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/NoCacheFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/NoCacheFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/NoCacheFilter.java
new file mode 100644
index 0000000..a1daf15
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/NoCacheFilter.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
+public class NoCacheFilter implements Filter {
+
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+  }
+
+  @Override
+  public void doFilter(ServletRequest req, ServletResponse res,
+                       FilterChain chain)
+    throws IOException, ServletException {
+    HttpServletResponse httpRes = (HttpServletResponse) res;
+    httpRes.setHeader("Cache-Control", "no-cache");
+    long now = System.currentTimeMillis();
+    httpRes.addDateHeader("Expires", now);
+    httpRes.addDateHeader("Date", now);
+    httpRes.addHeader("Pragma", "no-cache");
+    chain.doFilter(req, res);
+  }
+
+  @Override
+  public void destroy() {
+  }
+
+}
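
One way to apply this filter to every context is the addGlobalFilter hook that
initSpnego uses above; a sketch, assuming the filter needs no init parameters:

    // Given an org.apache.hadoop.hbase.http.HttpServer instance `server`:
    server.addGlobalFilter("NoCacheFilter",
        org.apache.hadoop.hbase.http.NoCacheFilter.class.getName(),
        new java.util.HashMap<String, String>());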

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ServerConfigurationKeys.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ServerConfigurationKeys.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ServerConfigurationKeys.java
new file mode 100644
index 0000000..4ae4a2f
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/ServerConfigurationKeys.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+
+/** 
+ * This interface contains constants for configuration keys used
+ * in the hbase http server code.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface ServerConfigurationKeys {
+
+  /** Enable/Disable SSL for the http server */
+  public static final String HBASE_SSL_ENABLED_KEY = "hbase.ssl.enabled";
+
+  public static final boolean HBASE_SSL_ENABLED_DEFAULT = false;
+
+  /** Enable/Disable aliases serving from jetty */
+  public static final String HBASE_JETTY_LOGS_SERVE_ALIASES =
+      "hbase.jetty.logs.serve.aliases";
+
+  public static final boolean DEFAULT_HBASE_JETTY_LOGS_SERVE_ALIASES =
+      true;
+
+  public static final String HBASE_HTTP_STATIC_USER = "hbase.http.staticuser.user";
+
+  public static final String DEFAULT_HBASE_HTTP_STATIC_USER = "dr.stack";
+
+}
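
These constants are read with the usual Configuration accessors; for example:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.http.ServerConfigurationKeys;

    public class KeysSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        boolean ssl = conf.getBoolean(
            ServerConfigurationKeys.HBASE_SSL_ENABLED_KEY,
            ServerConfigurationKeys.HBASE_SSL_ENABLED_DEFAULT);
        String staticUser = conf.get(
            ServerConfigurationKeys.HBASE_HTTP_STATIC_USER,
            ServerConfigurationKeys.DEFAULT_HBASE_HTTP_STATIC_USER);
        System.out.println("ssl=" + ssl + ", staticUser=" + staticUser);
      }
    }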

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/conf/ConfServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/conf/ConfServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/conf/ConfServlet.java
new file mode 100644
index 0000000..d9aa7b6
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/conf/ConfServlet.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.conf;
+
+import java.io.IOException;
+import java.io.Writer;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.http.HttpServer;
+
+/**
+ * A servlet to print out the running configuration data.
+ */
+@InterfaceAudience.LimitedPrivate({"HBase"})
+@InterfaceStability.Unstable
+public class ConfServlet extends HttpServlet {
+  private static final long serialVersionUID = 1L;
+
+  private static final String FORMAT_JSON = "json";
+  private static final String FORMAT_XML = "xml";
+  private static final String FORMAT_PARAM = "format";
+
+  /**
+   * Return the Configuration of the daemon hosting this servlet.
+   * This is populated when the HttpServer starts.
+   */
+  private Configuration getConfFromContext() {
+    Configuration conf = (Configuration)getServletContext().getAttribute(
+        HttpServer.CONF_CONTEXT_ATTRIBUTE);
+    assert conf != null;
+    return conf;
+  }
+
+  @Override
+  public void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+
+    if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+                                                   request, response)) {
+      return;
+    }
+
+    String format = request.getParameter(FORMAT_PARAM);
+    if (null == format) {
+      format = FORMAT_XML;
+    }
+
+    if (FORMAT_XML.equals(format)) {
+      response.setContentType("text/xml; charset=utf-8");
+    } else if (FORMAT_JSON.equals(format)) {
+      response.setContentType("application/json; charset=utf-8");
+    }
+
+    Writer out = response.getWriter();
+    try {
+      writeResponse(getConfFromContext(), out, format);
+    } catch (BadFormatException bfe) {
+      response.sendError(HttpServletResponse.SC_BAD_REQUEST, bfe.getMessage());
+    }
+    out.close();
+  }
+
+  /**
+   * Guts of the servlet - extracted for easy testing.
+   */
+  static void writeResponse(Configuration conf, Writer out, String format)
+    throws IOException, BadFormatException {
+    if (FORMAT_JSON.equals(format)) {
+      Configuration.dumpConfiguration(conf, out);
+    } else if (FORMAT_XML.equals(format)) {
+      conf.writeXml(out);
+    } else {
+      throw new BadFormatException("Bad format: " + format);
+    }
+  }
+
+  public static class BadFormatException extends Exception {
+    private static final long serialVersionUID = 1L;
+
+    public BadFormatException(String msg) {
+      super(msg);
+    }
+  }
+
+}
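
Because writeResponse is static and package-private, it can be exercised
directly from the same package; a sketch of such a check (the key name is
hypothetical):

    package org.apache.hadoop.hbase.http.conf;

    import java.io.StringWriter;

    import org.apache.hadoop.conf.Configuration;

    public class ConfServletSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("example.key", "example.value");  // hypothetical key
        StringWriter out = new StringWriter();
        ConfServlet.writeResponse(conf, out, "xml");
        // The XML dump should contain the key that was just set.
        System.out.println(out.toString().contains("example.key"));
      }
    }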

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java
new file mode 100644
index 0000000..3abad3a
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java
@@ -0,0 +1,240 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.http.jmx;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.lang.management.ManagementFactory;
+
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.ObjectName;
+import javax.management.openmbean.CompositeData;
+import javax.management.openmbean.TabularData;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.hbase.util.JSONBean;
+
+/*
+ * This servlet is based on the JMXProxyServlet from Tomcat 7.0.14. It has
+ * been rewritten to be read-only and to output in a JSON format, so it is not
+ * really that close to the original.
+ */
+/**
+ * Provides read-only web access to JMX.
+ * <p>
+ * This servlet generally will be placed under the /jmx URL for each
+ * HttpServer.  It provides read only
+ * access to JMX metrics.  The optional <code>qry</code> parameter
+ * may be used to query only a subset of the JMX Beans.  This query
+ * functionality is provided through the
+ * {@link MBeanServer#queryNames(ObjectName, javax.management.QueryExp)}
+ * method.
+ * </p>
+ * <p>
+ * For example <code>http://.../jmx?qry=Hadoop:*</code> will return
+ * all hadoop metrics exposed through JMX.
+ * </p>
+ * <p>
+ * The optional <code>get</code> parameter is used to query a specific
+ * attribute of a JMX bean.  The format of the URL is
+ * <code>http://.../jmx?get=MXBeanName::AttributeName</code>
+ * </p>
+ * <p>
+ * For example 
+ * <code>
+ * http://../jmx?get=Hadoop:service=NameNode,name=NameNodeInfo::ClusterId
+ * </code> will return the cluster id of the namenode mxbean.
+ * </p>
+ * <p>
+ * If the <code>qry</code> or the <code>get</code> parameter is not formatted 
+ * correctly then a 400 BAD REQUEST http response code will be returned. 
+ * </p>
+ * <p>
+ * If a resource such as an mbean or attribute cannot be found,
+ * a 404 SC_NOT_FOUND http response code will be returned. 
+ * </p>
+ * <p>
+ * The return format is JSON and in the form
+ * </p>
+ *  <pre><code>
+ *  {
+ *    "beans" : [
+ *      {
+ *        "name":"bean-name"
+ *        ...
+ *      }
+ *    ]
+ *  }
+ *  </code></pre>
+ *  <p>
+ *  The servlet attempts to convert the JMX beans into JSON. Each
+ *  bean's attributes will be converted to a JSON object member.
+ *  
+ *  If the attribute is a boolean, a number, a string, or an array
+ *  it will be converted to the JSON equivalent. 
+ *  
+ *  If the value is a {@link CompositeData} then it will be converted
+ *  to a JSON object with the keys as the name of the JSON member and
+ *  the value is converted following these same rules.
+ *  
+ *  If the value is a {@link TabularData} then it will be converted
+ *  to an array of the {@link CompositeData} elements that it contains.
+ *  
+ *  All other objects will be converted to a string and output as such.
+ *  
+ *  The bean's name and modelerType will be returned for all beans.
+ *
+ *  The optional parameter "callback" may be used to request a JSONP response.
+ * </p>
+ *  
+ */
+public class JMXJsonServlet extends HttpServlet {
+  private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class);
+
+  private static final long serialVersionUID = 1L;
+
+  private static final String CALLBACK_PARAM = "callback";
+  /**
+   * If the query string includes 'description', we emit bean and attribute descriptions
+   * to the output iff they are not null and iff the description differs from the
+   * attribute name; e.g. specify a URL like so: /jmx?description=true
+   */
+  private static final String INCLUDE_DESCRIPTION = "description";
+
+  /**
+   * MBean server.
+   */
+  protected transient MBeanServer mBeanServer;
+
+  protected transient JSONBean jsonBeanWriter;
+
+  /**
+   * Initialize this servlet.
+   */
+  @Override
+  public void init() throws ServletException {
+    // Retrieve the MBean server
+    mBeanServer = ManagementFactory.getPlatformMBeanServer();
+    this.jsonBeanWriter = new JSONBean();
+  }
+
+  /**
+   * Process a GET request for the specified resource.
+   * 
+   * @param request
+   *          The servlet request we are processing
+   * @param response
+   *          The servlet response we are creating
+   */
+  @Override
+  public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+    try {
+      if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), request, response)) {
+        return;
+      }
+      String jsonpcb = null;
+      PrintWriter writer = null;
+      JSONBean.Writer beanWriter = null;
+      try {
+        jsonpcb = checkCallbackName(request.getParameter(CALLBACK_PARAM));
+        writer = response.getWriter();
+        beanWriter = this.jsonBeanWriter.open(writer);
+ 
+        // "callback" parameter implies JSONP outpout
+        if (jsonpcb != null) {
+          response.setContentType("application/javascript; charset=utf8");
+          writer.write(jsonpcb + "(");
+        } else {
+          response.setContentType("application/json; charset=utf8");
+        }
+        // Should we output description on each attribute and bean?
+        String tmpStr = request.getParameter(INCLUDE_DESCRIPTION);
+        boolean description = tmpStr != null && tmpStr.length() > 0;
+
+        // query per mbean attribute
+        String getmethod = request.getParameter("get");
+        if (getmethod != null) {
+          String[] splitStrings = getmethod.split("\\:\\:");
+          if (splitStrings.length != 2) {
+            beanWriter.write("result", "ERROR");
+            beanWriter.write("message", "query format is not as expected.");
+            beanWriter.flush();
+            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
+            return;
+          }
+          if (beanWriter.write(this.mBeanServer, new ObjectName(splitStrings[0]),
+              splitStrings[1], description) != 0) {
+            beanWriter.flush();
+            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
+          }
+          return;
+        }
+
+        // query per mbean
+        String qry = request.getParameter("qry");
+        if (qry == null) {
+          qry = "*:*";
+        }
+        if (beanWriter.write(this.mBeanServer, new ObjectName(qry), null, description) != 0) {
+          beanWriter.flush();
+          response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
+        }
+      } finally {
+        if (beanWriter != null) {
+          beanWriter.close();
+        }
+        // Guard against writer being null if response.getWriter() threw.
+        if (writer != null) {
+          if (jsonpcb != null) {
+            writer.write(");");
+          }
+          writer.close();
+        }
+      }
+    } catch (IOException e) {
+      LOG.error("Caught an exception while processing JMX request", e);
+      response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    } catch (MalformedObjectNameException e) {
+      LOG.error("Caught an exception while processing JMX request", e);
+      response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+    }
+  }
+
+  /**
+   * Verifies that the callback property, if provided, is purely alphanumeric.
+   * This prevents a malicious callback name (that is javascript code) from being
+   * returned by the UI to an unsuspecting user.
+   *
+   * @param callbackName The callback name, can be null.
+   * @return The callback name
+   * @throws IOException If the name is disallowed.
+   */
+  private String checkCallbackName(String callbackName) throws IOException {
+    if (null == callbackName) {
+      return null;
+    }
+    if (callbackName.matches("[A-Za-z0-9_]+")) {
+      return callbackName;
+    }
+    throw new IOException("'callback' must be alphanumeric");
+  }
+}
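
A client-side sketch of the query forms described in the class javadoc (host
and port are placeholders for a live daemon's web UI address):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class JmxQuerySketch {
      public static void main(String[] args) throws Exception {
        // Fetch one attribute of one bean via the get=Bean::Attribute form.
        URL url = new URL(
            "http://localhost:16010/jmx?get=java.lang:type=Memory::HeapMemoryUsage");
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);
          }
        }
      }
    }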

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/jmx/package-info.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/jmx/package-info.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/jmx/package-info.java
new file mode 100644
index 0000000..324cc2d
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/jmx/package-info.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package provides access to JMX primarily through the
+ * {@link org.apache.hadoop.hbase.http.jmx.JMXJsonServlet} class.
+ * <p>
+ * Copied from hadoop source code.<br>
+ * See https://issues.apache.org/jira/browse/HADOOP-10232 for the reasons why.
+ * </p>
+ */
+package org.apache.hadoop.hbase.http.jmx;

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java
new file mode 100644
index 0000000..bce3a07
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.lib;
+
+import java.io.IOException;
+import java.security.Principal;
+import java.util.HashMap;
+
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.http.FilterContainer;
+import org.apache.hadoop.hbase.http.FilterInitializer;
+
+import javax.servlet.Filter;
+
+import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.HBASE_HTTP_STATIC_USER;
+import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.DEFAULT_HBASE_HTTP_STATIC_USER;
+
+/**
+ * Provides a servlet filter that pretends to authenticate a fake static user
+ * ("dr.stack" by default) so that the web UI is usable for a secure cluster
+ * without authentication.
+ */
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
+public class StaticUserWebFilter extends FilterInitializer {
+  static final String DEPRECATED_UGI_KEY = "dfs.web.ugi";
+
+  private static final Log LOG = LogFactory.getLog(StaticUserWebFilter.class);
+
+  static class User implements Principal {
+    private final String name;
+    public User(String name) {
+      this.name = name;
+    }
+    @Override
+    public String getName() {
+      return name;
+    }
+    @Override
+    public int hashCode() {
+      return name.hashCode();
+    }
+    @Override
+    public boolean equals(Object other) {
+      if (other == this) {
+        return true;
+      } else if (other == null || other.getClass() != getClass()) {
+        return false;
+      }
+      return ((User) other).name.equals(name);
+    }
+    @Override
+    public String toString() {
+      return name;
+    }    
+  }
+
+  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
+  public static class StaticUserFilter implements Filter {
+    private User user;
+    private String username;
+
+    @Override
+    public void destroy() {
+      // NOTHING
+    }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+                         FilterChain chain
+                         ) throws IOException, ServletException {
+      HttpServletRequest httpRequest = (HttpServletRequest) request;
+      // if the user is already authenticated, don't override it
+      if (httpRequest.getRemoteUser() != null) {
+        chain.doFilter(request, response);
+      } else {
+        HttpServletRequestWrapper wrapper = 
+            new HttpServletRequestWrapper(httpRequest) {
+          @Override
+          public Principal getUserPrincipal() {
+            return user;
+          }
+          @Override
+          public String getRemoteUser() {
+            return username;
+          }
+        };
+        chain.doFilter(wrapper, response);
+      }
+    }
+
+    @Override
+    public void init(FilterConfig conf) throws ServletException {
+      this.username = conf.getInitParameter(HBASE_HTTP_STATIC_USER);
+      this.user = new User(username);
+    }
+    
+  }
+
+  @Override
+  public void initFilter(FilterContainer container, Configuration conf) {
+    HashMap<String, String> options = new HashMap<>();
+    
+    String username = getUsernameFromConf(conf);
+    options.put(HBASE_HTTP_STATIC_USER, username);
+
+    container.addFilter("static_user_filter", 
+                        StaticUserFilter.class.getName(), 
+                        options);
+  }
+
+  /**
+   * Retrieve the static username from the configuration.
+   */
+  static String getUsernameFromConf(Configuration conf) {
+    String oldStyleUgi = conf.get(DEPRECATED_UGI_KEY);
+    if (oldStyleUgi != null) {
+      // We can't use the normal configuration deprecation mechanism here
+      // since we need to split out the username from the configured UGI.
+      LOG.warn(DEPRECATED_UGI_KEY + " should not be used. Instead, use " + 
+          HBASE_HTTP_STATIC_USER + ".");
+      String[] parts = oldStyleUgi.split(",");
+      return parts[0];
+    } else {
+      return conf.get(HBASE_HTTP_STATIC_USER,
+        DEFAULT_HBASE_HTTP_STATIC_USER);
+    }
+  }
+
+}
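
A sketch of the deprecated-key fallback in getUsernameFromConf (same-package
access, since the method is package-private; the UGI value is illustrative):

    package org.apache.hadoop.hbase.http.lib;

    import org.apache.hadoop.conf.Configuration;

    public class StaticUserSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Old style: the username is the first element of the comma-separated UGI.
        conf.set(StaticUserWebFilter.DEPRECATED_UGI_KEY, "hbase,supergroup");
        System.out.println(StaticUserWebFilter.getUsernameFromConf(conf));  // hbase
      }
    }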

http://git-wip-us.apache.org/repos/asf/hbase/blob/851f239f/hbase-server/src/main/java/org/apache/hadoop/hbase/http/lib/package-info.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/lib/package-info.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/lib/package-info.java
new file mode 100644
index 0000000..7bb9a0f
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/lib/package-info.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * <p>
+ * This package provides user-selectable (via configuration) classes that add
+ * functionality to the web UI. They are configured as a list of classes in the
+ * configuration parameter <b>hadoop.http.filter.initializers</b>.
+ * </p>
+ * <ul>
+ * <li> <b>StaticUserWebFilter</b> - An authorization plugin that makes all
+ * users a static configured user.
+ * </ul>
+ * <p>
+ * Copied from hadoop source code.<br>
+ * See https://issues.apache.org/jira/browse/HADOOP-10232 for the reasons why.
+ * </p>
+ */
+@InterfaceAudience.LimitedPrivate({"HBase"})
+@InterfaceStability.Unstable
+package org.apache.hadoop.hbase.http.lib;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;


[03/13] hbase git commit: Revert "HBASE-19119 hbase-http shouldn't have native profile"

Posted by bu...@apache.org.
Revert "HBASE-19119 hbase-http shouldn't have native profile"

This reverts commit a79b66b32b4e3eb98fb1c03cb545b8edeab26647.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6ea42884
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6ea42884
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6ea42884

Branch: refs/heads/HBASE-19124
Commit: 6ea42884cd2f121e22bf66951a3c033a24ffe141
Parents: fb93767
Author: Sean Busbey <bu...@apache.org>
Authored: Wed Nov 1 09:18:57 2017 -0500
Committer: Sean Busbey <bu...@apache.org>
Committed: Wed Nov 1 19:13:09 2017 -0500

----------------------------------------------------------------------
 hbase-http/pom.xml | 32 ++++++++++++++++++++++++++++++++
 1 file changed, 32 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/6ea42884/hbase-http/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml
index 13fae09..9bd6fcd 100644
--- a/hbase-http/pom.xml
+++ b/hbase-http/pom.xml
@@ -365,6 +365,38 @@
       </properties>
     </profile>
     <!-- Special builds -->
+    <profile>
+      <id>native</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>make</id>
+                <phase>compile</phase>
+                <goals><goal>run</goal></goals>
+                <configuration>
+                  <target>
+                    <mkdir dir="${project.build.directory}/native"/>
+                    <exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
+                      <arg line="${basedir}/src/main/native -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
+                    </exec>
+                    <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
+                      <arg line="VERBOSE=1"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
     <!-- Profiles for building against different hadoop versions -->
     <!-- There are a lot of common dependencies used here, should investigate
     if we can combine these profiles somehow -->


[02/13] hbase git commit: HBASE-18925 Update mockito dependency from mockito-all:1.10.19 to mockito-core:2.1.0 for JDK8 support.

Posted by bu...@apache.org.
HBASE-18925 Update mockito dependency from mockito-all:1.10.19 to mockito-core:2.1.0 for JDK8 support.

Last mockito-all release was in Dec'14. Mockito-core has had many releases since then.

From mockito's site:
- "Mockito does not produce the mockito-all artifact anymore ; this one was primarily
aimed at ant users, and contained other dependencies. We felt it was time to move on
and remove such artifacts as they cause problems in dependency management system like
maven or gradle."
- anyX() and any(SomeType.class) matchers now reject nulls and check type.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/71a55dcd
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/71a55dcd
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/71a55dcd

Branch: refs/heads/HBASE-19124
Commit: 71a55dcd6418fbff9fb90b94a855897077ac6e4d
Parents: b3e438b
Author: Apekshit Sharma <ap...@apache.org>
Authored: Mon Oct 2 22:53:34 2017 -0700
Committer: Apekshit Sharma <ap...@apache.org>
Committed: Wed Nov 1 14:21:38 2017 -0700

----------------------------------------------------------------------
 hbase-client/pom.xml                            |   4 +-
 .../hadoop/hbase/client/TestClientScanner.java  |  35 +++----
 .../client/TestHTableMultiplexerViaMocks.java   |   2 +-
 .../hbase/client/TestSnapshotFromAdmin.java     |  10 +-
 .../hbase/security/TestHBaseSaslRpcClient.java  |   4 +-
 hbase-common/pom.xml                            |   2 +-
 hbase-endpoint/pom.xml                          |   2 +-
 hbase-examples/pom.xml                          |   2 +-
 .../hbase/mapreduce/TestMapReduceExamples.java  |  16 +--
 hbase-http/pom.xml                              |   2 +-
 .../apache/hadoop/hbase/http/HttpServer.java    |  13 ++-
 .../hadoop/hbase/http/TestHttpServer.java       |   6 +-
 hbase-mapreduce/pom.xml                         |   2 +-
 .../apache/hadoop/hbase/mapred/TestDriver.java  |   2 +-
 .../hbase/mapred/TestGroupingTableMap.java      |   2 +-
 .../hbase/mapred/TestIdentityTableMap.java      |   2 +-
 .../hadoop/hbase/mapred/TestRowCounter.java     |   4 +-
 .../mapreduce/TestGroupingTableMapper.java      |   2 +-
 .../hbase/mapreduce/TestImportExport.java       |   6 +-
 .../TestMultiTableSnapshotInputFormatImpl.java  |   7 +-
 .../hadoop/hbase/mapreduce/TestWALPlayer.java   |   6 +-
 hbase-metrics-api/pom.xml                       |   2 +-
 hbase-metrics/pom.xml                           |   2 +-
 hbase-rest/pom.xml                              |   2 +-
 .../rest/client/TestRemoteAdminRetries.java     |  18 ++--
 .../rest/client/TestRemoteHTableRetries.java    |  28 ++---
 hbase-rsgroup/pom.xml                           |   2 +-
 .../balancer/TestRSGroupBasedLoadBalancer.java  |   4 +-
 hbase-server/pom.xml                            |   2 +-
 .../hadoop/hbase/TestHBaseTestingUtility.java   |  27 ++---
 .../hbase/TestMetaTableAccessorNoCluster.java   |   4 +-
 .../hadoop/hbase/TestMetaTableLocator.java      |   6 +-
 .../TestZooKeeperTableArchiveClient.java        |   2 +-
 .../hbase/client/HConnectionTestingUtility.java |   4 +-
 .../TestForeignExceptionDispatcher.java         |   8 +-
 .../TestTimeoutExceptionInjector.java           |   6 +-
 .../hbase/ipc/TestSimpleRpcScheduler.java       |  40 ++------
 .../hbase/master/TestSplitLogManager.java       |   2 +-
 .../master/assignment/MockMasterServices.java   |  22 ++--
 .../hbase/master/cleaner/TestCleanerChore.java  |  10 +-
 .../normalizer/TestSimpleRegionNormalizer.java  |  14 ++-
 .../hadoop/hbase/procedure/TestProcedure.java   |   4 +-
 .../procedure/TestProcedureCoordinator.java     |  22 ++--
 .../hbase/procedure/TestProcedureMember.java    |  36 +++----
 .../hadoop/hbase/procedure/TestZKProcedure.java |   4 +-
 .../procedure/TestZKProcedureControllers.java   |   7 +-
 .../quotas/TestFileSystemUtilizationChore.java  |  16 +--
 .../TestMasterSpaceQuotaObserverWithMocks.java  |   2 +-
 .../TestNamespaceQuotaViolationStore.java       |   4 +-
 ...SpaceQuotaViolationPolicyRefresherChore.java |   4 +-
 .../quotas/TestTableQuotaViolationStore.java    |   4 +-
 .../TestTableSpaceQuotaViolationNotifier.java   |  15 +--
 .../regionserver/StatefulStoreMockMaker.java    |  30 ++----
 .../hadoop/hbase/regionserver/TestBulkLoad.java | 102 +++++++++----------
 .../hbase/regionserver/TestCompaction.java      |  19 ++--
 .../TestCompactionArchiveIOException.java       |   2 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  13 ++-
 .../hadoop/hbase/regionserver/TestHStore.java   |   2 +-
 .../TestRegionServerRegionSpaceUseReport.java   |  12 +--
 .../regionserver/TestStripeStoreEngine.java     |  10 +-
 .../regionserver/compactions/TestCompactor.java |   8 +-
 .../compactions/TestDateTieredCompactor.java    |   3 +-
 .../compactions/TestStripeCompactionPolicy.java |  21 ++--
 .../compactions/TestStripeCompactor.java        |   3 +-
 .../regionserver/wal/AbstractTestWALReplay.java |   3 +-
 .../hadoop/hbase/tool/TestCanaryTool.java       |  35 ++++---
 .../TestLoadIncrementalHFilesSplitRecovery.java |   2 +-
 .../apache/hadoop/hbase/wal/TestWALSplit.java   |   2 +-
 .../hbase-shaded-check-invariants/pom.xml       |   2 +-
 pom.xml                                         |  22 ++--
 src/main/asciidoc/_chapters/unit_testing.adoc   |   4 +-
 71 files changed, 345 insertions(+), 404 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-client/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index a8e73c7..675e813 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -192,7 +192,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -285,7 +285,7 @@
             <exclusion>
               <groupId>com.sun.jersey</groupId>
               <artifactId>jersey-json</artifactId>
-            </exclusion> 
+            </exclusion>
             <exclusion>
               <groupId>javax.servlet</groupId>
               <artifactId>servlet-api</artifactId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
index 9d21d1a..3f0c869 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
@@ -138,13 +138,12 @@ public class TestClientScanner {
     RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
 
     Mockito.when(rpcFactory.<Result[]> newCaller()).thenReturn(caller);
-    Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class),
+    Mockito.when(caller.callWithoutRetries(Mockito.any(),
       Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {
         private int count = 0;
         @Override
         public Result[] answer(InvocationOnMock invocation) throws Throwable {
-            ScannerCallableWithReplicas callable = invocation.getArgumentAt(0,
-                ScannerCallableWithReplicas.class);
+            ScannerCallableWithReplicas callable = invocation.getArgument(0);
           switch (count) {
             case 0: // initialize
               count++;
@@ -176,7 +175,7 @@ public class TestClientScanner {
       // One for fetching the results
       // One for fetching empty results and quit as we do not have moreResults hint.
       inOrder.verify(caller, Mockito.times(2)).callWithoutRetries(
-          Mockito.any(RetryingCallable.class), Mockito.anyInt());
+          Mockito.any(), Mockito.anyInt());
 
       assertEquals(1, scanner.cache.size());
       Result r = scanner.cache.poll();
@@ -199,13 +198,12 @@ public class TestClientScanner {
     RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
 
     Mockito.when(rpcFactory.<Result[]> newCaller()).thenReturn(caller);
-    Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class),
+    Mockito.when(caller.callWithoutRetries(Mockito.any(),
       Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {
         private int count = 0;
         @Override
         public Result[] answer(InvocationOnMock invocation) throws Throwable {
-          ScannerCallableWithReplicas callable = invocation.getArgumentAt(0,
-              ScannerCallableWithReplicas.class);
+          ScannerCallableWithReplicas callable = invocation.getArgument(0);
           switch (count) {
             case 0: // initialize
               count++;
@@ -235,7 +233,7 @@ public class TestClientScanner {
       scanner.loadCache();
 
       inOrder.verify(caller, Mockito.times(1)).callWithoutRetries(
-          Mockito.any(RetryingCallable.class), Mockito.anyInt());
+          Mockito.any(), Mockito.anyInt());
 
       assertEquals(1, scanner.cache.size());
       Result r = scanner.cache.poll();
@@ -260,13 +258,12 @@ public class TestClientScanner {
     RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
 
     Mockito.when(rpcFactory.<Result[]> newCaller()).thenReturn(caller);
-    Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class),
+    Mockito.when(caller.callWithoutRetries(Mockito.any(),
       Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {
         private int count = 0;
         @Override
         public Result[] answer(InvocationOnMock invocation) throws Throwable {
-          ScannerCallableWithReplicas callable = invocation.getArgumentAt(0,
-              ScannerCallableWithReplicas.class);
+          ScannerCallableWithReplicas callable = invocation.getArgument(0);
           switch (count) {
             case 0: // initialize
               count++;
@@ -296,7 +293,7 @@ public class TestClientScanner {
       scanner.loadCache();
 
       inOrder.verify(caller, Mockito.times(1)).callWithoutRetries(
-          Mockito.any(RetryingCallable.class), Mockito.anyInt());
+          Mockito.any(), Mockito.anyInt());
 
       assertEquals(3, scanner.cache.size());
       Result r = scanner.cache.poll();
@@ -333,13 +330,12 @@ public class TestClientScanner {
     RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
 
     Mockito.when(rpcFactory.<Result[]> newCaller()).thenReturn(caller);
-    Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class),
+    Mockito.when(caller.callWithoutRetries(Mockito.any(),
       Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {
         private int count = 0;
         @Override
         public Result[] answer(InvocationOnMock invocation) throws Throwable {
-          ScannerCallableWithReplicas callable = invocation.getArgumentAt(0,
-              ScannerCallableWithReplicas.class);
+          ScannerCallableWithReplicas callable = invocation.getArgument(0);
           switch (count) {
             case 0: // initialize
               count++;
@@ -369,7 +365,7 @@ public class TestClientScanner {
       scanner.loadCache();
 
       inOrder.verify(caller, Mockito.times(1)).callWithoutRetries(
-          Mockito.any(RetryingCallable.class), Mockito.anyInt());
+          Mockito.any(), Mockito.anyInt());
 
       assertEquals(1, scanner.cache.size());
       Result r = scanner.cache.poll();
@@ -398,13 +394,12 @@ public class TestClientScanner {
     RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
 
     Mockito.when(rpcFactory.<Result[]> newCaller()).thenReturn(caller);
-    Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class),
+    Mockito.when(caller.callWithoutRetries(Mockito.any(),
         Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {
           private int count = 0;
           @Override
           public Result[] answer(InvocationOnMock invocation) throws Throwable {
-            ScannerCallableWithReplicas callable = invocation.getArgumentAt(0,
-                ScannerCallableWithReplicas.class);
+            ScannerCallableWithReplicas callable = invocation.getArgument(0);
             switch (count) {
               case 0: // initialize
                 count++;
@@ -436,7 +431,7 @@ public class TestClientScanner {
       scanner.loadCache();
 
       inOrder.verify(caller, Mockito.times(2)).callWithoutRetries(
-          Mockito.any(RetryingCallable.class), Mockito.anyInt());
+          Mockito.any(), Mockito.anyInt());
 
       assertEquals(2, scanner.cache.size());
       Result r = scanner.cache.poll();
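
Two Mockito 2 migrations recur throughout this patch and are both visible in the TestClientScanner hunks above: the untyped any() matcher replaces any(Foo.class), and InvocationOnMock.getArgument(int) replaces getArgumentAt(int, Class) as well as getArguments()[i]. A minimal sketch of both, using a hypothetical ListSink interface rather than anything from this patch:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

public class MatcherMigrationSketch {
  interface ListSink { void accept(String key, byte[] value); }

  public static void main(String[] args) {
    ListSink sink = mock(ListSink.class);
    doAnswer(invocation -> {
      // Mockito 2: getArgument(int) is generic, so no getArgumentAt(0, String.class)
      String key = invocation.getArgument(0);
      System.out.println("saw key " + key);
      return null;
    }).when(sink).accept(any(), any()); // untyped any() infers the parameter type

    sink.accept("row", new byte[0]);
    verify(sink).accept(any(), any());
  }
}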

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java
index 7e68c21..ef59eed 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java
@@ -44,7 +44,7 @@ public class TestHTableMultiplexerViaMocks {
     mockConnection = mock(ClusterConnection.class);
 
     // Call the real put(TableName, Put, int) method
-    when(mockMultiplexer.put(any(TableName.class), any(Put.class), anyInt())).thenCallRealMethod();
+    when(mockMultiplexer.put(any(TableName.class), any(), anyInt())).thenCallRealMethod();
 
     // Return the mocked ClusterConnection
     when(mockMultiplexer.getConnection()).thenReturn(mockConnection);
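
Typed and untyped matchers can be mixed in a single stubbing, which is why any(TableName.class) survives here next to the new untyped arguments; keeping the typed form is presumably a readability choice. A sketch with a hypothetical interface:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class MixedMatcherSketch {
  interface MultiplexerLike {
    boolean put(String table, byte[] row, int retries);
  }

  public static void main(String[] args) {
    MultiplexerLike m = mock(MultiplexerLike.class);
    // every argument position uses a matcher, typed or not, so the mix is legal
    when(m.put(any(String.class), any(), anyInt())).thenReturn(true);
    System.out.println(m.put("t", new byte[0], 3)); // true
  }
}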

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
index 177b614..b2c011c 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
@@ -30,9 +30,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -103,14 +101,14 @@ public class TestSnapshotFromAdmin {
     Mockito
     .when(
       mockMaster.snapshot((RpcController) Mockito.any(),
-        Mockito.any(SnapshotRequest.class))).thenReturn(response);
+        Mockito.any())).thenReturn(response);
     // setup the response
     IsSnapshotDoneResponse.Builder builder = IsSnapshotDoneResponse.newBuilder();
     builder.setDone(false);
     // first five times, we return false, last we get success
     Mockito.when(
       mockMaster.isSnapshotDone((RpcController) Mockito.any(),
-        Mockito.any(IsSnapshotDoneRequest.class))).thenReturn(builder.build(), builder.build(),
+        Mockito.any())).thenReturn(builder.build(), builder.build(),
           builder.build(), builder.build(), builder.build(), builder.setDone(true).build());
 
     // setup the admin and run the test
@@ -162,12 +160,12 @@ public class TestSnapshotFromAdmin {
     Mockito.when(mockConnection.getKeepAliveMasterService()).thenReturn(master);
     SnapshotResponse response = SnapshotResponse.newBuilder().setExpectedTimeout(0).build();
     Mockito.when(
-      master.snapshot((RpcController) Mockito.any(), Mockito.any(SnapshotRequest.class)))
+      master.snapshot((RpcController) Mockito.any(), Mockito.any()))
         .thenReturn(response);
     IsSnapshotDoneResponse doneResponse = IsSnapshotDoneResponse.newBuilder().setDone(true).build();
     Mockito.when(
       master.isSnapshotDone((RpcController) Mockito.any(),
-          Mockito.any(IsSnapshotDoneRequest.class))).thenReturn(doneResponse);
+          Mockito.any())).thenReturn(doneResponse);
 
       // make sure that we can use valid names
     admin.snapshot(new SnapshotDescription("snapshot", TableName.valueOf(name.getMethodName())));
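
The isSnapshotDone stubbing above ("first five times, we return false, last we get success") leans on consecutive stubbing: thenReturn(a, b, c) answers the values in order across successive calls, and the last value repeats from then on. In miniature, with a hypothetical Poller interface:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class ConsecutiveStubbingSketch {
  interface Poller { boolean isDone(); }

  public static void main(String[] args) {
    Poller poller = mock(Poller.class);
    when(poller.isDone()).thenReturn(false, false, true);
    System.out.println(poller.isDone()); // false
    System.out.println(poller.isDone()); // false
    System.out.println(poller.isDone()); // true
    System.out.println(poller.isDone()); // true; the last stubbed value repeats
  }
}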

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
index 33f7872..282dc28 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
@@ -113,8 +113,8 @@ public class TestHBaseSaslRpcClient {
     final SaslClientCallbackHandler saslClCallbackHandler = new SaslClientCallbackHandler(token);
     saslClCallbackHandler.handle(callbackArray);
     verify(nameCallback).setName(anyString());
-    verify(realmCallback).setText(anyString());
-    verify(passwordCallback).setPassword(any(char[].class));
+    verify(realmCallback).setText(any());
+    verify(passwordCallback).setPassword(any());
   }
 
   @Test
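
Mockito 2 also narrowed the typed matchers: anyString() no longer matches a null argument, and any(char[].class) only matches a non-null char[]. Dropping to the untyped any() here presumably keeps these verifications passing when a callback is invoked with null. A sketch of the difference, with a hypothetical RealmCallbackLike interface:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

public class NullMatcherSketch {
  interface RealmCallbackLike { void setText(String text); }

  public static void main(String[] args) {
    RealmCallbackLike cb = mock(RealmCallbackLike.class);
    cb.setText(null);
    verify(cb).setText(any()); // passes: any() matches null in Mockito 2
    // verify(cb).setText(anyString()); // would fail: anyString() excludes null
  }
}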

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-common/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index 135e720..b732bbe 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -269,7 +269,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-endpoint/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index a127763..101bfdb 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -213,7 +213,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <!-- Some tests rely on Hadoop's KeyStoreTestUtil, which needs bc. -->

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-examples/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index 9f32fec..e706283 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -206,7 +206,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
index 1f10cb9..089dafd 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
@@ -4,9 +4,9 @@
  * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with the License. You may
  * obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software distributed under the
  * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
  * either express or implied. See the License for the specific language governing permissions and
@@ -63,13 +63,13 @@ public class TestMapReduceExamples {
 
       @Override
       public Void answer(InvocationOnMock invocation) throws Throwable {
-        ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
-        Put put = (Put) invocation.getArguments()[1];
+        ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArgument(0);
+        Put put = (Put) invocation.getArgument(1);
         assertEquals("row", Bytes.toString(writer.get()));
         assertEquals("row", Bytes.toString(put.getRow()));
         return null;
       }
-    }).when(ctx).write(any(ImmutableBytesWritable.class), any(Put.class));
+    }).when(ctx).write(any(), any());
 
     uploader.map(null, new Text("row,family,qualifier,value"), ctx);
 
@@ -134,13 +134,13 @@ public class TestMapReduceExamples {
 
       @Override
       public Void answer(InvocationOnMock invocation) throws Throwable {
-        ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
-        Put put = (Put) invocation.getArguments()[1];
+        ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArgument(0);
+        Put put = (Put) invocation.getArgument(1);
         assertEquals("tableName-column1", Bytes.toString(writer.get()));
         assertEquals("test", Bytes.toString(put.getRow()));
         return null;
       }
-    }).when(ctx).write(any(ImmutableBytesWritable.class), any(Put.class));
+    }).when(ctx).write(any(), any());
     Result result = mock(Result.class);
     when(result.getValue(Bytes.toBytes("columnFamily"), Bytes.toBytes("column1"))).thenReturn(
         Bytes.toBytes("test"));

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-http/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml
index c2ec302..13fae09 100644
--- a/hbase-http/pom.xml
+++ b/hbase-http/pom.xml
@@ -311,7 +311,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
index 726595b..c2b5944 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.http;
 
+import com.google.common.annotations.VisibleForTesting;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InterruptedIOException;
@@ -32,6 +33,7 @@ import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -48,6 +50,8 @@ import javax.servlet.http.HttpServletResponse;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -90,9 +94,6 @@ import org.eclipse.jetty.webapp.WebAppContext;
 import org.glassfish.jersey.server.ResourceConfig;
 import org.glassfish.jersey.servlet.ServletContainer;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
 /**
  * Create a Jetty embedded server to answer http requests. The primary goal
  * is to serve up status information for the server.
@@ -161,6 +162,11 @@ public class HttpServer implements FilterContainer {
 
   private final List<ListenerInfo> listeners = Lists.newArrayList();
 
+  @VisibleForTesting
+  public List<ServerConnector> getServerConnectors() {
+    return listeners.stream().map(info -> info.listener).collect(Collectors.toList());
+  }
+
   protected final WebAppContext webAppContext;
   protected final boolean findPort;
   protected final Map<ServletContextHandler, Boolean> defaultContexts = new HashMap<>();
@@ -1011,6 +1017,7 @@ public class HttpServer implements FilterContainer {
    * Open the main listener for the server
    * @throws Exception
    */
+  @VisibleForTesting
   void openListeners() throws Exception {
     for (ListenerInfo li : listeners) {
       ServerConnector listener = li.listener;
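
mockito-core 2.x drops the internal org.mockito.internal.util.reflection.Whitebox helper, so HttpServer gains a @VisibleForTesting accessor here and the test below stops reflecting into private fields. The pattern in miniature (names illustrative, not from the patch):

import java.util.ArrayList;
import java.util.List;

public class AccessorSketch {
  static class Server {
    private final List<String> listeners = new ArrayList<>();
    Server() { listeners.add("connector-0"); }
    // test-only accessor replacing Whitebox.getInternalState(server, "listeners")
    List<String> getListenersForTesting() { return listeners; }
  }

  public static void main(String[] args) {
    Server server = new Server();
    System.out.println(server.getListenersForTesting().get(0)); // connector-0
  }
}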

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index 2eb6a21..fddb2a4 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -67,7 +67,6 @@ import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
-import org.mockito.internal.util.reflection.Whitebox;
 
 @Category({MiscTests.class, SmallTests.class})
 public class TestHttpServer extends HttpServerFunctionalTest {
@@ -557,10 +556,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     HttpServer server = createServer(host, port);
     try {
       // not bound, ephemeral should return requested port (0 for ephemeral)
-      List<?> listeners = (List<?>) Whitebox.getInternalState(server,
-          "listeners");
-      ServerConnector listener = (ServerConnector) Whitebox.getInternalState(
-          listeners.get(0), "listener");
+      ServerConnector listener = server.getServerConnectors().get(0);
 
       assertEquals(port, listener.getPort());
       // verify hostname is what was given

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-mapreduce/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 4a63f41..607b43b 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -262,7 +262,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
index d085c21..fa03a17 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
@@ -36,6 +36,6 @@ public class TestDriver {
     ProgramDriver programDriverMock = mock(ProgramDriver.class);
     Driver.setProgramDriver(programDriverMock);
     Driver.main(new String[]{});
-    verify(programDriverMock).driver(Mockito.any(String[].class));
+    verify(programDriverMock).driver(Mockito.any());
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
index 7131cf9..5c4b6a9 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
@@ -107,7 +107,7 @@ public class TestGroupingTableMap {
       gTableMap.map(null, result, outputCollectorMock, reporter);
       verify(result).listCells();
       verify(outputCollectorMock, times(1))
-        .collect(any(ImmutableBytesWritable.class), any(Result.class));
+        .collect(any(), any());
       verifyNoMoreInteractions(outputCollectorMock);
     } finally {
       if (gTableMap != null)

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
index e222d0b..be65d84 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
@@ -55,7 +55,7 @@ public class TestIdentityTableMap {
             reporterMock);
 
       verify(outputCollectorMock, times(recordNumber)).collect(
-          Mockito.any(ImmutableBytesWritable.class), Mockito.any(Result.class));
+          Mockito.any(), Mockito.any());
     } finally {
       if (identityTableMap != null)
         identityTableMap.close();

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java
index 4ebd8bf..bc9ebb7 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.mapred;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.anyLong;
 import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyInt;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 
@@ -89,7 +89,7 @@ public class TestRowCounter {
           mock(OutputCollector.class), reporter);
 
     Mockito.verify(reporter, times(iterationNumber)).incrCounter(
-        any(Enum.class), anyInt());
+        any(), anyLong());
   }
 
   @Test
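
The anyInt() to anyLong() swap above is a behavioral fix rather than cleanup: Mockito 2 matches primitive matchers against the argument's actual type, and the counter increment parameter is a long, so an Integer matcher would never match. A sketch with a pared-down reporter interface:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

public class PrimitiveMatcherSketch {
  interface ReporterLike { void incrCounter(Enum<?> key, long amount); }
  enum Counters { ROWS }

  public static void main(String[] args) {
    ReporterLike reporter = mock(ReporterLike.class);
    reporter.incrCounter(Counters.ROWS, 1L);
    verify(reporter).incrCounter(any(), anyLong()); // anyInt() would not match the long argument
  }
}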

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
index 7e36602..ec7ddee 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
@@ -50,7 +50,7 @@ public class TestGroupingTableMapper {
     @SuppressWarnings("unchecked")
     Mapper<ImmutableBytesWritable, Result, ImmutableBytesWritable, Result>.Context context =
         mock(Mapper.Context.class);
-    context.write(any(ImmutableBytesWritable.class), any(Result.class));
+    context.write(any(), any());
     List<Cell> keyValue = new ArrayList<>();
     byte[] row = {};
     keyValue.add(new KeyValue(row, Bytes.toBytes("family2"), Bytes.toBytes("clm"), Bytes

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 062108d..6ef3a68 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -675,13 +675,13 @@ public class TestImportExport {
 
       @Override
       public Void answer(InvocationOnMock invocation) throws Throwable {
-        ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
-        MapReduceCell key = (MapReduceCell) invocation.getArguments()[1];
+        ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArgument(0);
+        MapReduceCell key = (MapReduceCell) invocation.getArgument(1);
         assertEquals("Key", Bytes.toString(writer.get()));
         assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
         return null;
       }
-    }).when(ctx).write(any(ImmutableBytesWritable.class), any(MapReduceCell.class));
+    }).when(ctx).write(any(), any());
 
     importer.setup(ctx);
     Result value = mock(Result.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java
index 1c33848..8897218 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java
@@ -23,7 +23,6 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -68,8 +67,8 @@ public class TestMultiTableSnapshotInputFormatImpl {
     // probably be the more "pure"
     // way of doing things. This is the lesser of two evils, perhaps?
     doNothing().when(this.subject).
-        restoreSnapshot(any(Configuration.class), any(String.class), any(Path.class),
-            any(Path.class), any(FileSystem.class));
+        restoreSnapshot(any(), any(), any(),
+            any(), any());
 
     this.conf = new Configuration();
     this.rootDir = new Path("file:///test-root-dir");
@@ -180,7 +179,7 @@ public class TestMultiTableSnapshotInputFormatImpl {
 
     for (Map.Entry<String, Path> entry : snapshotDirs.entrySet()) {
       verify(this.subject).restoreSnapshot(eq(this.conf), eq(entry.getKey()), eq(this.rootDir),
-          eq(entry.getValue()), any(FileSystem.class));
+          eq(entry.getValue()), any());
     }
   }
 }
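
The doNothing().when(subject).restoreSnapshot(...) form above is the do-style stubbing typically needed on a spy: the real method must not execute while the stub is registered, and void methods cannot be stubbed with when(spy.method(...)) at all. The same shape on a toy class (names illustrative):

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.spy;

public class SpyStubbingSketch {
  static class Restorer {
    void restore(String snapshot) { throw new IllegalStateException("expensive real call"); }
  }

  public static void main(String[] args) {
    Restorer subject = spy(new Restorer());
    doNothing().when(subject).restore(any()); // the real restore() is not invoked here
    subject.restore("snap"); // now a no-op
    System.out.println("stubbed ok");
  }
}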

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index 97b9750..23b3c04 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -184,13 +184,13 @@ public class TestWALPlayer {
 
       @Override
       public Void answer(InvocationOnMock invocation) throws Throwable {
-        ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
-        MapReduceCell key = (MapReduceCell) invocation.getArguments()[1];
+        ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArgument(0);
+        MapReduceCell key = (MapReduceCell) invocation.getArgument(1);
         assertEquals("row", Bytes.toString(writer.get()));
         assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
         return null;
       }
-    }).when(context).write(any(ImmutableBytesWritable.class), any(MapReduceCell.class));
+    }).when(context).write(any(), any());
 
     mapper.map(key, value, context);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-metrics-api/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml
index 1313d19..3baa330 100644
--- a/hbase-metrics-api/pom.xml
+++ b/hbase-metrics-api/pom.xml
@@ -98,7 +98,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-metrics/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml
index 70653dd..f82ad54 100644
--- a/hbase-metrics/pom.xml
+++ b/hbase-metrics/pom.xml
@@ -111,7 +111,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-rest/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index 2d5d701..78855df 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -331,7 +331,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteAdminRetries.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteAdminRetries.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteAdminRetries.java
index b926d82..706402c 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteAdminRetries.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteAdminRetries.java
@@ -49,9 +49,9 @@ public class TestRemoteAdminRetries {
   private static final int SLEEP_TIME = 50;
   private static final int RETRIES = 3;
   private static final long MAX_TIME = SLEEP_TIME * (RETRIES - 1);
-  
+
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-  
+
   private RemoteAdmin remoteAdmin;
   private Client client;
 
@@ -61,8 +61,8 @@ public class TestRemoteAdminRetries {
     Response response = new Response(509);
     when(client.get(anyString(), anyString())).thenReturn(response);
     when(client.delete(anyString())).thenReturn(response);
-    when(client.put(anyString(), anyString(), any(byte[].class))).thenReturn(response);
-    when(client.post(anyString(), anyString(), any(byte[].class))).thenReturn(response);
+    when(client.put(anyString(), anyString(), any())).thenReturn(response);
+    when(client.post(anyString(), anyString(), any())).thenReturn(response);
     Configuration configuration = TEST_UTIL.getConfiguration();
 
     configuration.setInt("hbase.rest.client.max.retries", RETRIES);
@@ -80,7 +80,7 @@ public class TestRemoteAdminRetries {
       }
     });
   }
-  
+
   @Test
   public void testFailingGetClusterStatus() throws Exception  {
     testTimedOutGetCall(new CallExecutor() {
@@ -120,7 +120,7 @@ public class TestRemoteAdminRetries {
         remoteAdmin.createTable(new HTableDescriptor(TableName.valueOf("TestTable")));
       }
     });
-    verify(client, times(RETRIES)).put(anyString(), anyString(), any(byte[].class));
+    verify(client, times(RETRIES)).put(anyString(), anyString(), any());
   }
 
   @Test
@@ -143,12 +143,12 @@ public class TestRemoteAdminRetries {
       }
     });
   }
-  
+
   private void testTimedOutGetCall(CallExecutor callExecutor) throws Exception {
     testTimedOutCall(callExecutor);
     verify(client, times(RETRIES)).get(anyString(), anyString());
   }
-  
+
   private void testTimedOutCall(CallExecutor callExecutor) throws Exception {
     long start = System.currentTimeMillis();
     try {
@@ -163,5 +163,5 @@ public class TestRemoteAdminRetries {
   private static interface CallExecutor {
     void run() throws Exception;
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
index 4a595f3..b25b63c 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
@@ -55,7 +55,7 @@ public class TestRemoteHTableRetries {
   private static final long MAX_TIME = SLEEP_TIME * (RETRIES - 1);
 
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-  
+
   private static final byte[] ROW_1 = Bytes.toBytes("testrow1");
   private static final byte[] COLUMN_1 = Bytes.toBytes("a");
   private static final byte[] QUALIFIER_1 = Bytes.toBytes("1");
@@ -63,16 +63,16 @@ public class TestRemoteHTableRetries {
 
   private Client client;
   private RemoteHTable remoteTable;
-  
+
   @Before
   public void setup() throws Exception {
     client = mock(Client.class);
     Response response = new Response(509);
     when(client.get(anyString(), anyString())).thenReturn(response);
     when(client.delete(anyString())).thenReturn(response);
-    when(client.put(anyString(), anyString(), any(byte[].class))).thenReturn(
+    when(client.put(anyString(), anyString(), any())).thenReturn(
         response);
-    when(client.post(anyString(), anyString(), any(byte[].class))).thenReturn(
+    when(client.post(anyString(), anyString(), any())).thenReturn(
         response);
 
     Configuration configuration = TEST_UTIL.getConfiguration();
@@ -87,7 +87,7 @@ public class TestRemoteHTableRetries {
   public void tearDownAfterClass() throws Exception {
     remoteTable.close();
   }
-  
+
   @Test
   public void testDelete() throws Exception {
     testTimedOutCall(new CallExecutor() {
@@ -99,7 +99,7 @@ public class TestRemoteHTableRetries {
     });
     verify(client, times(RETRIES)).delete(anyString());
   }
-  
+
   @Test
   public void testGet() throws Exception {
     testTimedOutGetCall(new CallExecutor() {
@@ -118,9 +118,9 @@ public class TestRemoteHTableRetries {
         remoteTable.put(new Put(Bytes.toBytes("Row")));
       }
     });
-    verify(client, times(RETRIES)).put(anyString(), anyString(), any(byte[].class));
+    verify(client, times(RETRIES)).put(anyString(), anyString(), any());
   }
-  
+
   @Test
   public void testMultiRowPut() throws Exception {
     testTimedOutCall(new CallExecutor() {
@@ -131,7 +131,7 @@ public class TestRemoteHTableRetries {
         remoteTable.put(Arrays.asList(puts));
       }
     });
-    verify(client, times(RETRIES)).put(anyString(), anyString(), any(byte[].class));
+    verify(client, times(RETRIES)).put(anyString(), anyString(), any());
   }
 
   @Test
@@ -142,9 +142,9 @@ public class TestRemoteHTableRetries {
         remoteTable.getScanner(new Scan());
       }
     });
-    verify(client, times(RETRIES)).post(anyString(), anyString(), any(byte[].class));
+    verify(client, times(RETRIES)).post(anyString(), anyString(), any());
   }
-  
+
   @Test
   public void testCheckAndPut() throws Exception {
     testTimedOutCall(new CallExecutor() {
@@ -155,7 +155,7 @@ public class TestRemoteHTableRetries {
         remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1, put );
       }
     });
-    verify(client, times(RETRIES)).put(anyString(), anyString(), any(byte[].class));
+    verify(client, times(RETRIES)).put(anyString(), anyString(), any());
   }
 
   @Test
@@ -170,12 +170,12 @@ public class TestRemoteHTableRetries {
       }
     });
   }
-  
+
   private void testTimedOutGetCall(CallExecutor callExecutor) throws Exception {
     testTimedOutCall(callExecutor);
     verify(client, times(RETRIES)).get(anyString(), anyString());
   }
-  
+
   private void testTimedOutCall(CallExecutor callExecutor) throws Exception {
     long start = System.currentTimeMillis();
     try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-rsgroup/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml
index 8a26ead..ee75ef9 100644
--- a/hbase-rsgroup/pom.xml
+++ b/hbase-rsgroup/pom.xml
@@ -164,7 +164,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java
----------------------------------------------------------------------
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java
index db7cf4d..5ce0c09 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java
@@ -582,11 +582,11 @@ public class TestRSGroupBasedLoadBalancer {
     Mockito.when(gm.listRSGroups()).thenReturn(
         Lists.newLinkedList(groupMap.values()));
     Mockito.when(gm.isOnline()).thenReturn(true);
-    Mockito.when(gm.getRSGroupOfTable(Mockito.any(TableName.class)))
+    Mockito.when(gm.getRSGroupOfTable(Mockito.any()))
         .thenAnswer(new Answer<String>() {
           @Override
           public String answer(InvocationOnMock invocation) throws Throwable {
-            return tableMap.get(invocation.getArguments()[0]);
+            return tableMap.get(invocation.getArgument(0));
           }
         });
     return gm;

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index ea02f26..d665538 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -605,7 +605,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
index be1307e..1d752d2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
@@ -23,6 +23,9 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 import java.io.File;
 import java.io.IOException;
@@ -402,8 +405,8 @@ public class TestHBaseTestingUtility {
 
   @Test public void testResolvePortConflict() throws Exception {
     // raises port conflict between 1st call and 2nd call of randomPort() by mocking Random object
-    Random random = Mockito.mock(Random.class);
-    Mockito.when(random.nextInt(Mockito.any(Integer.class)))
+    Random random = mock(Random.class);
+    when(random.nextInt(anyInt()))
       .thenAnswer(new Answer<Integer>() {
         int[] numbers = { 1, 1, 2 };
         int count = 0;
@@ -417,8 +420,8 @@ public class TestHBaseTestingUtility {
       });
 
     HBaseTestingUtility.PortAllocator.AvailablePortChecker portChecker =
-      Mockito.mock(HBaseTestingUtility.PortAllocator.AvailablePortChecker.class);
-    Mockito.when(portChecker.available(Mockito.any(Integer.class))).thenReturn(true);
+      mock(HBaseTestingUtility.PortAllocator.AvailablePortChecker.class);
+    when(portChecker.available(anyInt())).thenReturn(true);
 
     HBaseTestingUtility.PortAllocator portAllocator =
       new HBaseTestingUtility.PortAllocator(random, portChecker);
@@ -426,7 +429,7 @@ public class TestHBaseTestingUtility {
     int port1 = portAllocator.randomFreePort();
     int port2 = portAllocator.randomFreePort();
     assertNotEquals(port1, port2);
-    Mockito.verify(random, Mockito.times(3)).nextInt(Mockito.any(Integer.class));
+    Mockito.verify(random, Mockito.times(3)).nextInt(anyInt());
   }
 
   @Test
@@ -452,7 +455,7 @@ public class TestHBaseTestingUtility {
     assertEquals(nonDefaultRegionServerPort
             , htu.getConfiguration().getInt(HConstants.REGIONSERVER_PORT, 0));
   }
-  
+
   @Test public void testMRYarnConfigsPopulation() throws IOException {
     Map<String, String> dummyProps = new HashMap<>();
     dummyProps.put("mapreduce.jobtracker.address", "dummyhost:11234");
@@ -461,27 +464,27 @@ public class TestHBaseTestingUtility {
     dummyProps.put("yarn.resourcemanager.scheduler.address", "dummyhost:11237");
     dummyProps.put("mapreduce.jobhistory.webapp.address", "dummyhost:11238");
     dummyProps.put("yarn.resourcemanager.webapp.address", "dummyhost:11239");
-  
+
     HBaseTestingUtility hbt = new HBaseTestingUtility();
-    
+
     // populate the mr props to the Configuration instance
     for (Entry<String, String> entry : dummyProps.entrySet()) {
       hbt.getConfiguration().set(entry.getKey(), entry.getValue());
     }
-    
+
     for (Entry<String,String> entry : dummyProps.entrySet()) {
       assertTrue("The Configuration for key " + entry.getKey() +" and value: " + entry.getValue() +
                  " is not populated correctly", hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue()));
     }
 
     hbt.startMiniMapReduceCluster();
-    
-    // Confirm that MiniMapReduceCluster overwrites the mr properties and updates the Configuration 
+
+    // Confirm that MiniMapReduceCluster overwrites the mr properties and updates the Configuration
     for (Entry<String,String> entry : dummyProps.entrySet()) {
       assertFalse("The MR prop: " + entry.getValue() + " is not overwritten when map reduce mini"+
                   "cluster is started", hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue()));
     }
-    
+
     hbt.shutdownMiniMapReduceCluster();
   }
 }
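
Replacing Mockito.any(Integer.class) with anyInt() matters for primitive parameters: under Mockito 2, any(Integer.class) hands back null, which throws a NullPointerException when auto-unboxed into Random.nextInt(int), while anyInt() supplies 0. In miniature:

import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Random;

public class UnboxingSketch {
  public static void main(String[] args) {
    Random random = mock(Random.class);
    // any(Integer.class) here would NPE while unboxing null into the int parameter
    when(random.nextInt(anyInt())).thenReturn(42);
    System.out.println(random.nextInt(100)); // 42
  }
}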

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
index cec2c20..fb1c1e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
@@ -165,7 +165,7 @@ public class TestMetaTableAccessorNoCluster {
           .thenThrow(new ServiceException("Server not running (3 of 3)"))
           .thenAnswer(new Answer<ScanResponse>() {
             public ScanResponse answer(InvocationOnMock invocation) throws Throwable {
-              ((HBaseRpcController) invocation.getArguments()[0]).setCellScanner(CellUtil
+              ((HBaseRpcController) invocation.getArgument(0)).setCellScanner(CellUtil
                   .createCellScanner(cellScannables));
               return builder.setScannerId(1234567890L).setMoreResults(false).build();
             }
@@ -189,7 +189,7 @@ public class TestMetaTableAccessorNoCluster {
 
       // Now shove our HRI implementation into the spied-upon connection.
       Mockito.doReturn(implementation).
-        when(connection).getClient(Mockito.any(ServerName.class));
+        when(connection).getClient(Mockito.any());
 
       // Scan meta for user tables and verify we got back expected answer.
       NavigableMap<RegionInfo, Result> hris =

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
index 8bebd8d..6a904a7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
@@ -250,7 +250,7 @@ public class TestMetaTableLocator {
       Mockito.mock(AdminProtos.AdminService.BlockingInterface.class);
     Mockito.when(implementation.getRegionInfo((RpcController)Mockito.any(),
       (GetRegionInfoRequest)Mockito.any())).thenThrow(connectException);
-    Mockito.when(connection.getAdmin(Mockito.any(ServerName.class))).
+    Mockito.when(connection.getAdmin(Mockito.any())).
       thenReturn(implementation);
         RpcControllerFactory controllerFactory = Mockito.mock(RpcControllerFactory.class);
         Mockito.when(controllerFactory.newController()).thenReturn(
@@ -325,12 +325,12 @@ public class TestMetaTableLocator {
       thenReturn(anyLocation);
     if (admin != null) {
       // If a call to getHRegionConnection, return this implementation.
-      Mockito.when(connection.getAdmin(Mockito.any(ServerName.class))).
+      Mockito.when(connection.getAdmin(Mockito.any())).
         thenReturn(admin);
     }
     if (client != null) {
       // If a call to getClient, return this implementation.
-      Mockito.when(connection.getClient(Mockito.any(ServerName.class))).
+      Mockito.when(connection.getClient(Mockito.any())).
         thenReturn(client);
     }
     return connection;

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
index 20cb513..aa246c2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
@@ -362,7 +362,7 @@ public class TestZooKeeperTableArchiveClient {
       public Iterable<FileStatus> answer(InvocationOnMock invocation) throws Throwable {
         counter[0]++;
         LOG.debug(counter[0] + "/ " + expected + ") Wrapping call to getDeletableFiles for files: "
-            + invocation.getArguments()[0]);
+            + invocation.getArgument(0));
 
         @SuppressWarnings("unchecked")
         Iterable<FileStatus> ret = (Iterable<FileStatus>) invocation.callRealMethod();

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java
index d7d1b3a..8ef784c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java
@@ -116,12 +116,12 @@ public class HConnectionTestingUtility {
         .thenReturn(new RegionLocations(loc));
     if (admin != null) {
       // If a call to getAdmin, return this implementation.
-      Mockito.when(c.getAdmin(Mockito.any(ServerName.class))).
+      Mockito.when(c.getAdmin(Mockito.any())).
         thenReturn(admin);
     }
     if (client != null) {
       // If a call to getClient, return this client.
-      Mockito.when(c.getClient(Mockito.any(ServerName.class))).
+      Mockito.when(c.getClient(Mockito.any())).
         thenReturn(client);
     }
     NonceGenerator ng = Mockito.mock(NonceGenerator.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
index 3e3aa45..650e4d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
@@ -96,8 +96,8 @@ public class TestForeignExceptionDispatcher {
     assertTrue("Monitor didn't get timeout", monitor.hasException());
 
     // verify that that we propagated the error
-    Mockito.verify(listener1).receive(Mockito.any(ForeignException.class));
-    Mockito.verify(listener2).receive(Mockito.any(ForeignException.class));
+    Mockito.verify(listener1).receive(Mockito.any());
+    Mockito.verify(listener2).receive(Mockito.any());
   }
 
   /**
@@ -118,7 +118,7 @@ public class TestForeignExceptionDispatcher {
     timer.start();
     timer.trigger();
     // make sure that we got the timer error
-    Mockito.verify(listener1, Mockito.times(1)).receive(Mockito.any(ForeignException.class));
-    Mockito.verify(listener2, Mockito.times(1)).receive(Mockito.any(ForeignException.class));
+    Mockito.verify(listener1, Mockito.times(1)).receive(Mockito.any());
+    Mockito.verify(listener2, Mockito.times(1)).receive(Mockito.any());
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
index 49f6164..37af804 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
@@ -45,7 +45,7 @@ public class TestTimeoutExceptionInjector {
     TimeoutExceptionInjector timer = new TimeoutExceptionInjector(listener, time);
     timer.start();
     timer.trigger();
-    Mockito.verify(listener, Mockito.times(1)).receive(Mockito.any(ForeignException.class));
+    Mockito.verify(listener, Mockito.times(1)).receive(Mockito.any());
   }
 
   /**
@@ -58,7 +58,7 @@ public class TestTimeoutExceptionInjector {
     TimeoutExceptionInjector timer = new TimeoutExceptionInjector(listener, time);
     timer.start();
     timer.trigger();
-    Mockito.verify(listener).receive(Mockito.any(ForeignException.class));
+    Mockito.verify(listener).receive(Mockito.any());
   }
 
   /**
@@ -98,7 +98,7 @@ public class TestTimeoutExceptionInjector {
       LOG.debug("Correctly failed timer: " + e.getMessage());
     }
     Thread.sleep(time * 2);
-    Mockito.verify(listener, Mockito.times(1)).receive(Mockito.any(ForeignException.class));
+    Mockito.verify(listener, Mockito.times(1)).receive(Mockito.any());
     Mockito.verifyNoMoreInteractions(listener);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
index 8364b22..bb91770 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
@@ -22,7 +22,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyObject;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
@@ -42,7 +41,6 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.CountDownLatch;
@@ -56,8 +54,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandlerImpl;
-import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
@@ -182,7 +178,6 @@ public class TestSimpleRpcScheduler {
 
   @Test
   public void testHandlerIsolation() throws IOException, InterruptedException {
-
     CallRunner generalTask = createMockTask();
     CallRunner priorityTask = createMockTask();
     CallRunner replicationTask = createMockTask();
@@ -219,9 +214,7 @@ public class TestSimpleRpcScheduler {
     scheduler.init(CONTEXT);
     scheduler.start();
     for (CallRunner task : tasks) {
-      when(qosFunction.getPriority((RPCProtos.RequestHeader) anyObject(),
-        (Message) anyObject(), (User) anyObject()))
-          .thenReturn(qos.get(task));
+      when(qosFunction.getPriority(any(), any(), any())).thenReturn(qos.get(task));
       scheduler.dispatch(task);
     }
     for (CallRunner task : tasks) {
@@ -238,13 +231,11 @@ public class TestSimpleRpcScheduler {
     ServerCall call = mock(ServerCall.class);
     CallRunner task = mock(CallRunner.class);
     when(task.getRpcCall()).thenReturn(call);
-    when(call.getRequestUser()).thenReturn(Optional.empty());
     return task;
   }
 
   @Test
   public void testRpcScheduler() throws Exception {
-
     testRpcScheduler(RpcExecutor.CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE);
     testRpcScheduler(RpcExecutor.CALL_QUEUE_TYPE_FIFO_CONF_VALUE);
   }
@@ -254,9 +245,7 @@ public class TestSimpleRpcScheduler {
     schedConf.set(RpcExecutor.CALL_QUEUE_TYPE_CONF_KEY, queueType);
 
     PriorityFunction priority = mock(PriorityFunction.class);
-    when(priority.getPriority(any(RequestHeader.class),
-      any(Message.class), any(User.class)))
-      .thenReturn(HConstants.NORMAL_QOS);
+    when(priority.getPriority(any(), any(), any())).thenReturn(HConstants.NORMAL_QOS);
 
     RpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 1, 1, 1, priority,
                                                     HConstants.QOS_THRESHOLD);
@@ -268,25 +257,22 @@ public class TestSimpleRpcScheduler {
       RequestHeader smallHead = RequestHeader.newBuilder().setCallId(1).build();
       when(smallCallTask.getRpcCall()).thenReturn(smallCall);
       when(smallCall.getHeader()).thenReturn(smallHead);
-      when(smallCall.getRequestUser()).thenReturn(Optional.empty());
 
       CallRunner largeCallTask = mock(CallRunner.class);
       ServerCall largeCall = mock(ServerCall.class);
       RequestHeader largeHead = RequestHeader.newBuilder().setCallId(50).build();
       when(largeCallTask.getRpcCall()).thenReturn(largeCall);
       when(largeCall.getHeader()).thenReturn(largeHead);
-      when(largeCall.getRequestUser()).thenReturn(Optional.empty());
 
       CallRunner hugeCallTask = mock(CallRunner.class);
       ServerCall hugeCall = mock(ServerCall.class);
       RequestHeader hugeHead = RequestHeader.newBuilder().setCallId(100).build();
       when(hugeCallTask.getRpcCall()).thenReturn(hugeCall);
       when(hugeCall.getHeader()).thenReturn(hugeHead);
-      when(hugeCall.getRequestUser()).thenReturn(Optional.empty());
 
-      when(priority.getDeadline(eq(smallHead), any(Message.class))).thenReturn(0L);
-      when(priority.getDeadline(eq(largeHead), any(Message.class))).thenReturn(50L);
-      when(priority.getDeadline(eq(hugeHead), any(Message.class))).thenReturn(100L);
+      when(priority.getDeadline(eq(smallHead), any())).thenReturn(0L);
+      when(priority.getDeadline(eq(largeHead), any())).thenReturn(50L);
+      when(priority.getDeadline(eq(hugeHead), any())).thenReturn(100L);
 
       final ArrayList<Integer> work = new ArrayList<>();
       doAnswerTaskExecution(smallCallTask, work, 10, 250);
@@ -337,8 +323,7 @@ public class TestSimpleRpcScheduler {
     schedConf.setFloat(RWQueueRpcExecutor.CALL_QUEUE_SCAN_SHARE_CONF_KEY, 0f);
 
     PriorityFunction priority = mock(PriorityFunction.class);
-    when(priority.getPriority(any(RequestHeader.class), any(Message.class),
-      any(User.class))).thenReturn(HConstants.NORMAL_QOS);
+    when(priority.getPriority(any(), any(), any())).thenReturn(HConstants.NORMAL_QOS);
 
     RpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 2, 1, 1, priority,
                                                     HConstants.QOS_THRESHOLD);
@@ -353,8 +338,7 @@ public class TestSimpleRpcScheduler {
     schedConf.setFloat(RWQueueRpcExecutor.CALL_QUEUE_SCAN_SHARE_CONF_KEY, 0.5f);
 
     PriorityFunction priority = mock(PriorityFunction.class);
-    when(priority.getPriority(any(RPCProtos.RequestHeader.class), any(Message.class),
-      any(User.class))).thenReturn(HConstants.NORMAL_QOS);
+    when(priority.getPriority(any(), any(), any())).thenReturn(HConstants.NORMAL_QOS);
 
     RpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 3, 1, 1, priority,
                                                     HConstants.QOS_THRESHOLD);
@@ -369,14 +353,12 @@ public class TestSimpleRpcScheduler {
       when(putCallTask.getRpcCall()).thenReturn(putCall);
       when(putCall.getHeader()).thenReturn(putHead);
       when(putCall.getParam()).thenReturn(putCall.param);
-      when(putCall.getRequestUser()).thenReturn(Optional.empty());
 
       CallRunner getCallTask = mock(CallRunner.class);
       ServerCall getCall = mock(ServerCall.class);
       RequestHeader getHead = RequestHeader.newBuilder().setMethodName("get").build();
       when(getCallTask.getRpcCall()).thenReturn(getCall);
       when(getCall.getHeader()).thenReturn(getHead);
-      when(getCall.getRequestUser()).thenReturn(Optional.empty());
 
       CallRunner scanCallTask = mock(CallRunner.class);
       ServerCall scanCall = mock(ServerCall.class);
@@ -385,7 +367,6 @@ public class TestSimpleRpcScheduler {
       when(scanCallTask.getRpcCall()).thenReturn(scanCall);
       when(scanCall.getHeader()).thenReturn(scanHead);
       when(scanCall.getParam()).thenReturn(scanCall.param);
-      when(scanCall.getRequestUser()).thenReturn(Optional.empty());
 
       ArrayList<Integer> work = new ArrayList<>();
       doAnswerTaskExecution(putCallTask, work, 1, 1000);
@@ -449,8 +430,7 @@ public class TestSimpleRpcScheduler {
     schedConf.setInt("hbase.ipc.server.max.callqueue.length", 5);
 
     PriorityFunction priority = mock(PriorityFunction.class);
-    when(priority.getPriority(any(RequestHeader.class), any(Message.class),
-      any(User.class))).thenReturn(HConstants.NORMAL_QOS);
+    when(priority.getPriority(any(), any(), any())).thenReturn(HConstants.NORMAL_QOS);
     SimpleRpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 0, 0, 0, priority,
       HConstants.QOS_THRESHOLD);
     try {
@@ -463,7 +443,6 @@ public class TestSimpleRpcScheduler {
       RequestHeader putHead = RequestHeader.newBuilder().setMethodName("mutate").build();
       when(putCallTask.getRpcCall()).thenReturn(putCall);
       when(putCall.getHeader()).thenReturn(putHead);
-      when(putCall.getRequestUser()).thenReturn(Optional.empty());
 
       assertTrue(scheduler.dispatch(putCallTask));
 
@@ -516,8 +495,7 @@ public class TestSimpleRpcScheduler {
     schedConf.set(RpcExecutor.CALL_QUEUE_TYPE_CONF_KEY,
       RpcExecutor.CALL_QUEUE_TYPE_CODEL_CONF_VALUE);
     PriorityFunction priority = mock(PriorityFunction.class);
-    when(priority.getPriority(any(RPCProtos.RequestHeader.class), any(Message.class),
-      any(User.class))).thenReturn(HConstants.NORMAL_QOS);
+    when(priority.getPriority(any(), any(), any())).thenReturn(HConstants.NORMAL_QOS);
     SimpleRpcScheduler scheduler =
         new SimpleRpcScheduler(schedConf, 1, 1, 1, priority, HConstants.QOS_THRESHOLD);
     try {

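Besides the matcher rewrites, the TestSimpleRpcScheduler diff drops the
deprecated anyObject() import in favor of any(), and deletes the
when(call.getRequestUser()).thenReturn(Optional.empty()) stubbings that the
updated tests no longer appear to consume. Pruning such stubs matters under
mockito-core 2.x, whose default JUnit runner treats unnecessary stubbings as
an error. A minimal sketch of that behavior, assuming JUnit 4 and
mockito-core 2.x (Widget is a hypothetical interface, not an HBase type):

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import org.junit.Assert;
    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.mockito.junit.MockitoJUnitRunner;

    @RunWith(MockitoJUnitRunner.class) // strict by default in Mockito 2
    public class StrictStubbingSketch {
      interface Widget { String name(); int size(); }

      @Test
      public void onlyUsesName() {
        Widget w = mock(Widget.class);
        when(w.name()).thenReturn("w1");
        when(w.size()).thenReturn(42); // never consumed: the runner reports
                                       // an UnnecessaryStubbingException
        Assert.assertEquals("w1", w.name());
      }
    }
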
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
index 6215790..3dcd849 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
@@ -145,7 +145,7 @@ public class TestSplitLogManager {
 
     // By default, we let the test manage the error as before, so the server
     // does not appear as dead from the master's point of view, only from the split log's.
-    Mockito.when(sm.isServerOnline(Mockito.any(ServerName.class))).thenReturn(true);
+    Mockito.when(sm.isServerOnline(Mockito.any())).thenReturn(true);
 
     to = 12000;
     conf.setInt(HConstants.HBASE_SPLITLOG_MANAGER_TIMEOUT, to);

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
index 073216c..4e11778 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.master.assignment;
 
+import static org.mockito.ArgumentMatchers.any;
+
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.Map;
@@ -55,23 +57,21 @@ import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore;
 import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
 import org.apache.hadoop.hbase.security.Superusers;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.mockito.Mockito;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.stubbing.Answer;
-
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException;
+import org.apache.hadoop.hbase.util.FSUtils;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
 
 /**
  * A mocked master services.
@@ -136,17 +136,15 @@ public class MockMasterServices extends MockNoopMasterServices {
     MutateResponse.Builder builder = MutateResponse.newBuilder();
     builder.setProcessed(true);
     try {
-      Mockito.when(ri.mutate((RpcController)Mockito.any(), (MutateRequest)Mockito.any())).
-        thenReturn(builder.build());
+      Mockito.when(ri.mutate(any(), any())).thenReturn(builder.build());
     } catch (ServiceException se) {
       throw ProtobufUtil.handleRemoteException(se);
     }
     try {
-      Mockito.when(ri.multi((RpcController)Mockito.any(), (MultiRequest)Mockito.any())).
-        thenAnswer(new Answer<MultiResponse>() {
+      Mockito.when(ri.multi(any(), any())).thenAnswer(new Answer<MultiResponse>() {
           @Override
           public MultiResponse answer(InvocationOnMock invocation) throws Throwable {
-            return buildMultiResponse( (MultiRequest)invocation.getArguments()[1]);
+            return buildMultiResponse(invocation.getArgument(1));
           }
         });
     } catch (ServiceException se) {

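The MockMasterServices hunk also swaps the cast-and-index idiom
((MultiRequest) invocation.getArguments()[1]) for Mockito 2's typed
InvocationOnMock.getArgument(int), which infers the target type at the
assignment. A minimal sketch, assuming mockito-core 2.x (Repeater is a
hypothetical interface, not an HBase type):

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    public class GetArgumentSketch {
      interface Repeater { String repeat(String s); }

      public static void main(String[] args) {
        Repeater r = mock(Repeater.class);
        when(r.repeat(any())).thenAnswer(invocation -> {
          // was: String s = (String) invocation.getArguments()[0];
          String s = invocation.getArgument(0);
          return s + s;
        });
        System.out.println(r.repeat("ab")); // abab
      }
    }
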
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
index 5c76643..566479a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
@@ -145,7 +145,7 @@ public class TestCleanerChore {
     // touch a new file
     fs.create(file).close();
     assertTrue("Test file didn't get created.", fs.exists(file));
-    
+
     FileStatus fStat = fs.getFileStatus(parent);
     chore.chore();
     // make sure we never checked the directory
@@ -216,7 +216,7 @@ public class TestCleanerChore {
         FSUtils.logFileSystemState(fs, testDir, LOG);
         return (Boolean) invocation.callRealMethod();
       }
-    }).when(spy).isFileDeletable(Mockito.any(FileStatus.class));
+    }).when(spy).isFileDeletable(Mockito.any());
 
     // run the chore
     chore.chore();
@@ -225,7 +225,7 @@ public class TestCleanerChore {
     assertTrue("Added file unexpectedly deleted", fs.exists(addedFile));
     assertTrue("Parent directory deleted unexpectedly", fs.exists(parent));
     assertFalse("Original file unexpectedly retained", fs.exists(file));
-    Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any(FileStatus.class));
+    Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any());
     Mockito.reset(spy);
   }
 
@@ -274,7 +274,7 @@ public class TestCleanerChore {
         FSUtils.logFileSystemState(fs, testDir, LOG);
         return (Boolean) invocation.callRealMethod();
       }
-    }).when(spy).isFileDeletable(Mockito.any(FileStatus.class));
+    }).when(spy).isFileDeletable(Mockito.any());
 
     // attempt to delete the directory, which
     if (chore.checkAndDeleteDirectory(parent)) {
@@ -286,7 +286,7 @@ public class TestCleanerChore {
     assertTrue("Added file unexpectedly deleted", fs.exists(racyFile));
     assertTrue("Parent directory deleted unexpectedly", fs.exists(parent));
     assertFalse("Original file unexpectedly retained", fs.exists(file));
-    Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any(FileStatus.class));
+    Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any());
   }
 
   @Test

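The doAnswer(...).when(spy).isFileDeletable(any()) stubbings above follow the
recommended idiom for spies: do-family stubbing installs the answer without
invoking the real method (when(spy.method()) would call it immediately), and
callRealMethod() still delegates once the test runs. A minimal sketch,
assuming mockito-core 2.x (Counter is a hypothetical class, not an HBase
type):

    import static org.mockito.ArgumentMatchers.anyInt;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.spy;

    public class SpyAnswerSketch {
      static class Counter {
        int bump(int n) { return n + 1; }
      }

      public static void main(String[] args) {
        Counter c = spy(new Counter());
        doAnswer(invocation -> {
          // Side effect before delegating, mirroring the
          // logFileSystemState call in TestCleanerChore.
          System.out.println("bump(" + invocation.getArgument(0) + ")");
          return invocation.callRealMethod(); // still runs the real bump()
        }).when(c).bump(anyInt());
        System.out.println(c.bump(1)); // logs bump(1), then prints 2
      }
    }
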
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
index ab6d7d0..2af4b47 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
@@ -39,6 +39,8 @@ import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.master.MasterRpcServices;
 import org.apache.hadoop.hbase.master.MasterServices;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -49,10 +51,6 @@ import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 import org.mockito.Mockito;
 
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse;
 
 /**
  * Tests logic of {@link SimpleRegionNormalizer}.
@@ -353,9 +351,9 @@ public class TestSimpleRegionNormalizer {
     // for simplicity all regions are assumed to be on one server; doesn't matter to us
     ServerName sn = ServerName.valueOf("localhost", 0, 1L);
     when(masterServices.getAssignmentManager().getRegionStates().
-      getRegionsOfTable(any(TableName.class))).thenReturn(RegionInfo);
+      getRegionsOfTable(any())).thenReturn(RegionInfo);
     when(masterServices.getAssignmentManager().getRegionStates().
-      getRegionServerOfRegion(any(RegionInfo.class))).thenReturn(sn);
+      getRegionServerOfRegion(any())).thenReturn(sn);
 
     for (Map.Entry<byte[], Integer> region : regionSizes.entrySet()) {
       RegionLoad regionLoad = Mockito.mock(RegionLoad.class);
@@ -366,8 +364,8 @@ public class TestSimpleRegionNormalizer {
         getRegionsLoad().get(region.getKey())).thenReturn(regionLoad);
     }
     try {
-      when(masterRpcServices.isSplitOrMergeEnabled(any(RpcController.class),
-        any(IsSplitOrMergeEnabledRequest.class))).thenReturn(
+      when(masterRpcServices.isSplitOrMergeEnabled(any(),
+        any())).thenReturn(
           IsSplitOrMergeEnabledResponse.newBuilder().setEnabled(true).build());
     } catch (ServiceException se) {
       LOG.debug("error setting isSplitOrMergeEnabled switch", se);

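The try/catch around the isSplitOrMergeEnabled stubbing above is purely for
the compiler: the method declares ServiceException, so any call to it
(including the recorded call inside when(...)) must handle the checked
exception, even though a mock never actually throws while being stubbed. A
minimal sketch, assuming mockito-core 2.x (Lookup is a hypothetical
interface, not an HBase type):

    import java.io.IOException;

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    public class CheckedStubSketch {
      interface Lookup { String find(String key) throws IOException; }

      public static void main(String[] args) {
        Lookup l = mock(Lookup.class);
        try {
          when(l.find(any())).thenReturn("value");
          System.out.println(l.find("k")); // value
        } catch (IOException e) {
          // Unreachable during stubbing; needed only to satisfy the compiler.
          throw new AssertionError(e);
        }
      }
    }
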
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
index fa934d9..0603b21 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
@@ -124,7 +124,7 @@ public class TestProcedure {
     proc.completedProcedure.await();
     verify(procspy).sendGlobalBarrierReached();
     verify(procspy).sendGlobalBarrierComplete();
-    verify(procspy, never()).receive(any(ForeignException.class));
+    verify(procspy, never()).receive(any());
   }
 
   @Test(timeout = 60000)
@@ -176,7 +176,7 @@ public class TestProcedure {
     procspy.completedProcedure.await();
     verify(procspy).sendGlobalBarrierReached();
     verify(procspy).sendGlobalBarrierComplete();
-    verify(procspy, never()).receive(any(ForeignException.class));
+    verify(procspy, never()).receive(any());
   }
 
   @Test(timeout = 60000)

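One matcher the TestProcedureCoordinator diff below leaves in place is
anyListOf(String.class). It still compiles against mockito-core 2.x, but the
Class-parameterized variants are deprecated there in favor of the plain
anyList(), whose type parameter is inferred. A minimal sketch, assuming
mockito-core 2.x (Roster is a hypothetical interface, not an HBase type):

    import java.util.Arrays;
    import java.util.List;

    import static org.mockito.ArgumentMatchers.anyList;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    public class AnyListSketch {
      interface Roster { int size(List<String> names); }

      public static void main(String[] args) {
        Roster r = mock(Roster.class);
        // anyList() infers List<String> from the parameter type.
        when(r.size(anyList())).thenReturn(3);
        System.out.println(r.size(Arrays.asList("a", "b"))); // 3
      }
    }
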
http://git-wip-us.apache.org/repos/asf/hbase/blob/71a55dcd/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
index b5a9f29..d096db7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
@@ -41,10 +41,10 @@ import java.util.List;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.errorhandling.ForeignException;
-import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.junit.After;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -52,8 +52,6 @@ import org.mockito.InOrder;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
 /**
  * Test Procedure coordinator operation.
  * <p>
@@ -106,7 +104,7 @@ public class TestProcedureCoordinator {
     Procedure proc2 = new Procedure(coordinator,  monitor,
         WAKE_FREQUENCY, TIMEOUT, procName +"2", procData, expected);
     Procedure procSpy2 = spy(proc2);
-    when(coordinator.createProcedure(any(ForeignExceptionDispatcher.class), eq(procName), eq(procData), anyListOf(String.class)))
+    when(coordinator.createProcedure(any(), eq(procName), eq(procData), anyListOf(String.class)))
     .thenReturn(procSpy, procSpy2);
 
     coordinator.startProcedure(procSpy.getErrorMonitor(), procName, procData, expected);
@@ -127,7 +125,7 @@ public class TestProcedureCoordinator {
         TIMEOUT, procName, procData, expected);
     final Procedure procSpy = spy(proc);
 
-    when(coordinator.createProcedure(any(ForeignExceptionDispatcher.class), eq(procName), eq(procData), anyListOf(String.class)))
+    when(coordinator.createProcedure(any(), eq(procName), eq(procData), anyListOf(String.class)))
         .thenReturn(procSpy);
 
     // use the passed controller responses
@@ -139,10 +137,10 @@ public class TestProcedureCoordinator {
     proc = coordinator.startProcedure(proc.getErrorMonitor(), procName, procData, expected);
     // and wait for it to finish
     while(!proc.completedLatch.await(WAKE_FREQUENCY, TimeUnit.MILLISECONDS));
-    verify(procSpy, atLeastOnce()).receive(any(ForeignException.class));
+    verify(procSpy, atLeastOnce()).receive(any());
     verify(coordinator, times(1)).rpcConnectionFailure(anyString(), eq(cause));
     verify(controller, times(1)).sendGlobalBarrierAcquire(procSpy, procData, expected);
-    verify(controller, never()).sendGlobalBarrierReached(any(Procedure.class),
+    verify(controller, never()).sendGlobalBarrierReached(any(),
         anyListOf(String.class));
   }
 
@@ -158,7 +156,7 @@ public class TestProcedureCoordinator {
     final Procedure spy = spy(new Procedure(coordinator,
         WAKE_FREQUENCY, TIMEOUT, procName, procData, expected));
 
-    when(coordinator.createProcedure(any(ForeignExceptionDispatcher.class), eq(procName), eq(procData), anyListOf(String.class)))
+    when(coordinator.createProcedure(any(), eq(procName), eq(procData), anyListOf(String.class)))
     .thenReturn(spy);
 
     // use the passed controller responses
@@ -171,11 +169,11 @@ public class TestProcedureCoordinator {
     Procedure task = coordinator.startProcedure(spy.getErrorMonitor(), procName, procData, expected);
     // and wait for it to finish
     while(!task.completedLatch.await(WAKE_FREQUENCY, TimeUnit.MILLISECONDS));
-    verify(spy, atLeastOnce()).receive(any(ForeignException.class));
+    verify(spy, atLeastOnce()).receive(any());
     verify(coordinator, times(1)).rpcConnectionFailure(anyString(), eq(cause));
     verify(controller, times(1)).sendGlobalBarrierAcquire(eq(spy),
         eq(procData), anyListOf(String.class));
-    verify(controller, times(1)).sendGlobalBarrierReached(any(Procedure.class),
+    verify(controller, times(1)).sendGlobalBarrierReached(any(),
         anyListOf(String.class));
   }
 
@@ -267,7 +265,7 @@ public class TestProcedureCoordinator {
   public void runCoordinatedOperation(Procedure spy, AcquireBarrierAnswer prepareOperation,
       BarrierAnswer commitOperation, String... cohort) throws Exception {
     List<String> expected = Arrays.asList(cohort);
-    when(coordinator.createProcedure(any(ForeignExceptionDispatcher.class), eq(procName), eq(procData), anyListOf(String.class)))
+    when(coordinator.createProcedure(any(), eq(procName), eq(procData), anyListOf(String.class)))
       .thenReturn(spy);
 
     // use the passed controller responses