Posted to commits@knox.apache.org by mo...@apache.org on 2017/09/01 13:17:42 UTC

[44/64] [partial] knox git commit: KNOX-998 - Refactoring save 1

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProviderTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProviderTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProviderTest.java
deleted file mode 100644
index 87a63f4..0000000
--- a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProviderTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.ha.provider.impl;
-
-import org.apache.hadoop.gateway.ha.provider.HaDescriptor;
-import org.apache.hadoop.gateway.ha.provider.HaProvider;
-import org.junit.Test;
-
-import java.util.ArrayList;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.isIn;
-import static org.junit.Assert.*;
-
-public class DefaultHaProviderTest {
-
-   @Test
-   public void testDescriptor() {
-      try {
-         new DefaultHaProvider(null);
-         fail("provider construction should have failed with null descriptor");
-      } catch (IllegalArgumentException e) {
-      }
-      HaDescriptor descriptor = new DefaultHaDescriptor();
-      HaProvider provider = new DefaultHaProvider(descriptor);
-      assertNotNull(provider.getHaDescriptor());
-      descriptor.addServiceConfig(new DefaultHaServiceConfig("foo"));
-      assertTrue(provider.isHaEnabled("foo"));
-   }
-
-   @Test
-   public void testAddingService() {
-      HaDescriptor descriptor = new DefaultHaDescriptor();
-      HaProvider provider = new DefaultHaProvider(descriptor);
-      ArrayList<String> urls = new ArrayList<String>();
-      urls.add("http://host1");
-      urls.add("http://host2");
-      provider.addHaService("foo", urls);
-      assertNull(provider.getActiveURL("bar"));
-      String url = provider.getActiveURL("foo");
-      assertNotNull(url);
-      assertThat(url, isIn(urls));
-   }
-
-   @Test
-   public void testActiveUrl() {
-      HaDescriptor descriptor = new DefaultHaDescriptor();
-      HaProvider provider = new DefaultHaProvider(descriptor);
-      ArrayList<String> urls = new ArrayList<String>();
-      String url1 = "http://host1";
-      urls.add(url1);
-      String url2 = "http://host2";
-      urls.add(url2);
-      String url3 = "http://host3";
-      urls.add(url3);
-      String serviceName = "foo";
-      provider.addHaService(serviceName, urls);
-      assertEquals(url1, provider.getActiveURL(serviceName));
-      provider.markFailedURL(serviceName, url1);
-      assertEquals(url2, provider.getActiveURL(serviceName));
-      provider.markFailedURL(serviceName, url2);
-      assertEquals(url3, provider.getActiveURL(serviceName));
-      provider.markFailedURL(serviceName, url3);
-      assertEquals(url1, provider.getActiveURL(serviceName));
-      provider.setActiveURL(serviceName, url3);
-      assertEquals(url3, provider.getActiveURL(serviceName));
-      provider.setActiveURL(serviceName, url2);
-      assertEquals(url2, provider.getActiveURL(serviceName));
-   }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManagerTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManagerTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManagerTest.java
deleted file mode 100644
index 6c07d23..0000000
--- a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManagerTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.ha.provider.impl;
-
-import org.junit.Test;
-
-import java.util.ArrayList;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-public class DefaultURLManagerTest {
-
-   @Test
-   public void testActiveURLManagement() {
-      ArrayList<String> urls = new ArrayList<>();
-      String url1 = "http://host1";
-      urls.add(url1);
-      String url2 = "http://host2";
-      urls.add(url2);
-      DefaultURLManager manager = new DefaultURLManager();
-      manager.setURLs(urls);
-      assertTrue(manager.getURLs().containsAll(urls));
-      assertEquals(url1, manager.getActiveURL());
-      manager.markFailed(url1);
-      assertEquals(url2, manager.getActiveURL());
-      manager.markFailed(url2);
-      assertEquals(url1, manager.getActiveURL());
-   }
-
-   @Test
-   public void testMarkingFailedURL() {
-      ArrayList<String> urls = new ArrayList<>();
-      String url1 = "http://host1:4555";
-      urls.add(url1);
-      String url2 = "http://host2:1234";
-      urls.add(url2);
-      String url3 = "http://host1:1234";
-      urls.add(url3);
-      String url4 = "http://host2:4555";
-      urls.add(url4);
-      DefaultURLManager manager = new DefaultURLManager();
-      manager.setURLs(urls);
-      assertTrue(manager.getURLs().containsAll(urls));
-      assertEquals(url1, manager.getActiveURL());
-      manager.markFailed(url1);
-      assertEquals(url2, manager.getActiveURL());
-      manager.markFailed(url1);
-      assertEquals(url2, manager.getActiveURL());
-      manager.markFailed(url3);
-      assertEquals(url2, manager.getActiveURL());
-      manager.markFailed(url4);
-      assertEquals(url2, manager.getActiveURL());
-      manager.markFailed(url2);
-      assertEquals(url3, manager.getActiveURL());
-   }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HS2ZookeeperURLManagerTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HS2ZookeeperURLManagerTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HS2ZookeeperURLManagerTest.java
deleted file mode 100644
index 05c31b6..0000000
--- a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HS2ZookeeperURLManagerTest.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.ha.provider.impl;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.curator.test.TestingCluster;
-import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
-import org.apache.hadoop.gateway.ha.provider.URLManager;
-import org.apache.hadoop.gateway.ha.provider.URLManagerLoader;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-public class HS2ZookeeperURLManagerTest {
-
-  private TestingCluster cluster;
-  private HS2ZookeeperURLManager manager;
-
-  @Before
-  public void setup() throws Exception {
-    cluster = new TestingCluster(3);
-    cluster.start();
-
-    CuratorFramework zooKeeperClient =
-        CuratorFrameworkFactory.builder().connectString(cluster.getConnectString())
-            .retryPolicy(new ExponentialBackoffRetry(1000, 3)).build();
-
-    String host1 = "hive.server2.authentication=NONE;hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice;" +
-        "hive.server2.thrift.http.port=10001;hive.server2.thrift.bind.host=host1;hive.server2.use.SSL=true";
-    String host2 = "hive.server2.authentication=NONE;hive.server2.transport.mode=http;hive.server2.thrift.http.path=foobar;" +
-        "hive.server2.thrift.http.port=10002;hive.server2.thrift.bind.host=host2;hive.server2.use.SSL=false";
-    String host3 = "hive.server2.authentication=NONE;hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice;" +
-        "hive.server2.thrift.http.port=10003;hive.server2.thrift.bind.host=host3;hive.server2.use.SSL=false";
-    String host4 = "hive.server2.authentication=NONE;hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice;" +
-        "hive.server2.thrift.http.port=10004;hive.server2.thrift.bind.host=host4;hive.server2.use.SSL=true";
-    zooKeeperClient.start();
-    zooKeeperClient.create().forPath("/hiveServer2");
-    zooKeeperClient.create().forPath("/hiveServer2/host1", host1.getBytes());
-    zooKeeperClient.create().forPath("/hiveServer2/host2", host2.getBytes());
-    zooKeeperClient.create().forPath("/hiveServer2/host3", host3.getBytes());
-    zooKeeperClient.create().forPath("/hiveServer2/host4", host4.getBytes());
-    zooKeeperClient.close();
-    manager = new HS2ZookeeperURLManager();
-    HaServiceConfig config = new DefaultHaServiceConfig("HIVE");
-    config.setEnabled(true);
-    config.setZookeeperEnsemble(cluster.getConnectString());
-    config.setZookeeperNamespace("hiveServer2");
-    manager.setConfig(config);
-
-  }
-
-  @After
-  public void teardown() throws IOException {
-    cluster.stop();
-  }
-
-  @Test
-  public void testActiveURLManagement() throws Exception {
-    List<String> urls = manager.getURLs();
-    Assert.assertNotNull(urls);
-    String url1 = urls.get( 0 ); //"https://host4:10004/cliservice";
-    String url2 = urls.get( 1 ); //"http://host3:10003/cliservice";
-    String url3 = urls.get( 2 ); //"http://host2:10002/foobar";
-    assertEquals(url1, urls.get(0));
-    assertEquals(url1, manager.getActiveURL());
-    manager.markFailed(url1);
-    assertEquals(url2, manager.getActiveURL());
-    manager.markFailed(url2);
-    assertEquals(url3, manager.getActiveURL());
-  }
-
-  @Test
-  public void testMarkingFailedURL() {
-    List<String> urls = manager.getURLs();
-    String url1 = urls.get(0); //"https://host4:10004/cliservice";
-    urls.add(url1);
-    String url2 = urls.get(1); //"http://host3:10003/cliservice";
-    urls.add(url2);
-    String url3 = urls.get(2); //"http://host2:10002/foobar";
-    urls.add(url3);
-    String url4 = urls.get(3); //"https://host1:10001/cliservice";
-    urls.add(url4);
-    assertEquals(url1, manager.getActiveURL());
-    manager.markFailed(url1);
-    assertEquals(url2, manager.getActiveURL());
-    manager.markFailed(url1);
-    assertEquals(url2, manager.getActiveURL());
-    manager.markFailed(url3);
-    assertEquals(url2, manager.getActiveURL());
-    manager.markFailed(url4);
-    assertEquals(url2, manager.getActiveURL());
-    manager.markFailed(url2);
-    //now the urls should get re-looked up
-    assertEquals(url1, manager.getActiveURL());
-  }
-
-  @Test
-  public void testHS2URLManagerLoading() {
-    HaServiceConfig config = new DefaultHaServiceConfig("HIVE");
-    config.setEnabled(true);
-    config.setZookeeperEnsemble(cluster.getConnectString());
-    config.setZookeeperNamespace("hiveServer2");
-    URLManager manager = URLManagerLoader.loadURLManager(config);
-    Assert.assertNotNull(manager);
-    Assert.assertTrue(manager instanceof HS2ZookeeperURLManager);
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactoryTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactoryTest.java
deleted file mode 100644
index 55622c1..0000000
--- a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactoryTest.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.ha.provider.impl;
-
-import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
-import org.junit.Test;
-
-import static org.junit.Assert.*;
-
-public class HaDescriptorFactoryTest {
-
-   @Test
-   public void testCreateDescriptor() {
-      assertNotNull(HaDescriptorFactory.createDescriptor());
-   }
-
-   @Test
-   public void testCreateServiceConfig() {
-      HaServiceConfig serviceConfig = HaDescriptorFactory.createServiceConfig("foo", "enabled=true;maxFailoverAttempts=42;failoverSleep=50;maxRetryAttempts=1;retrySleep=1000");
-      assertNotNull(serviceConfig);
-      assertTrue(serviceConfig.isEnabled());
-      assertEquals("foo", serviceConfig.getServiceName());
-      assertEquals(42, serviceConfig.getMaxFailoverAttempts());
-      assertEquals(50, serviceConfig.getFailoverSleep());
-      assertEquals(1, serviceConfig.getMaxRetryAttempts());
-      assertEquals(1000, serviceConfig.getRetrySleep());
-
-      serviceConfig = HaDescriptorFactory.createServiceConfig("bar", "false", "3", "1000", "5", "3000", null, null);
-      assertNotNull(serviceConfig);
-      assertFalse(serviceConfig.isEnabled());
-      assertEquals("bar", serviceConfig.getServiceName());
-      assertEquals(3, serviceConfig.getMaxFailoverAttempts());
-      assertEquals(1000, serviceConfig.getFailoverSleep());
-      assertEquals(5, serviceConfig.getMaxRetryAttempts());
-      assertEquals(3000, serviceConfig.getRetrySleep());
-
-   }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManagerTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManagerTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManagerTest.java
deleted file mode 100644
index a2f423e..0000000
--- a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManagerTest.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.ha.provider.impl;
-
-import org.apache.hadoop.gateway.ha.provider.HaDescriptor;
-import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
-import org.junit.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.StringWriter;
-
-import static org.junit.Assert.*;
-import static org.xmlmatchers.XmlMatchers.hasXPath;
-import static org.xmlmatchers.transform.XmlConverters.the;
-
-public class HaDescriptorManagerTest {
-
-   @Test
-   public void testDescriptorLoad() throws IOException {
-      String xml = "<ha><service name='foo' maxFailoverAttempts='42' failoverSleep='4000' maxRetryAttempts='2' retrySleep='2213' enabled='false'/>" +
-            "<service name='bar' failoverLimit='3' enabled='true'/></ha>";
-      ByteArrayInputStream inputStream = new ByteArrayInputStream(xml.getBytes());
-      HaDescriptor descriptor = HaDescriptorManager.load(inputStream);
-      assertNotNull(descriptor);
-      assertEquals(1, descriptor.getEnabledServiceNames().size());
-      HaServiceConfig config = descriptor.getServiceConfig("foo");
-      assertNotNull(config);
-      assertEquals("foo", config.getServiceName());
-      assertEquals(42, config.getMaxFailoverAttempts());
-      assertEquals(4000, config.getFailoverSleep());
-      assertEquals(2, config.getMaxRetryAttempts());
-      assertEquals(2213, config.getRetrySleep());
-      assertFalse(config.isEnabled());
-      config = descriptor.getServiceConfig("bar");
-      assertTrue(config.isEnabled());
-   }
-
-   @Test
-   public void testDescriptorDefaults() throws IOException {
-      String xml = "<ha><service name='foo'/></ha>";
-      ByteArrayInputStream inputStream = new ByteArrayInputStream(xml.getBytes());
-      HaDescriptor descriptor = HaDescriptorManager.load(inputStream);
-      assertNotNull(descriptor);
-      assertEquals(1, descriptor.getEnabledServiceNames().size());
-      HaServiceConfig config = descriptor.getServiceConfig("foo");
-      assertNotNull(config);
-      assertEquals("foo", config.getServiceName());
-      assertEquals(HaServiceConfigConstants.DEFAULT_MAX_FAILOVER_ATTEMPTS, config.getMaxFailoverAttempts());
-      assertEquals(HaServiceConfigConstants.DEFAULT_FAILOVER_SLEEP, config.getFailoverSleep());
-      assertEquals(HaServiceConfigConstants.DEFAULT_MAX_RETRY_ATTEMPTS, config.getMaxRetryAttempts());
-      assertEquals(HaServiceConfigConstants.DEFAULT_RETRY_SLEEP, config.getRetrySleep());
-      assertEquals(HaServiceConfigConstants.DEFAULT_ENABLED, config.isEnabled());
-   }
-
-   @Test
-   public void testDescriptorStoring() throws IOException {
-      HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
-      descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig("foo", "false", "42", "1000", "3", "3000", "foo:2181,bar:2181", "hiveserver2"));
-      descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig("bar", "true", "3", "5000", "5", "8000", null, null));
-      StringWriter writer = new StringWriter();
-      HaDescriptorManager.store(descriptor, writer);
-      String descriptorXml = writer.toString();
-      String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n" +
-            "<ha>\n" +
-            "  <service enabled=\"false\" failoverSleep=\"1000\" maxFailoverAttempts=\"42\" maxRetryAttempts=\"3\" name=\"foo\" retrySleep=\"3000\" zookeeperEnsemble=\"foo:2181,bar:2181\" zookeeperNamespace=\"hiveserver2\"/>\n" +
-            "  <service enabled=\"true\" failoverSleep=\"5000\" maxFailoverAttempts=\"3\" maxRetryAttempts=\"5\" name=\"bar\" retrySleep=\"8000\"/>\n" +
-            "</ha>\n";
-      assertThat( the( xml ), hasXPath( "/ha/service[@enabled='false' and @failoverSleep='1000' and @maxFailoverAttempts='42' and @maxRetryAttempts='3' and @name='foo' and @retrySleep='3000' and @zookeeperEnsemble='foo:2181,bar:2181' and @zookeeperNamespace='hiveserver2']" ) );
-      assertThat( the( xml ), hasXPath( "/ha/service[@enabled='true' and @failoverSleep='5000' and @maxFailoverAttempts='3' and @maxRetryAttempts='5' and @name='bar' and @retrySleep='8000']" ) );
-   }
-
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/MockURLManager.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/MockURLManager.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/MockURLManager.java
deleted file mode 100644
index f11785f..0000000
--- a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/MockURLManager.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.ha.provider.impl;
-
-import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
-
-public class MockURLManager extends DefaultURLManager {
-
-  HaServiceConfig config;
-
-  @Override
-  public boolean supportsConfig(HaServiceConfig config) {
-    return config.getServiceName().equalsIgnoreCase("mock-test");
-  }
-
-  @Override
-  public void setConfig(HaServiceConfig config) {
-    this.config = config;
-    super.setConfig(config);
-  }
-
-  public HaServiceConfig getConfig() {
-    return config;
-  }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/URLManagerLoaderTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/URLManagerLoaderTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/URLManagerLoaderTest.java
deleted file mode 100644
index 6aadfec..0000000
--- a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/URLManagerLoaderTest.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.ha.provider.impl;
-
-import org.apache.hadoop.gateway.ha.provider.URLManager;
-import org.apache.hadoop.gateway.ha.provider.URLManagerLoader;
-import org.junit.Assert;
-import org.junit.Test;
-
-public class URLManagerLoaderTest {
-
-  @Test
-  public void testURLManagerLoader() {
-    DefaultHaServiceConfig serviceConfig = new DefaultHaServiceConfig("mock-test");
-    URLManager manager = URLManagerLoader.loadURLManager(serviceConfig);
-    Assert.assertNotNull(manager);
-    Assert.assertTrue(manager instanceof MockURLManager);
-    Assert.assertNotNull(((MockURLManager) manager).getConfig());
-    Assert.assertEquals("mock-test", ((MockURLManager) manager).getConfig().getServiceName());
-  }
-
-  @Test
-  public void testDefaultURLManager() {
-    DefaultHaServiceConfig serviceConfig = new DefaultHaServiceConfig("nothing like this exists");
-    URLManager manager = URLManagerLoader.loadURLManager(serviceConfig);
-    Assert.assertNotNull(manager);
-    Assert.assertTrue(manager instanceof DefaultURLManager);
-    manager = URLManagerLoader.loadURLManager(null);
-    Assert.assertNotNull(manager);
-    Assert.assertTrue(manager instanceof DefaultURLManager);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributorTest.java b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributorTest.java
new file mode 100644
index 0000000..f28baaf
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributorTest.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.ha.deploy;
+
+import org.apache.knox.gateway.deploy.ProviderDeploymentContributor;
+import org.junit.Test;
+
+import java.util.Iterator;
+import java.util.ServiceLoader;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.fail;
+
+
+public class HaProviderDeploymentContributorTest {
+
+   @Test
+   public void testServiceLoader() throws Exception {
+      ServiceLoader loader = ServiceLoader.load( ProviderDeploymentContributor.class );
+      Iterator iterator = loader.iterator();
+      assertThat( "Service iterator empty.", iterator.hasNext() );
+      while( iterator.hasNext() ) {
+         Object object = iterator.next();
+         if( object instanceof HaProviderDeploymentContributor ) {
+            return;
+         }
+      }
+      fail( "Failed to find " + HaProviderDeploymentContributor.class.getName() + " via service loader." );
+   }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/dispatch/DefaultHaDispatchTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/dispatch/DefaultHaDispatchTest.java b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/dispatch/DefaultHaDispatchTest.java
new file mode 100644
index 0000000..0f19e79
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/dispatch/DefaultHaDispatchTest.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.ha.dispatch;
+
+import org.apache.knox.gateway.ha.provider.HaDescriptor;
+import org.apache.knox.gateway.ha.provider.HaProvider;
+import org.apache.knox.gateway.ha.provider.HaServletContextListener;
+import org.apache.knox.gateway.ha.provider.impl.DefaultHaProvider;
+import org.apache.knox.gateway.ha.provider.impl.HaDescriptorFactory;
+import org.apache.knox.gateway.servlet.SynchronousServletOutputStreamAdapter;
+import org.apache.http.client.methods.HttpRequestBase;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.params.BasicHttpParams;
+import org.easymock.EasyMock;
+import org.easymock.IAnswer;
+import org.junit.Assert;
+import org.junit.Test;
+
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class DefaultHaDispatchTest {
+
+  @Test
+  public void testConnectivityFailover() throws Exception {
+    String serviceName = "OOZIE";
+    HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
+    descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig(serviceName, "true", "1", "1000", "2", "1000", null, null));
+    HaProvider provider = new DefaultHaProvider(descriptor);
+    URI uri1 = new URI( "http://unreachable-host" );
+    URI uri2 = new URI( "http://reachable-host" );
+    ArrayList<String> urlList = new ArrayList<String>();
+    urlList.add(uri1.toString());
+    urlList.add(uri2.toString());
+    provider.addHaService(serviceName, urlList);
+    FilterConfig filterConfig = EasyMock.createNiceMock(FilterConfig.class);
+    ServletContext servletContext = EasyMock.createNiceMock(ServletContext.class);
+
+    EasyMock.expect(filterConfig.getServletContext()).andReturn(servletContext).anyTimes();
+    EasyMock.expect(servletContext.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(provider).anyTimes();
+
+    BasicHttpParams params = new BasicHttpParams();
+
+    HttpUriRequest outboundRequest = EasyMock.createNiceMock(HttpRequestBase.class);
+    EasyMock.expect(outboundRequest.getMethod()).andReturn( "GET" ).anyTimes();
+    EasyMock.expect(outboundRequest.getURI()).andReturn( uri1  ).anyTimes();
+    EasyMock.expect(outboundRequest.getParams()).andReturn( params ).anyTimes();
+
+    HttpServletRequest inboundRequest = EasyMock.createNiceMock(HttpServletRequest.class);
+    EasyMock.expect(inboundRequest.getRequestURL()).andReturn( new StringBuffer(uri2.toString()) ).once();
+    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter")).andReturn(new AtomicInteger(0)).once();
+    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter")).andReturn(new AtomicInteger(1)).once();
+
+    HttpServletResponse outboundResponse = EasyMock.createNiceMock(HttpServletResponse.class);
+    EasyMock.expect(outboundResponse.getOutputStream()).andAnswer( new IAnswer<SynchronousServletOutputStreamAdapter>() {
+      @Override
+      public SynchronousServletOutputStreamAdapter answer() throws Throwable {
+        return new SynchronousServletOutputStreamAdapter() {
+          @Override
+          public void write( int b ) throws IOException {
+            throw new IOException( "unreachable-host" );
+          }
+        };
+      }
+    }).once();
+    EasyMock.replay(filterConfig, servletContext, outboundRequest, inboundRequest, outboundResponse);
+    Assert.assertEquals(uri1.toString(), provider.getActiveURL(serviceName));
+    DefaultHaDispatch dispatch = new DefaultHaDispatch();
+    dispatch.setHttpClient(new DefaultHttpClient());
+    dispatch.setHaProvider(provider);
+    dispatch.setServiceRole(serviceName);
+    dispatch.init();
+    long startTime = System.currentTimeMillis();
+    try {
+      dispatch.executeRequest(outboundRequest, inboundRequest, outboundResponse);
+    } catch (IOException e) {
+      //this is expected after the failover limit is reached
+    }
+    long elapsedTime = System.currentTimeMillis() - startTime;
+    Assert.assertEquals(uri2.toString(), provider.getActiveURL(serviceName));
+    //test to make sure the sleep took place
+    Assert.assertTrue(elapsedTime > 1000);
+  }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/DefaultHaProviderTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/DefaultHaProviderTest.java b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/DefaultHaProviderTest.java
new file mode 100644
index 0000000..90c03e5
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/DefaultHaProviderTest.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.ha.provider.impl;
+
+import org.apache.knox.gateway.ha.provider.HaDescriptor;
+import org.apache.knox.gateway.ha.provider.HaProvider;
+import org.junit.Test;
+
+import java.util.ArrayList;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.isIn;
+import static org.junit.Assert.*;
+
+public class DefaultHaProviderTest {
+
+   @Test
+   public void testDescriptor() {
+      try {
+         new DefaultHaProvider(null);
+         fail("provider construction should have failed with null descriptor");
+      } catch (IllegalArgumentException e) {
+      }
+      HaDescriptor descriptor = new DefaultHaDescriptor();
+      HaProvider provider = new DefaultHaProvider(descriptor);
+      assertNotNull(provider.getHaDescriptor());
+      descriptor.addServiceConfig(new DefaultHaServiceConfig("foo"));
+      assertTrue(provider.isHaEnabled("foo"));
+   }
+
+   @Test
+   public void testAddingService() {
+      HaDescriptor descriptor = new DefaultHaDescriptor();
+      HaProvider provider = new DefaultHaProvider(descriptor);
+      ArrayList<String> urls = new ArrayList<String>();
+      urls.add("http://host1");
+      urls.add("http://host2");
+      provider.addHaService("foo", urls);
+      assertNull(provider.getActiveURL("bar"));
+      String url = provider.getActiveURL("foo");
+      assertNotNull(url);
+      assertThat(url, isIn(urls));
+   }
+
+   @Test
+   public void testActiveUrl() {
+      HaDescriptor descriptor = new DefaultHaDescriptor();
+      HaProvider provider = new DefaultHaProvider(descriptor);
+      ArrayList<String> urls = new ArrayList<String>();
+      String url1 = "http://host1";
+      urls.add(url1);
+      String url2 = "http://host2";
+      urls.add(url2);
+      String url3 = "http://host3";
+      urls.add(url3);
+      String serviceName = "foo";
+      provider.addHaService(serviceName, urls);
+      assertEquals(url1, provider.getActiveURL(serviceName));
+      provider.markFailedURL(serviceName, url1);
+      assertEquals(url2, provider.getActiveURL(serviceName));
+      provider.markFailedURL(serviceName, url2);
+      assertEquals(url3, provider.getActiveURL(serviceName));
+      provider.markFailedURL(serviceName, url3);
+      assertEquals(url1, provider.getActiveURL(serviceName));
+      provider.setActiveURL(serviceName, url3);
+      assertEquals(url3, provider.getActiveURL(serviceName));
+      provider.setActiveURL(serviceName, url2);
+      assertEquals(url2, provider.getActiveURL(serviceName));
+   }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/DefaultURLManagerTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/DefaultURLManagerTest.java b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/DefaultURLManagerTest.java
new file mode 100644
index 0000000..c8b6c58
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/DefaultURLManagerTest.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.ha.provider.impl;
+
+import org.junit.Test;
+
+import java.util.ArrayList;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class DefaultURLManagerTest {
+
+   @Test
+   public void testActiveURLManagement() {
+      ArrayList<String> urls = new ArrayList<>();
+      String url1 = "http://host1";
+      urls.add(url1);
+      String url2 = "http://host2";
+      urls.add(url2);
+      DefaultURLManager manager = new DefaultURLManager();
+      manager.setURLs(urls);
+      assertTrue(manager.getURLs().containsAll(urls));
+      assertEquals(url1, manager.getActiveURL());
+      manager.markFailed(url1);
+      assertEquals(url2, manager.getActiveURL());
+      manager.markFailed(url2);
+      assertEquals(url1, manager.getActiveURL());
+   }
+
+   @Test
+   public void testMarkingFailedURL() {
+      ArrayList<String> urls = new ArrayList<>();
+      String url1 = "http://host1:4555";
+      urls.add(url1);
+      String url2 = "http://host2:1234";
+      urls.add(url2);
+      String url3 = "http://host1:1234";
+      urls.add(url3);
+      String url4 = "http://host2:4555";
+      urls.add(url4);
+      DefaultURLManager manager = new DefaultURLManager();
+      manager.setURLs(urls);
+      assertTrue(manager.getURLs().containsAll(urls));
+      assertEquals(url1, manager.getActiveURL());
+      manager.markFailed(url1);
+      assertEquals(url2, manager.getActiveURL());
+      manager.markFailed(url1);
+      assertEquals(url2, manager.getActiveURL());
+      manager.markFailed(url3);
+      assertEquals(url2, manager.getActiveURL());
+      manager.markFailed(url4);
+      assertEquals(url2, manager.getActiveURL());
+      manager.markFailed(url2);
+      assertEquals(url3, manager.getActiveURL());
+   }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HS2ZookeeperURLManagerTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HS2ZookeeperURLManagerTest.java b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HS2ZookeeperURLManagerTest.java
new file mode 100644
index 0000000..53f1e5e
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HS2ZookeeperURLManagerTest.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.ha.provider.impl;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.curator.test.TestingCluster;
+import org.apache.knox.gateway.ha.provider.HaServiceConfig;
+import org.apache.knox.gateway.ha.provider.URLManager;
+import org.apache.knox.gateway.ha.provider.URLManagerLoader;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class HS2ZookeeperURLManagerTest {
+
+  private TestingCluster cluster;
+  private HS2ZookeeperURLManager manager;
+
+  @Before
+  public void setup() throws Exception {
+    cluster = new TestingCluster(3);
+    cluster.start();
+
+    CuratorFramework zooKeeperClient =
+        CuratorFrameworkFactory.builder().connectString(cluster.getConnectString())
+            .retryPolicy(new ExponentialBackoffRetry(1000, 3)).build();
+
+    String host1 = "hive.server2.authentication=NONE;hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice;" +
+        "hive.server2.thrift.http.port=10001;hive.server2.thrift.bind.host=host1;hive.server2.use.SSL=true";
+    String host2 = "hive.server2.authentication=NONE;hive.server2.transport.mode=http;hive.server2.thrift.http.path=foobar;" +
+        "hive.server2.thrift.http.port=10002;hive.server2.thrift.bind.host=host2;hive.server2.use.SSL=false";
+    String host3 = "hive.server2.authentication=NONE;hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice;" +
+        "hive.server2.thrift.http.port=10003;hive.server2.thrift.bind.host=host3;hive.server2.use.SSL=false";
+    String host4 = "hive.server2.authentication=NONE;hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice;" +
+        "hive.server2.thrift.http.port=10004;hive.server2.thrift.bind.host=host4;hive.server2.use.SSL=true";
+    zooKeeperClient.start();
+    zooKeeperClient.create().forPath("/hiveServer2");
+    zooKeeperClient.create().forPath("/hiveServer2/host1", host1.getBytes());
+    zooKeeperClient.create().forPath("/hiveServer2/host2", host2.getBytes());
+    zooKeeperClient.create().forPath("/hiveServer2/host3", host3.getBytes());
+    zooKeeperClient.create().forPath("/hiveServer2/host4", host4.getBytes());
+    zooKeeperClient.close();
+    manager = new HS2ZookeeperURLManager();
+    HaServiceConfig config = new DefaultHaServiceConfig("HIVE");
+    config.setEnabled(true);
+    config.setZookeeperEnsemble(cluster.getConnectString());
+    config.setZookeeperNamespace("hiveServer2");
+    manager.setConfig(config);
+
+  }
+
+  @After
+  public void teardown() throws IOException {
+    cluster.stop();
+  }
+
+  @Test
+  public void testActiveURLManagement() throws Exception {
+    List<String> urls = manager.getURLs();
+    Assert.assertNotNull(urls);
+    String url1 = urls.get( 0 ); //"https://host4:10004/cliservice";
+    String url2 = urls.get( 1 ); //"http://host3:10003/cliservice";
+    String url3 = urls.get( 2 ); //"http://host2:10002/foobar";
+    assertEquals(url1, urls.get(0));
+    assertEquals(url1, manager.getActiveURL());
+    manager.markFailed(url1);
+    assertEquals(url2, manager.getActiveURL());
+    manager.markFailed(url2);
+    assertEquals(url3, manager.getActiveURL());
+  }
+
+  @Test
+  public void testMarkingFailedURL() {
+    List<String> urls = manager.getURLs();
+    String url1 = urls.get(0); //"https://host4:10004/cliservice";
+    urls.add(url1);
+    String url2 = urls.get(1); //"http://host3:10003/cliservice";
+    urls.add(url2);
+    String url3 = urls.get(2); //"http://host2:10002/foobar";
+    urls.add(url3);
+    String url4 = urls.get(3); //"https://host1:10001/cliservice";
+    urls.add(url4);
+    assertEquals(url1, manager.getActiveURL());
+    manager.markFailed(url1);
+    assertEquals(url2, manager.getActiveURL());
+    manager.markFailed(url1);
+    assertEquals(url2, manager.getActiveURL());
+    manager.markFailed(url3);
+    assertEquals(url2, manager.getActiveURL());
+    manager.markFailed(url4);
+    assertEquals(url2, manager.getActiveURL());
+    manager.markFailed(url2);
+    //now the urls should get re-looked up
+    assertEquals(url1, manager.getActiveURL());
+  }
+
+  @Test
+  public void testHS2URLManagerLoading() {
+    HaServiceConfig config = new DefaultHaServiceConfig("HIVE");
+    config.setEnabled(true);
+    config.setZookeeperEnsemble(cluster.getConnectString());
+    config.setZookeeperNamespace("hiveServer2");
+    URLManager manager = URLManagerLoader.loadURLManager(config);
+    Assert.assertNotNull(manager);
+    Assert.assertTrue(manager instanceof HS2ZookeeperURLManager);
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HaDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HaDescriptorFactoryTest.java b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HaDescriptorFactoryTest.java
new file mode 100644
index 0000000..bf306e9
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HaDescriptorFactoryTest.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.ha.provider.impl;
+
+import org.apache.knox.gateway.ha.provider.HaServiceConfig;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class HaDescriptorFactoryTest {
+
+   @Test
+   public void testCreateDescriptor() {
+      assertNotNull(HaDescriptorFactory.createDescriptor());
+   }
+
+   @Test
+   public void testCreateServiceConfig() {
+      HaServiceConfig serviceConfig = HaDescriptorFactory.createServiceConfig("foo", "enabled=true;maxFailoverAttempts=42;failoverSleep=50;maxRetryAttempts=1;retrySleep=1000");
+      assertNotNull(serviceConfig);
+      assertTrue(serviceConfig.isEnabled());
+      assertEquals("foo", serviceConfig.getServiceName());
+      assertEquals(42, serviceConfig.getMaxFailoverAttempts());
+      assertEquals(50, serviceConfig.getFailoverSleep());
+      assertEquals(1, serviceConfig.getMaxRetryAttempts());
+      assertEquals(1000, serviceConfig.getRetrySleep());
+
+      serviceConfig = HaDescriptorFactory.createServiceConfig("bar", "false", "3", "1000", "5", "3000", null, null);
+      assertNotNull(serviceConfig);
+      assertFalse(serviceConfig.isEnabled());
+      assertEquals("bar", serviceConfig.getServiceName());
+      assertEquals(3, serviceConfig.getMaxFailoverAttempts());
+      assertEquals(1000, serviceConfig.getFailoverSleep());
+      assertEquals(5, serviceConfig.getMaxRetryAttempts());
+      assertEquals(3000, serviceConfig.getRetrySleep());
+
+   }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HaDescriptorManagerTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HaDescriptorManagerTest.java b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HaDescriptorManagerTest.java
new file mode 100644
index 0000000..c9ed2c7
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/HaDescriptorManagerTest.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.ha.provider.impl;
+
+import org.apache.knox.gateway.ha.provider.HaDescriptor;
+import org.apache.knox.gateway.ha.provider.HaServiceConfig;
+import org.junit.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.StringWriter;
+
+import static org.junit.Assert.*;
+import static org.xmlmatchers.XmlMatchers.hasXPath;
+import static org.xmlmatchers.transform.XmlConverters.the;
+
+public class HaDescriptorManagerTest {
+
+   @Test
+   public void testDescriptorLoad() throws IOException {
+      String xml = "<ha><service name='foo' maxFailoverAttempts='42' failoverSleep='4000' maxRetryAttempts='2' retrySleep='2213' enabled='false'/>" +
+            "<service name='bar' failoverLimit='3' enabled='true'/></ha>";
+      ByteArrayInputStream inputStream = new ByteArrayInputStream(xml.getBytes());
+      HaDescriptor descriptor = HaDescriptorManager.load(inputStream);
+      assertNotNull(descriptor);
+      assertEquals(1, descriptor.getEnabledServiceNames().size());
+      HaServiceConfig config = descriptor.getServiceConfig("foo");
+      assertNotNull(config);
+      assertEquals("foo", config.getServiceName());
+      assertEquals(42, config.getMaxFailoverAttempts());
+      assertEquals(4000, config.getFailoverSleep());
+      assertEquals(2, config.getMaxRetryAttempts());
+      assertEquals(2213, config.getRetrySleep());
+      assertFalse(config.isEnabled());
+      config = descriptor.getServiceConfig("bar");
+      assertTrue(config.isEnabled());
+   }
+
+   @Test
+   public void testDescriptorDefaults() throws IOException {
+      String xml = "<ha><service name='foo'/></ha>";
+      ByteArrayInputStream inputStream = new ByteArrayInputStream(xml.getBytes());
+      HaDescriptor descriptor = HaDescriptorManager.load(inputStream);
+      assertNotNull(descriptor);
+      assertEquals(1, descriptor.getEnabledServiceNames().size());
+      HaServiceConfig config = descriptor.getServiceConfig("foo");
+      assertNotNull(config);
+      assertEquals("foo", config.getServiceName());
+      assertEquals(HaServiceConfigConstants.DEFAULT_MAX_FAILOVER_ATTEMPTS, config.getMaxFailoverAttempts());
+      assertEquals(HaServiceConfigConstants.DEFAULT_FAILOVER_SLEEP, config.getFailoverSleep());
+      assertEquals(HaServiceConfigConstants.DEFAULT_MAX_RETRY_ATTEMPTS, config.getMaxRetryAttempts());
+      assertEquals(HaServiceConfigConstants.DEFAULT_RETRY_SLEEP, config.getRetrySleep());
+      assertEquals(HaServiceConfigConstants.DEFAULT_ENABLED, config.isEnabled());
+   }
+
+   @Test
+   public void testDescriptorStoring() throws IOException {
+      HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
+      descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig("foo", "false", "42", "1000", "3", "3000", "foo:2181,bar:2181", "hiveserver2"));
+      descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig("bar", "true", "3", "5000", "5", "8000", null, null));
+      StringWriter writer = new StringWriter();
+      HaDescriptorManager.store(descriptor, writer);
+      String descriptorXml = writer.toString();
+      String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n" +
+            "<ha>\n" +
+            "  <service enabled=\"false\" failoverSleep=\"1000\" maxFailoverAttempts=\"42\" maxRetryAttempts=\"3\" name=\"foo\" retrySleep=\"3000\" zookeeperEnsemble=\"foo:2181,bar:2181\" zookeeperNamespace=\"hiveserver2\"/>\n" +
+            "  <service enabled=\"true\" failoverSleep=\"5000\" maxFailoverAttempts=\"3\" maxRetryAttempts=\"5\" name=\"bar\" retrySleep=\"8000\"/>\n" +
+            "</ha>\n";
+      assertThat( the( xml ), hasXPath( "/ha/service[@enabled='false' and @failoverSleep='1000' and @maxFailoverAttempts='42' and @maxRetryAttempts='3' and @name='foo' and @retrySleep='3000' and @zookeeperEnsemble='foo:2181,bar:2181' and @zookeeperNamespace='hiveserver2']" ) );
+      assertThat( the( xml ), hasXPath( "/ha/service[@enabled='true' and @failoverSleep='5000' and @maxFailoverAttempts='3' and @maxRetryAttempts='5' and @name='bar' and @retrySleep='8000']" ) );
+   }
+
+
+}
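
Note: the load/store round trip exercised by HaDescriptorManagerTest can be sketched outside of JUnit as below. The API calls are the same ones used in the test above; the service name "WEBHDFS" and the attribute values are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringWriter;

import org.apache.knox.gateway.ha.provider.HaDescriptor;
import org.apache.knox.gateway.ha.provider.HaServiceConfig;
import org.apache.knox.gateway.ha.provider.impl.HaDescriptorFactory;
import org.apache.knox.gateway.ha.provider.impl.HaDescriptorManager;

public class HaDescriptorRoundTrip {
  public static void main(String[] args) throws IOException {
    // Build a descriptor programmatically, as testDescriptorStoring does.
    HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
    descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig(
        "WEBHDFS", "true", "3", "1000", "2", "1000", null, null));

    // Serialize the descriptor to XML ...
    StringWriter writer = new StringWriter();
    HaDescriptorManager.store(descriptor, writer);

    // ... and read it back; attributes that are omitted fall back to the
    // HaServiceConfigConstants defaults checked in testDescriptorDefaults.
    HaDescriptor reloaded = HaDescriptorManager.load(
        new ByteArrayInputStream(writer.toString().getBytes("UTF-8")));
    HaServiceConfig config = reloaded.getServiceConfig("WEBHDFS");
    System.out.println(config.getServiceName() + " failover attempts: "
        + config.getMaxFailoverAttempts());
  }
}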

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/MockURLManager.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/MockURLManager.java b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/MockURLManager.java
new file mode 100644
index 0000000..f10eb26
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/MockURLManager.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.ha.provider.impl;
+
+import org.apache.knox.gateway.ha.provider.HaServiceConfig;
+
+public class MockURLManager extends DefaultURLManager {
+
+  HaServiceConfig config;
+
+  @Override
+  public boolean supportsConfig(HaServiceConfig config) {
+    return config.getServiceName().equalsIgnoreCase("mock-test");
+  }
+
+  @Override
+  public void setConfig(HaServiceConfig config) {
+    this.config = config;
+    super.setConfig(config);
+  }
+
+  public HaServiceConfig getConfig() {
+    return config;
+  }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/URLManagerLoaderTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/URLManagerLoaderTest.java b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/URLManagerLoaderTest.java
new file mode 100644
index 0000000..6dc014a
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/provider/impl/URLManagerLoaderTest.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.ha.provider.impl;
+
+import org.apache.knox.gateway.ha.provider.URLManager;
+import org.apache.knox.gateway.ha.provider.URLManagerLoader;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class URLManagerLoaderTest {
+
+  @Test
+  public void testURLManagerLoader() {
+    DefaultHaServiceConfig serviceConfig = new DefaultHaServiceConfig("mock-test");
+    URLManager manager = URLManagerLoader.loadURLManager(serviceConfig);
+    Assert.assertNotNull(manager);
+    Assert.assertTrue(manager instanceof MockURLManager);
+    Assert.assertNotNull(((MockURLManager) manager).getConfig());
+    Assert.assertEquals("mock-test", ((MockURLManager) manager).getConfig().getServiceName());
+  }
+
+  @Test
+  public void testDefaultURLManager() {
+    DefaultHaServiceConfig serviceConfig = new DefaultHaServiceConfig("nothing like this exists");
+    URLManager manager = URLManagerLoader.loadURLManager(serviceConfig);
+    Assert.assertNotNull(manager);
+    Assert.assertTrue(manager instanceof DefaultURLManager);
+    manager = URLManagerLoader.loadURLManager(null);
+    Assert.assertNotNull(manager);
+    Assert.assertTrue(manager instanceof DefaultURLManager);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/resources/META-INF/services/org.apache.hadoop.gateway.ha.provider.URLManager
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/resources/META-INF/services/org.apache.hadoop.gateway.ha.provider.URLManager b/gateway-provider-ha/src/test/resources/META-INF/services/org.apache.hadoop.gateway.ha.provider.URLManager
deleted file mode 100644
index ea210ce..0000000
--- a/gateway-provider-ha/src/test/resources/META-INF/services/org.apache.hadoop.gateway.ha.provider.URLManager
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.ha.provider.impl.MockURLManager
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-ha/src/test/resources/META-INF/services/org.apache.knox.gateway.ha.provider.URLManager
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/resources/META-INF/services/org.apache.knox.gateway.ha.provider.URLManager b/gateway-provider-ha/src/test/resources/META-INF/services/org.apache.knox.gateway.ha.provider.URLManager
new file mode 100644
index 0000000..4ccaa33
--- /dev/null
+++ b/gateway-provider-ha/src/test/resources/META-INF/services/org.apache.knox.gateway.ha.provider.URLManager
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.knox.gateway.ha.provider.impl.MockURLManager
\ No newline at end of file
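
Note: this ServiceLoader registration is how URLManagerLoaderTest ends up with a MockURLManager for the "mock-test" service. URLManagerLoader itself is not part of this hunk; based on the registration file above and the supportsConfig() override in MockURLManager, its lookup presumably behaves roughly like the sketch below. The class and method names here are stand-ins, not the committed implementation.

import java.util.ServiceLoader;

import org.apache.knox.gateway.ha.provider.HaServiceConfig;
import org.apache.knox.gateway.ha.provider.URLManager;
import org.apache.knox.gateway.ha.provider.impl.DefaultURLManager;

public final class UrlManagerLookupSketch {

  // Rough stand-in for URLManagerLoader.loadURLManager(config).
  public static URLManager load(HaServiceConfig config) {
    if (config != null) {
      // Scan every implementation registered under
      // META-INF/services/org.apache.knox.gateway.ha.provider.URLManager
      // and pick the first one that supports this service config.
      for (URLManager candidate : ServiceLoader.load(URLManager.class)) {
        if (candidate.supportsConfig(config)) {
          candidate.setConfig(config);
          return candidate;
        }
      }
    }
    // Otherwise fall back to the default manager, matching the assertions
    // in URLManagerLoaderTest#testDefaultURLManager.
    URLManager fallback = new DefaultURLManager();
    if (config != null) {
      fallback.setConfig(config);
    }
    return fallback;
  }
}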

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/IdentityAsserterMessages.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/IdentityAsserterMessages.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/IdentityAsserterMessages.java
deleted file mode 100644
index c4ada6b..0000000
--- a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/IdentityAsserterMessages.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway;
-
-import org.apache.hadoop.gateway.i18n.messages.Message;
-import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
-import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-
-@Messages(logger="org.apache.hadoop.gateway")
-public interface IdentityAsserterMessages {
-
-  @Message( level = MessageLevel.WARN, text = "Skipping unencodable parameter {0}={1}, {2}: {3}" )
-  void skippingUnencodableParameter( String name, String value, String encoding, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
-
-}
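
Note: this deleted interface (presumably recreated under the new org.apache.knox.gateway package elsewhere in this refactoring) follows the Knox i18n pattern, where an implementation of the @Messages interface is obtained from the messages factory. A minimal usage sketch against the old package names from this file; the class and field names are illustrative:

import org.apache.hadoop.gateway.IdentityAsserterMessages;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;

public class IdentityAsserterMessagesUsage {

  // The i18n framework supplies the implementation of the @Messages interface.
  private static final IdentityAsserterMessages LOG =
      MessagesFactory.get(IdentityAsserterMessages.class);

  void logSkippedParameter(String name, String value, Exception e) {
    // Emits the WARN message declared on the interface; the stack trace is
    // attached at DEBUG level per the @StackTrace annotation.
    LOG.skippingUnencodableParameter(name, value, "UTF-8", e);
  }
}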

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAsserterDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAsserterDeploymentContributor.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAsserterDeploymentContributor.java
deleted file mode 100644
index 15ae296..0000000
--- a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAsserterDeploymentContributor.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.identityasserter.common.filter;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.topology.Provider;
-import org.apache.hadoop.gateway.topology.Service;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-public abstract class AbstractIdentityAsserterDeploymentContributor extends ProviderDeploymentContributorBase {
-
-  @Override
-  public String getRole() {
-    return "identity-assertion";
-  }
-
-  @Override
-  public void contributeFilter( DeploymentContext context, Provider provider, Service service, 
-      ResourceDescriptor resource, List<FilterParamDescriptor> params ) {
-    params = buildFilterInitParms(provider, resource, params);
-    resource.addFilter().name(getName()).role(getRole()).impl(getFilterClassname()).params(params);
-  }
-
-  public List<FilterParamDescriptor> buildFilterInitParms(Provider provider,
-      ResourceDescriptor resource, List<FilterParamDescriptor> params) {
-    // blindly add all the provider params as filter init params
-    if (params == null) {
-      params = new ArrayList<FilterParamDescriptor>();
-    }
-    Map<String, String> providerParams = provider.getParams();
-    for(Entry<String, String> entry : providerParams.entrySet()) {
-      params.add( resource.createFilterParam().name(entry.getKey().toLowerCase()).value(entry.getValue()));
-    }
-    return params;
-  }
-
-  protected abstract String getFilterClassname();
-}
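
Note: concrete identity-assertion providers extend this contributor and only supply a provider name plus the filter class to deploy; the role and the copying of provider params into filter init params come from the code above. A hypothetical subclass, for illustration only, written against the pre-rename package shown in this file:

package org.apache.hadoop.gateway.identityasserter.common.filter;

// Illustrative only; not part of this commit. The filter class name below is
// a placeholder for whatever javax.servlet.Filter the provider deploys.
public class ExampleIdentityAsserterDeploymentContributor
    extends AbstractIdentityAsserterDeploymentContributor {

  @Override
  public String getName() {
    // Matches the provider <name> element used in topology descriptors.
    return "ExampleAsserter";
  }

  @Override
  protected String getFilterClassname() {
    return "org.example.ExampleIdentityAssertionFilter";
  }
}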

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAssertionFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAssertionFilter.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAssertionFilter.java
deleted file mode 100644
index 7575b36..0000000
--- a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAssertionFilter.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.identityasserter.common.filter;
-
-import java.io.IOException;
-import java.security.AccessController;
-import java.security.Principal;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-import java.util.Arrays;
-import java.util.Set;
-
-import javax.security.auth.Subject;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-
-import org.apache.hadoop.gateway.audit.api.Action;
-import org.apache.hadoop.gateway.audit.api.ActionOutcome;
-import org.apache.hadoop.gateway.audit.api.AuditService;
-import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
-import org.apache.hadoop.gateway.audit.api.Auditor;
-import org.apache.hadoop.gateway.audit.api.ResourceType;
-import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
-import org.apache.hadoop.gateway.filter.security.AbstractIdentityAssertionBase;
-import org.apache.hadoop.gateway.i18n.GatewaySpiResources;
-import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
-import org.apache.hadoop.gateway.security.GroupPrincipal;
-import org.apache.hadoop.gateway.security.ImpersonatedPrincipal;
-import org.apache.hadoop.gateway.security.PrimaryPrincipal;
-
-/**
- *
- */
-public abstract class AbstractIdentityAssertionFilter extends
-  AbstractIdentityAssertionBase implements Filter {
-
-  private static final GatewaySpiResources RES = ResourcesFactory.get( GatewaySpiResources.class );
-  private static AuditService auditService = AuditServiceFactory.getAuditService();
-  private static Auditor auditor = auditService.getAuditor(
-        AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
-        AuditConstants.KNOX_COMPONENT_NAME );
-
-  /**
-   * 
-   */
-  public AbstractIdentityAssertionFilter() {
-    super();
-  }
-
-  /**
-   * This method returns a String[] of new group principal names to use
-   * based on implementation specific mapping or lookup mechanisms.
-   * Returning null means that whatever set of GroupPrincipals is in the
-   * provided Subject is sufficient to use and no additional mapping is required.
-   * @param mappedPrincipalName username for the authenticated identity - post mapUserPrincipal mapping.
-   * @param subject the existing Subject from the authentication event which may or may not contain GroupPrincipals.
-   * @return String[] of new principal names to use as GroupPrincipals or null.
-   */
-  public abstract String[] mapGroupPrincipals(String mappedPrincipalName, Subject subject);
-
-  /**
-   * This method is used to map the username of the authenticated identity to some other
-   * principal name based on an implementation specific mechanism. It will either return
-   * a new principal name or the provided principal name if there is no mapping required.
-   * @param principalName
-   * @return new username or the provided principalName
-   */
-  public abstract String mapUserPrincipal(String principalName);
-
-  /**
-   * @param request the (possibly wrapped) request to continue the filter chain with
-   * @param response
-   * @param chain
-   * @param mappedPrincipalName
-   * @param groups
-   */
-  protected void continueChainAsPrincipal(HttpServletRequestWrapper request, ServletResponse response,
-      FilterChain chain, String mappedPrincipalName, String[] groups) throws IOException,
-      ServletException {
-        Subject subject = null;
-        Principal impersonationPrincipal = null;
-        Principal primaryPrincipal = null;
-        
-        // get the current subject and determine whether we need another doAs with 
-        // an impersonatedPrincipal and/or mapped group principals
-        boolean impersonationNeeded = false;
-        boolean groupsMapped = false;
-        
-        // look up the current Java Subject and associated group principals
-        Subject currentSubject = Subject.getSubject(AccessController.getContext());
-        Set<?> currentGroups = currentSubject.getPrincipals(GroupPrincipal.class);
-        
-        primaryPrincipal = (PrimaryPrincipal) currentSubject.getPrincipals(PrimaryPrincipal.class).toArray()[0];
-        if (primaryPrincipal != null) {
-          if (!primaryPrincipal.getName().equals(mappedPrincipalName)) {
-            impersonationNeeded = true;
-            auditService.getContext().setProxyUsername( mappedPrincipalName );
-            auditor.audit( Action.IDENTITY_MAPPING, primaryPrincipal.getName(), 
-                ResourceType.PRINCIPAL, ActionOutcome.SUCCESS, RES.effectiveUser(mappedPrincipalName) );
-          }
-        }
-        else {
-          // something is amiss - authentication/federation providers should have run
-          // before identity assertion and should have ensured that the appropriate
-          // principals were added to the current subject
-          // TODO: log as appropriate
-          primaryPrincipal = new PrimaryPrincipal(((HttpServletRequest) request).getUserPrincipal().getName());
-        }
-        
-        groupsMapped = groups != null || !currentGroups.isEmpty();
-        
-        if (impersonationNeeded || groupsMapped) {
-          // a new Subject and doAs are needed to apply the mapped identity and/or groups
-          subject = new Subject();
-          Set<Principal> principals = subject.getPrincipals();
-          principals.add(primaryPrincipal);
-          
-          // map group principals from current Subject into newly created Subject
-          for (Object obj : currentGroups) {
-            principals.add((Principal)obj);
-          }
-          
-          if (impersonationNeeded) {
-            impersonationPrincipal = new ImpersonatedPrincipal(mappedPrincipalName);
-            subject.getPrincipals().add(impersonationPrincipal);
-          }
-          if (groupsMapped) {
-            addMappedGroupsToSubject(mappedPrincipalName, groups, subject);
-          }
-          doAs(request, response, chain, subject);
-        }
-        else {
-          doFilterInternal(request, response, chain);
-        }
-      }
-
-  private void doAs(final ServletRequest request, final ServletResponse response, final FilterChain chain, Subject subject)
-      throws IOException, ServletException {
-    try {
-      Subject.doAs(
-        subject,
-        new PrivilegedExceptionAction<Object>() {
-          public Object run() throws Exception {
-            doFilterInternal(request, response, chain);
-            return null;
-          }
-        });
-    }
-    catch (PrivilegedActionException e) {
-      Throwable t = e.getCause();
-      if (t instanceof IOException) {
-        throw (IOException) t;
-      }
-      else if (t instanceof ServletException) {
-        throw (ServletException) t;
-      }
-      else {
-        throw new ServletException(t);
-      }
-    }
-  }
-
-  private void addMappedGroupsToSubject(String mappedPrincipalName, String[] groups, Subject subject) {
-    if (groups != null) {
-      auditor.audit( Action.IDENTITY_MAPPING, mappedPrincipalName, ResourceType.PRINCIPAL, 
-          ActionOutcome.SUCCESS, RES.groupsList( Arrays.toString( groups ) ) );
-
-      for (int i = 0; i < groups.length; i++) {
-        subject.getPrincipals().add(new GroupPrincipal(groups[i]));
-      }
-    }
-  }
-
-  private void doFilterInternal(ServletRequest request, ServletResponse response, FilterChain chain)
-      throws IOException, ServletException {
-    chain.doFilter(request, response);
-  }
-}
\ No newline at end of file
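
Note: when impersonation and group mapping are both needed, continueChainAsPrincipal above builds a fresh Subject and runs the rest of the chain under it via Subject.doAs. The sketch below only shows what that Subject ends up containing; the user and group names are made up.

import java.security.Principal;

import javax.security.auth.Subject;

import org.apache.hadoop.gateway.security.GroupPrincipal;
import org.apache.hadoop.gateway.security.ImpersonatedPrincipal;
import org.apache.hadoop.gateway.security.PrimaryPrincipal;

public class AssertedSubjectSketch {
  public static void main(String[] args) {
    Subject subject = new Subject();
    // The originally authenticated user stays on as the primary principal ...
    subject.getPrincipals().add(new PrimaryPrincipal("guest"));
    // ... the asserted identity rides along as an impersonated principal ...
    subject.getPrincipals().add(new ImpersonatedPrincipal("hdfs"));
    // ... and any mapped groups become group principals.
    subject.getPrincipals().add(new GroupPrincipal("admin"));

    for (Principal p : subject.getPrincipals()) {
      System.out.println(p.getClass().getSimpleName() + ": " + p.getName());
    }
  }
}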

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/CommonIdentityAssertionFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/CommonIdentityAssertionFilter.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/CommonIdentityAssertionFilter.java
deleted file mode 100644
index 06dbfb2..0000000
--- a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/CommonIdentityAssertionFilter.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.identityasserter.common.filter;
-
-import javax.security.auth.Subject;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAssertionFilter;
-import org.apache.hadoop.gateway.security.principal.PrincipalMappingException;
-import org.apache.hadoop.gateway.security.principal.SimplePrincipalMapper;
-
-import java.io.IOException;
-import java.security.AccessController;
-
-public class CommonIdentityAssertionFilter extends AbstractIdentityAssertionFilter {
-  private static final String GROUP_PRINCIPAL_MAPPING = "group.principal.mapping";
-  private static final String PRINCIPAL_MAPPING = "principal.mapping";
-  private SimplePrincipalMapper mapper = new SimplePrincipalMapper();
-
-  /* (non-Javadoc)
-   * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
-   */
-  @Override
-  public void init(FilterConfig filterConfig) throws ServletException {
-    String principalMapping = filterConfig.getInitParameter(PRINCIPAL_MAPPING);
-    if (principalMapping == null || principalMapping.isEmpty()) {
-      principalMapping = filterConfig.getServletContext().getInitParameter(PRINCIPAL_MAPPING);
-    }
-    String groupPrincipalMapping = filterConfig.getInitParameter(GROUP_PRINCIPAL_MAPPING);
-    if (groupPrincipalMapping == null || groupPrincipalMapping.isEmpty()) {
-      groupPrincipalMapping = filterConfig.getServletContext().getInitParameter(GROUP_PRINCIPAL_MAPPING);
-    }
-    if (principalMapping != null && !principalMapping.isEmpty() || groupPrincipalMapping != null && !groupPrincipalMapping.isEmpty()) {
-      try {
-        mapper.loadMappingTable(principalMapping, groupPrincipalMapping);
-      } catch (PrincipalMappingException e) {
-        throw new ServletException("Unable to load principal mapping table.", e);
-      }
-    }
-  }
-
-  /* (non-Javadoc)
-   * @see javax.servlet.Filter#destroy()
-   */
-  @Override
-  public void destroy() {
-  }
-
-  /**
-   * Obtain the standard javax.security.auth.Subject, retrieve the caller principal,
-   * map it to the identity to be asserted as appropriate, and create the provider-specific
-   * assertion token. Add the assertion token to the request.
-   */
-  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) 
-      throws IOException, ServletException {
-    Subject subject = Subject.getSubject(AccessController.getContext());
-
-    String principalName = getPrincipalName(subject);
-
-    String mappedPrincipalName = mapUserPrincipalBase(principalName);
-    mappedPrincipalName = mapUserPrincipal(mappedPrincipalName);
-    String[] mappedGroups = mapGroupPrincipalsBase(mappedPrincipalName, subject);
-    String[] groups = mapGroupPrincipals(mappedPrincipalName, subject);
-    groups = combineGroupMappings(mappedGroups, groups);
-
-    HttpServletRequestWrapper wrapper = wrapHttpServletRequest(
-        request, mappedPrincipalName);
-
-    continueChainAsPrincipal(wrapper, response, chain, mappedPrincipalName, groups);
-  }
-
-  /**
-   * @param mappedGroups groups resolved from the principal mapping table (may be null)
-   * @param groups groups returned by the mapGroupPrincipals hook (may be null)
-   * @return the combined group names, or whichever argument is non-null
-   */
-  private String[] combineGroupMappings(String[] mappedGroups, String[] groups) {
-    if (mappedGroups != null && groups != null) {
-      return (String[])ArrayUtils.addAll(mappedGroups, groups);
-    }
-    else {
-      return groups != null ? groups : mappedGroups;
-    }
-  }
-
-  public HttpServletRequestWrapper wrapHttpServletRequest(
-      ServletRequest request, String mappedPrincipalName) {
-    // wrap the request so that the proper principal is returned
-    // from request methods
-    IdentityAsserterHttpServletRequestWrapper wrapper =
-        new IdentityAsserterHttpServletRequestWrapper(
-        (HttpServletRequest)request, 
-        mappedPrincipalName);
-    return wrapper;
-  }
-
-  protected String[] mapGroupPrincipalsBase(String mappedPrincipalName, Subject subject) {
-    return mapper.mapGroupPrincipal(mappedPrincipalName);
-  }
-
-  protected String mapUserPrincipalBase(String principalName) {
-    return mapper.mapUserPrincipal(principalName);
-  }
-
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAssertionFilter#mapGroupPrincipals(java.lang.String, javax.security.auth.Subject)
-   */
-  @Override
-  public String[] mapGroupPrincipals(String mappedPrincipalName, Subject subject) {
-    // NOP
-    return null;
-  }
-
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAssertionFilter#mapUserPrincipal(java.lang.String)
-   */
-  @Override
-  public String mapUserPrincipal(String principalName) {
-    // NOP
-    return principalName;
-  }
-}
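
Note: the two NOP hooks at the end of this class are the extension points; a concrete asserter typically extends CommonIdentityAssertionFilter and overrides only those. A hypothetical subclass, for illustration only (the upper-casing rule is made up):

package org.apache.hadoop.gateway.identityasserter.common.filter;

import java.util.Locale;

import javax.security.auth.Subject;

// Illustrative subclass; not part of this commit.
public class UpperCaseIdentityAssertionFilter extends CommonIdentityAssertionFilter {

  @Override
  public String mapUserPrincipal(String principalName) {
    // Assert the authenticated user as an upper-cased account name.
    return principalName == null ? null : principalName.toUpperCase(Locale.ROOT);
  }

  @Override
  public String[] mapGroupPrincipals(String mappedPrincipalName, Subject subject) {
    // Returning null keeps whatever GroupPrincipals the Subject already carries.
    return null;
  }
}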