Posted to commits@knox.apache.org by mo...@apache.org on 2017/12/14 21:12:48 UTC

[01/49] knox git commit: KNOX-1118 - Remove POC Service Definition for AmbariUI

Repository: knox
Updated Branches:
  refs/heads/KNOX-998-Package_Restructuring e70904b3d -> e766b3b77


KNOX-1118 - Remove POC Service Definition for AmbariUI

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/fa6acbef
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/fa6acbef
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/fa6acbef

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: fa6acbef83700e0fbbbe23fa1ffac8d6b103b94a
Parents: d4b0dc6
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Nov 16 15:31:44 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Nov 16 16:33:00 2017 -0500

----------------------------------------------------------------------
 .../services/ambariui/2.2.1/rewrite.xml         | 104 -------------------
 .../services/ambariui/2.2.1/service.xml         |  92 ----------------
 2 files changed, 196 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/fa6acbef/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/rewrite.xml b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/rewrite.xml
deleted file mode 100644
index 0c99d76..0000000
--- a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/rewrite.xml
+++ /dev/null
@@ -1,104 +0,0 @@
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<rules>
-    <rule dir="IN" name="AMBARIUI/ambari/inbound/root" pattern="*://*:*/**/ambari/">
-        <rewrite template="{$serviceUrl[AMBARIUI]}/"/>
-    </rule>
-    <rule dir="IN" name="AMBARIUI/ambari/inbound/path" pattern="*://*:*/**/ambari/{**}">
-        <rewrite template="{$serviceUrl[AMBARIUI]}/{**}"/>
-    </rule>
-    <rule dir="IN" name="AMBARIUI/ambari/inbound/query" pattern="*://*:*/**/ambari/{**}?{**}">
-        <rewrite template="{$serviceUrl[AMBARIUI]}/{**}?{**}"/>
-    </rule>
-
-    <rule dir="OUT" name="AMBARIUI/ambari/outbound/sitepath">
-        <rewrite template="{$frontend[path]}/"/>
-    </rule>
-
-    <rule dir="OUT" name="AMBARIUI/ambari/outbound/extrapath">
-        <rewrite template="{$frontend[path]}/api/v1"/>
-    </rule>
-    <rule dir="OUT" name="AMBARIUI/ambari/outbound/logohref">
-        <rewrite template="#/main/dashboard"/>
-    </rule>
-    <rule dir="OUT" name="AMBARIUI/ambari/outbound/img" pattern="/img/{**}">
-        <rewrite template="{$frontend[url]}/img/{**}"/>
-    </rule>
-
-    <rule dir="OUT" name="AMBARIUI/ambari/outbound/css">
-        <rewrite template="{$frontend[path]}/stylesheets/{**}"/>
-    </rule>
-    <rule dir="OUT" name="AMBARIUI/ambari/outbound/js">
-        <rewrite template="{$frontend[path]}/javascripts/{**}"/>
-    </rule>
-
-    <filter name="AMBARIUI/ambari/outbound/proxyroot">
-        <content type="*/x-javascript">
-            <apply path="\{proxy_root\}" rule="AMBARIUI/ambari/outbound/sitepath"/>
-        </content>
-        <content type="application/javascript">
-            <apply path="\{proxy_root\}" rule="AMBARIUI/ambari/outbound/sitepath"/>
-        </content>
-    </filter>
-
-    <!-- filter to rewrite api prefix defined in .js from root -->
-    <!-- e.g. /api/v1 -->
-    <filter name="AMBARIUI/ambari/outbound/apiendpoint">
-        <content type="*/x-javascript">
-            <apply path="/api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
-        </content>
-        <content type="application/javascript">
-            <apply path="/api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
-        </content>
-    </filter>
-
-    <filter name="AMBARIUI/ambari/outbound/apiendpoint/html">
-        <content type="text/html">
-            <apply path="/api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
-        </content>
-    </filter>
-
-    <filter name="AMBARIUI/ambari/outbound/apiendpoint/noroot">
-        <content type="*/x-javascript">
-            <apply path="api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
-        </content>
-        <content type="application/javascript">
-            <apply path="api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
-        </content>
-    </filter>
-
-    <filter name="AMBARIUI/ambari/outbound/links">
-        <content type="*/x-javascript">
-            <apply path="/api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
-            <apply path="\{proxy_root\}" rule="AMBARIUI/ambari/outbound/sitepath"/>
-            <apply path="/#/main/dashboard" rule="AMBARIUI/ambari/outbound/logohref"/>
-        </content>
-        <content type="application/javascript">
-            <apply path="/api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
-            <apply path="\{proxy_root\}" rule="AMBARIUI/ambari/outbound/sitepath"/>
-            <apply path="/#/main/dashboard" rule="AMBARIUI/ambari/outbound/logohref"/>
-        </content>
-        <content type="*/html">
-        </content>
-    </filter>
-    <filter name="AMBARIUI/ambari/outbound/mainpage">
-        <content type="*/html">
-            <apply path="stylesheets/{**}.css" rule="AMBARIUI/ambari/outbound/css" />
-            <apply path="javascripts/{**}.js" rule="AMBARIUI/ambari/outbound/js" />
-        </content>
-    </filter>
-</rules>

http://git-wip-us.apache.org/repos/asf/knox/blob/fa6acbef/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
deleted file mode 100644
index ab4ab2b..0000000
--- a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<service role="AMBARIUI" name="ambariui" version="2.2.0">
-    <policies>
-        <policy role="webappsec"/>
-        <policy role="authentication" name="Anonymous"/>
-        <policy role="rewrite"/>
-        <policy role="authorization"/>
-    </policies>
-    <routes>
-        <route path="/ambari">
-            <rewrite apply="AMBARIUI/ambari/inbound/root" to="request.url"/>
-            <rewrite apply="AMBARIUI/ambari/outbound/mainpage" to="response.body"/>
-        </route>
-        <route path="/ambari/**">
-            <rewrite apply="AMBARIUI/ambari/inbound/path" to="request.url"/>
-        </route>
-        <route path="/ambari/**?**">
-            <rewrite apply="AMBARIUI/ambari/inbound/query" to="request.url"/>
-        </route>
-        <route path="/ambari/**/app.js">
-            <rewrite apply="AMBARIUI/ambari/outbound/links" to="response.body"/>
-        </route>
-
-        <!-- Admin View route -->
-        <route path="/ambari/views/ADMIN_VIEW/**/INSTANCE/**/main.js">
-            <rewrite apply="AMBARIUI/ambari/outbound/proxyroot" to="response.body"/>
-        </route>
-
-        <!-- Files view -->
-        <route path="/ambari/views/FILES/**/assets/files-view.js">
-            <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/noroot" to="response.body"/>
-        </route>
-
-        <!-- Capacity Scheduler view -->
-        <route path="/ambari/views/CAPACITY-SCHEDULER/**/javascripts/app.js">
-            <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/noroot" to="response.body"/>
-        </route>
-
-        <!-- Pig view  -->
-        <route path="/ambari/views/PIG/**/javascripts/app.js">
-            <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/noroot" to="response.body"/>
-        </route>
-
-        <!-- Hive view route -->
-        <route path="/ambari/views/HIVE/**/assets/hive.js">
-            <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint" to="response.body"/>
-        </route>
-
-        <!-- Storm View -->
-        <route path="/ambari/views/Storm_Monitoring/**/scripts/models/*.js">
-            <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint" to="response.body"/>
-        </route>
-
-        <route path="/ambari/views/ZEPPELIN/*/*/">
-            <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/html" to="response.body"/>
-        </route>
-
-        <!-- Tez View -->
-        <route path="/ambari/views/TEZ/*/*/ambari-scripts/init-view.js">
-          <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint" to="response.body"/>
-        </route>
-
-        <!-- Hive 2.0 view -->
-        <route path="/ambari/views/HIVE/**/assets/ui.js">
-            <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/noroot" to="response.body"/>
-        </route>
-
-        <!-- SmartSense view -->
-        <route path="/ambari/views/SMARTSENSE/**/assets/hstapp-*.js">
-            <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint" to="response.body"/>
-        </route>
-
-        <!-- No need to rewrite Slider View -->
-    </routes>
-    <dispatch classname="org.apache.hadoop.gateway.dispatch.PassAllHeadersNoEncodingDispatch"/>
-</service>
-
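For context, the AmbariUI service definition removed above was activated by declaring the AMBARIUI role in a topology; the inbound rules in the accompanying rewrite.xml (e.g. AMBARIUI/ambari/inbound/root) then resolve {$serviceUrl[AMBARIUI]} to the URL configured there, so a request ending in /ambari/ is proxied to the Ambari server. A minimal sketch of such a topology entry follows; the host and port are placeholders, not values taken from this commit.

<!-- Hypothetical topology fragment; only the AMBARIUI role name comes from the service definition above. -->
<service>
    <role>AMBARIUI</role>
    <url>http://ambari-host.example.com:8080</url>
</service>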


[36/49] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/test/java/org/apache/knox/gateway/config/impl/GatewayConfigImplTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/config/impl/GatewayConfigImplTest.java
index 06da13d,0000000..4187214
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/config/impl/GatewayConfigImplTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/config/impl/GatewayConfigImplTest.java
@@@ -1,220 -1,0 +1,263 @@@
 +package org.apache.knox.gateway.config.impl;
 +
 +import org.apache.knox.test.TestUtils;
 +import org.hamcrest.CoreMatchers;
 +import org.junit.Test;
 +
 +import java.util.List;
++import java.util.concurrent.TimeUnit;
 +
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +import static org.hamcrest.MatcherAssert.assertThat;
 +import static org.hamcrest.Matchers.hasItems;
 +import static org.hamcrest.Matchers.nullValue;
++import static org.junit.Assert.assertNotEquals;
++import static org.junit.Assert.assertTrue;
++import static org.testng.Assert.assertEquals;
++import static org.testng.Assert.assertFalse;
++import static org.testng.Assert.assertNotNull;
 +
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +public class GatewayConfigImplTest {
 +
 +  @Test( timeout = TestUtils.SHORT_TIMEOUT )
 +  public void testHttpServerSettings() {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +
 +    // Check the defaults.
 +    assertThat( config.getHttpServerRequestBuffer(), is( 16*1024 ) );
 +    assertThat( config.getHttpServerRequestHeaderBuffer(), is( 8*1024 ) );
 +    assertThat( config.getHttpServerResponseBuffer(), is( 32*1024 ) );
 +    assertThat( config.getHttpServerResponseHeaderBuffer(), is( 8*1024 ) );
 +
 +    assertThat( GatewayConfigImpl.HTTP_SERVER_REQUEST_BUFFER, is( "gateway.httpserver.requestBuffer" ) );
 +    assertThat( GatewayConfigImpl.HTTP_SERVER_REQUEST_HEADER_BUFFER, is( "gateway.httpserver.requestHeaderBuffer" ) );
 +    assertThat( GatewayConfigImpl.HTTP_SERVER_RESPONSE_BUFFER, is( "gateway.httpserver.responseBuffer" ) );
 +    assertThat( GatewayConfigImpl.HTTP_SERVER_RESPONSE_HEADER_BUFFER, is( "gateway.httpserver.responseHeaderBuffer" ) );
 +
 +    config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_BUFFER, 32*1024 );
 +    assertThat( config.getHttpServerRequestBuffer(), is( 32*1024 ) );
 +
 +    config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_HEADER_BUFFER, 4*1024 );
 +    assertThat( config.getHttpServerRequestHeaderBuffer(), is( 4*1024 ) );
 +
 +    config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_BUFFER, 16*1024 );
 +    assertThat( config.getHttpServerResponseBuffer(), is( 16*1024 ) );
 +
 +    config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_HEADER_BUFFER, 6*1024 );
 +    assertThat( config.getHttpServerResponseHeaderBuffer(), is( 6*1024 ) );
 +
 +    // Restore the defaults.
 +    config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_BUFFER, 16*1024 );
 +    config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_HEADER_BUFFER, 8*1024 );
 +    config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_BUFFER, 32*1024 );
 +    config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_HEADER_BUFFER, 8*1024 );
 +  }
 +
 +  @Test( timeout = TestUtils.SHORT_TIMEOUT )
 +  public void testGetGatewayDeploymentsBackupVersionLimit() {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    assertThat( config.getGatewayDeploymentsBackupVersionLimit(), is(5) );
 +
 +    config.setInt( config.DEPLOYMENTS_BACKUP_VERSION_LIMIT, 3 );
 +    assertThat( config.getGatewayDeploymentsBackupVersionLimit(), is(3) );
 +
 +    config.setInt( config.DEPLOYMENTS_BACKUP_VERSION_LIMIT, -3 );
 +    assertThat( config.getGatewayDeploymentsBackupVersionLimit(), is(-1) );
 +
 +    config.setInt( config.DEPLOYMENTS_BACKUP_VERSION_LIMIT, 0 );
 +    assertThat( config.getGatewayDeploymentsBackupVersionLimit(), is(0) );
 +  }
 +
 +  @Test( timeout = TestUtils.SHORT_TIMEOUT )
 +  public void testGetGatewayDeploymentsBackupAgeLimit() {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    assertThat( config.getGatewayDeploymentsBackupAgeLimit(), is(-1L) );
 +
 +    config.set( config.DEPLOYMENTS_BACKUP_AGE_LIMIT, "1" );
 +    assertThat( config.getGatewayDeploymentsBackupAgeLimit(), is(86400000L) );
 +
 +    config.set( config.DEPLOYMENTS_BACKUP_AGE_LIMIT, "2" );
 +    assertThat( config.getGatewayDeploymentsBackupAgeLimit(), is(86400000L*2L) );
 +
 +    config.set( config.DEPLOYMENTS_BACKUP_AGE_LIMIT, "0" );
 +    assertThat( config.getGatewayDeploymentsBackupAgeLimit(), is(0L) );
 +
 +    config.set( config.DEPLOYMENTS_BACKUP_AGE_LIMIT, "X" );
 +    assertThat( config.getGatewayDeploymentsBackupAgeLimit(), is(-1L) );
 +  }
 +
 +
 +  @Test
 +  public void testSSLCiphers() {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    List<String> list;
 +
 +    list = config.getIncludedSSLCiphers();
 +    assertThat( list, is(nullValue()) );
 +
 +    config.set( "ssl.include.ciphers", "none" );
 +    assertThat( config.getIncludedSSLCiphers(), is(nullValue()) );
 +
 +    config.set( "ssl.include.ciphers", "" );
 +    assertThat( config.getIncludedSSLCiphers(), is(nullValue()) );
 +
 +    config.set( "ssl.include.ciphers", "ONE" );
 +    assertThat( config.getIncludedSSLCiphers(), is(hasItems("ONE")) );
 +
 +    config.set( "ssl.include.ciphers", " ONE " );
 +    assertThat( config.getIncludedSSLCiphers(), is(hasItems("ONE")) );
 +
 +    config.set( "ssl.include.ciphers", "ONE,TWO" );
 +    assertThat( config.getIncludedSSLCiphers(), is(hasItems("ONE","TWO")) );
 +
 +    config.set( "ssl.include.ciphers", "ONE,TWO,THREE" );
 +    assertThat( config.getIncludedSSLCiphers(), is(hasItems("ONE","TWO","THREE")) );
 +
 +    config.set( "ssl.include.ciphers", " ONE , TWO , THREE " );
 +    assertThat( config.getIncludedSSLCiphers(), is(hasItems("ONE","TWO","THREE")) );
 +
 +    list = config.getExcludedSSLCiphers();
 +    assertThat( list, is(nullValue()) );
 +
 +    config.set( "ssl.exclude.ciphers", "none" );
 +    assertThat( config.getExcludedSSLCiphers(), is(nullValue()) );
 +
 +    config.set( "ssl.exclude.ciphers", "" );
 +    assertThat( config.getExcludedSSLCiphers(), is(nullValue()) );
 +
 +    config.set( "ssl.exclude.ciphers", "ONE" );
 +    assertThat( config.getExcludedSSLCiphers(), is(hasItems("ONE")) );
 +
 +    config.set( "ssl.exclude.ciphers", " ONE " );
 +    assertThat( config.getExcludedSSLCiphers(), is(hasItems("ONE")) );
 +
 +    config.set( "ssl.exclude.ciphers", "ONE,TWO" );
 +    assertThat( config.getExcludedSSLCiphers(), is(hasItems("ONE","TWO")) );
 +
 +    config.set( "ssl.exclude.ciphers", "ONE,TWO,THREE" );
 +    assertThat( config.getExcludedSSLCiphers(), is(hasItems("ONE","TWO","THREE")) );
 +
 +    config.set( "ssl.exclude.ciphers", " ONE , TWO , THREE " );
 +    assertThat( config.getExcludedSSLCiphers(), is(hasItems("ONE","TWO","THREE")) );
 +  }
 +
 +  @Test( timeout = TestUtils.SHORT_TIMEOUT )
 +  public void testGlobalRulesServices() {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    List<String> list;
 +
 +    list = config.getGlobalRulesServices();
 +    assertThat( list, is(notNullValue()) );
 +
 +    assertThat( list, is( CoreMatchers.hasItems("NAMENODE","JOBTRACKER", "WEBHDFS", "WEBHCAT", "OOZIE", "WEBHBASE", "HIVE", "RESOURCEMANAGER")));
 +
 +
 +    config.set( GatewayConfigImpl.GLOBAL_RULES_SERVICES, "none" );
 +    assertThat( config.getGlobalRulesServices(), is( CoreMatchers.hasItems("NAMENODE","JOBTRACKER", "WEBHDFS", "WEBHCAT", "OOZIE", "WEBHBASE", "HIVE", "RESOURCEMANAGER")) );
 +
 +    config.set( GatewayConfigImpl.GLOBAL_RULES_SERVICES, "" );
 +    assertThat( config.getGlobalRulesServices(), is( CoreMatchers.hasItems("NAMENODE","JOBTRACKER", "WEBHDFS", "WEBHCAT", "OOZIE", "WEBHBASE", "HIVE", "RESOURCEMANAGER")) );
 +
 +    config.set( GatewayConfigImpl.GLOBAL_RULES_SERVICES, "ONE" );
 +    assertThat( config.getGlobalRulesServices(), is(hasItems("ONE")) );
 +
 +    config.set( GatewayConfigImpl.GLOBAL_RULES_SERVICES, "ONE,TWO,THREE" );
 +    assertThat( config.getGlobalRulesServices(), is(hasItems("ONE","TWO","THREE")) );
 +
 +    config.set( GatewayConfigImpl.GLOBAL_RULES_SERVICES, " ONE , TWO , THREE " );
 +    assertThat( config.getGlobalRulesServices(), is(hasItems("ONE","TWO","THREE")) );
 +  }
 +
 +  @Test( timeout = TestUtils.SHORT_TIMEOUT )
 +  public void testMetricsSettings() {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    //test defaults
 +    assertThat(config.isMetricsEnabled(), is(false));
 +    assertThat(config.isJmxMetricsReportingEnabled(), is(false));
 +    assertThat(config.isGraphiteMetricsReportingEnabled(), is(false));
 +    assertThat(config.getGraphiteHost(), is("localhost"));
 +    assertThat(config.getGraphitePort(), is(32772));
 +  }
 +  
 +  @Test( timeout = TestUtils.SHORT_TIMEOUT )
 +  public void testGatewayIdleTimeout() {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    long idleTimeout = 0l;
 +    
 +    idleTimeout = config.getGatewayIdleTimeout();
 +    assertThat( idleTimeout, is(300000L));
 +
 +    config.set( GatewayConfigImpl.GATEWAY_IDLE_TIMEOUT, "15000" );
 +    idleTimeout = config.getGatewayIdleTimeout();
 +    assertThat( idleTimeout, is(15000L));
 +  }
 +  
 +  @Test( timeout = TestUtils.SHORT_TIMEOUT )
 +  public void testGatewayServerHeaderEnabled() {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    boolean serverHeaderEnabled = true;
 +    
 +    serverHeaderEnabled = config.isGatewayServerHeaderEnabled();
 +    assertThat( serverHeaderEnabled, is(true));
 +
 +    config.set( GatewayConfigImpl.SERVER_HEADER_ENABLED, "false");
 +    serverHeaderEnabled = config.isGatewayServerHeaderEnabled();
 +    assertThat( serverHeaderEnabled, is(false));
 +  }
 +
++
++  @Test
++  public void testGetRemoteConfigurationRegistryNames() {
++    GatewayConfigImpl config = new GatewayConfigImpl();
++
++    List<String> registryNames = config.getRemoteRegistryConfigurationNames();
++    assertNotNull(registryNames);
++    assertTrue(registryNames.isEmpty());
++
++    config.set(GatewayConfigImpl.CONFIG_REGISTRY_PREFIX + ".test1",
++               "type=ZooKeeper;address=host1:2181;authType=digest;principal=itsme;credentialAlias=testAlias");
++    registryNames = config.getRemoteRegistryConfigurationNames();
++    assertNotNull(registryNames);
++    assertFalse(registryNames.isEmpty());
++    assertEquals(1, registryNames.size());
++
++    config.set(GatewayConfigImpl.CONFIG_REGISTRY_PREFIX + ".test2",
++               "type=ZooKeeper;address=host2:2181,host3:2181,host4:2181");
++    registryNames = config.getRemoteRegistryConfigurationNames();
++    assertNotNull(registryNames);
++    assertFalse(registryNames.isEmpty());
++    assertEquals(registryNames.size(), 2);
++  }
++
++
++  @Test
++  public void testHTTPDefaultTimeouts() {
++    final GatewayConfigImpl config = new GatewayConfigImpl();
++
++    assertNotEquals(config.getHttpClientConnectionTimeout(), -1);
++    assertNotEquals(config.getHttpClientSocketTimeout(), -1);
++
++    assertEquals(TimeUnit.SECONDS.toMillis(20), config.getHttpClientConnectionTimeout());
++    assertEquals(TimeUnit.SECONDS.toMillis(20), config.getHttpClientSocketTimeout());
++
++  }
++
 +}
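The assertions in testHttpServerSettings above confirm that the buffer sizes map to the gateway.httpserver.* property names. A minimal sketch of overriding one of them in gateway-site.xml follows, assuming the standard Hadoop-style configuration format that GatewayConfigImpl reads; only the property name and the 32*1024 value are taken from the test.

<!-- Hypothetical gateway-site.xml fragment. -->
<configuration>
    <property>
        <name>gateway.httpserver.requestBuffer</name>
        <!-- 32*1024 bytes, matching the value exercised in testHttpServerSettings -->
        <value>32768</value>
    </property>
</configuration>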

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
index 408d396,0000000..60cf633
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
@@@ -1,610 -1,0 +1,618 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services.topology;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.commons.io.FilenameUtils;
 +import org.apache.commons.io.IOUtils;
 +import org.apache.commons.io.monitor.FileAlterationListener;
 +import org.apache.commons.io.monitor.FileAlterationMonitor;
 +import org.apache.commons.io.monitor.FileAlterationObserver;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.knox.test.TestUtils;
 +import org.apache.knox.gateway.topology.Param;
 +import org.apache.knox.gateway.topology.Provider;
 +import org.apache.knox.gateway.topology.Topology;
 +import org.apache.knox.gateway.topology.TopologyEvent;
 +import org.apache.knox.gateway.topology.TopologyListener;
 +import org.easymock.EasyMock;
 +import org.junit.After;
 +import org.junit.Before;
 +import org.junit.Test;
 +
 +import java.io.File;
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.io.OutputStream;
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.Collection;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.Iterator;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Set;
 +
 +import static org.easymock.EasyMock.anyObject;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.Matchers.hasItem;
 +import static org.hamcrest.core.IsNull.notNullValue;
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.assertFalse;
 +import static org.junit.Assert.assertNotEquals;
 +import static org.junit.Assert.assertNotNull;
 +import static org.junit.Assert.assertThat;
 +import static org.junit.Assert.assertTrue;
 +
 +public class DefaultTopologyServiceTest {
 +
 +  @Before
 +  public void setUp() throws Exception {
 +  }
 +
 +  @After
 +  public void tearDown() throws Exception {
 +  }
 +
 +  private File createDir() throws IOException {
 +    return TestUtils.createTempDir(this.getClass().getSimpleName() + "-");
 +  }
 +
 +  private File createFile(File parent, String name, String resource, long timestamp) throws IOException {
 +    File file = new File(parent, name);
 +    if (!file.exists()) {
 +      FileUtils.touch(file);
 +    }
 +    InputStream input = ClassLoader.getSystemResourceAsStream(resource);
 +    OutputStream output = FileUtils.openOutputStream(file);
 +    IOUtils.copy(input, output);
 +    //KNOX-685: output.flush();
 +    input.close();
 +    output.close();
 +    file.setLastModified(timestamp);
 +    assertTrue("Failed to create test file " + file.getAbsolutePath(), file.exists());
 +    assertTrue("Failed to populate test file " + file.getAbsolutePath(), file.length() > 0);
 +
 +    return file;
 +  }
 +
 +  @Test
 +  public void testGetTopologies() throws Exception {
 +
 +    File dir = createDir();
 +    File topologyDir = new File(dir, "topologies");
 +
++    File descriptorsDir = new File(dir, "descriptors");
++    descriptorsDir.mkdirs();
++
++    File sharedProvidersDir = new File(dir, "shared-providers");
++    sharedProvidersDir.mkdirs();
++
 +    long time = topologyDir.lastModified();
 +    try {
 +      createFile(topologyDir, "one.xml", "org/apache/knox/gateway/topology/file/topology-one.xml", time);
 +
 +      TestTopologyListener topoListener = new TestTopologyListener();
 +      FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
 +
 +      TopologyService provider = new DefaultTopologyService();
 +      Map<String, String> c = new HashMap<>();
 +
 +      GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
 +      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
-       EasyMock.expect(config.getGatewayConfDir()).andReturn(topologyDir.getParentFile().getAbsolutePath()).anyTimes();
++      EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
++      EasyMock.expect(config.getGatewayProvidersConfigDir()).andReturn(sharedProvidersDir.getAbsolutePath()).anyTimes();
++      EasyMock.expect(config.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
 +      EasyMock.replay(config);
 +
 +      provider.init(config, c);
 +
 +      provider.addTopologyChangeListener(topoListener);
 +
 +      provider.reloadTopologies();
 +
 +      Collection<Topology> topologies = provider.getTopologies();
 +      assertThat(topologies, notNullValue());
 +      assertThat(topologies.size(), is(1));
 +      Topology topology = topologies.iterator().next();
 +      assertThat(topology.getName(), is("one"));
 +      assertThat(topology.getTimestamp(), is(time));
 +      assertThat(topoListener.events.size(), is(1));
 +      topoListener.events.clear();
 +
 +      // Add a file to the directory.
 +      File two = createFile(topologyDir, "two.xml",
 +          "org/apache/knox/gateway/topology/file/topology-two.xml", 1L);
 +      provider.reloadTopologies();
 +      topologies = provider.getTopologies();
 +      assertThat(topologies.size(), is(2));
 +      Set<String> names = new HashSet<>(Arrays.asList("one", "two"));
 +      Iterator<Topology> iterator = topologies.iterator();
 +      topology = iterator.next();
 +      assertThat(names, hasItem(topology.getName()));
 +      names.remove(topology.getName());
 +      topology = iterator.next();
 +      assertThat(names, hasItem(topology.getName()));
 +      names.remove(topology.getName());
 +      assertThat(names.size(), is(0));
 +      assertThat(topoListener.events.size(), is(1));
 +      List<TopologyEvent> events = topoListener.events.get(0);
 +      assertThat(events.size(), is(1));
 +      TopologyEvent event = events.get(0);
 +      assertThat(event.getType(), is(TopologyEvent.Type.CREATED));
 +      assertThat(event.getTopology(), notNullValue());
 +
 +      // Update a file in the directory.
 +      two = createFile(topologyDir, "two.xml",
 +          "org/apache/knox/gateway/topology/file/topology-three.xml", 2L);
 +      provider.reloadTopologies();
 +      topologies = provider.getTopologies();
 +      assertThat(topologies.size(), is(2));
 +      names = new HashSet<>(Arrays.asList("one", "two"));
 +      iterator = topologies.iterator();
 +      topology = iterator.next();
 +      assertThat(names, hasItem(topology.getName()));
 +      names.remove(topology.getName());
 +      topology = iterator.next();
 +      assertThat(names, hasItem(topology.getName()));
 +      names.remove(topology.getName());
 +      assertThat(names.size(), is(0));
 +
 +      // Remove a file from the directory.
 +      two.delete();
 +      provider.reloadTopologies();
 +      topologies = provider.getTopologies();
 +      assertThat(topologies.size(), is(1));
 +      topology = topologies.iterator().next();
 +      assertThat(topology.getName(), is("one"));
 +      assertThat(topology.getTimestamp(), is(time));
 +
 +    } finally {
 +      FileUtils.deleteQuietly(dir);
 +    }
 +  }
 +
 +  /**
 +   * KNOX-1014
 +   *
 +   * Test the lifecycle relationship between simple descriptors and topology files.
 +   *
 +   * N.B. This test depends on the DummyServiceDiscovery extension being configured:
 +   *        org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscovery
 +   */
 +  @Test
 +  public void testSimpleDescriptorsTopologyGeneration() throws Exception {
 +
 +    File dir = createDir();
 +    File topologyDir = new File(dir, "topologies");
 +    topologyDir.mkdirs();
 +
 +    File descriptorsDir = new File(dir, "descriptors");
 +    descriptorsDir.mkdirs();
 +
 +    File sharedProvidersDir = new File(dir, "shared-providers");
 +    sharedProvidersDir.mkdirs();
 +
 +    try {
 +      TestTopologyListener topoListener = new TestTopologyListener();
 +      FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
 +
 +      TopologyService provider = new DefaultTopologyService();
 +      Map<String, String> c = new HashMap<>();
 +
 +      GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
 +      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
 +      EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
 +      EasyMock.replay(config);
 +
 +      provider.init(config, c);
 +      provider.addTopologyChangeListener(topoListener);
 +      provider.reloadTopologies();
 +
 +
 +      // Add a simple descriptor to the descriptors dir to verify topology generation and loading (KNOX-1006)
 +      AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
 +      EasyMock.expect(aliasService.getPasswordFromAliasForGateway(anyObject(String.class))).andReturn(null).anyTimes();
 +      EasyMock.replay(aliasService);
 +      DefaultTopologyService.DescriptorsMonitor dm =
 +              new DefaultTopologyService.DescriptorsMonitor(topologyDir, aliasService);
 +
 +      // Listener to simulate the topologies directory monitor, to notice when a topology has been deleted
 +      provider.addTopologyChangeListener(new TestTopologyDeleteListener((DefaultTopologyService)provider));
 +
 +      // Write out the referenced provider config first
 +      File provCfgFile = createFile(sharedProvidersDir,
 +                                    "ambari-cluster-policy.xml",
 +                                    "org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml",
 +                                    System.currentTimeMillis());
 +      try {
 +        // Create the simple descriptor in the descriptors dir
 +        File simpleDesc = createFile(descriptorsDir,
 +                                     "four.json",
 +                                     "org/apache/knox/gateway/topology/file/simple-topology-four.json",
 +                                     System.currentTimeMillis());
 +
 +        // Trigger the topology generation by noticing the simple descriptor
 +        dm.onFileChange(simpleDesc);
 +
 +        // Load the generated topology
 +        provider.reloadTopologies();
 +        Collection<Topology> topologies = provider.getTopologies();
 +        assertThat(topologies.size(), is(1));
 +        Iterator<Topology> iterator = topologies.iterator();
 +        Topology topology = iterator.next();
 +        assertThat("four", is(topology.getName()));
 +        int serviceCount = topology.getServices().size();
 +        assertEquals("Expected the same number of services as are declared in the simple dscriptor.", 10, serviceCount);
 +
 +        // Overwrite the simple descriptor with a different set of services, and check that the changes are
 +        // propagated to the associated topology
 +        simpleDesc = createFile(descriptorsDir,
 +                                "four.json",
 +                                "org/apache/knox/gateway/topology/file/simple-descriptor-five.json",
 +                                System.currentTimeMillis());
 +        dm.onFileChange(simpleDesc);
 +        provider.reloadTopologies();
 +        topologies = provider.getTopologies();
 +        topology = topologies.iterator().next();
 +        assertNotEquals(serviceCount, topology.getServices().size());
 +        assertEquals(6, topology.getServices().size());
 +
 +        // Delete the simple descriptor, and make sure that the associated topology file is deleted
 +        simpleDesc.delete();
 +        dm.onFileDelete(simpleDesc);
 +        provider.reloadTopologies();
 +        topologies = provider.getTopologies();
 +        assertTrue(topologies.isEmpty());
 +
 +        // Delete a topology file, and make sure that the associated simple descriptor is deleted
 +        // Overwrite the simple descriptor with a different set of services, and check that the changes are
 +        // propagated to the associated topology
 +        simpleDesc = createFile(descriptorsDir,
 +                                "deleteme.json",
 +                                "org/apache/knox/gateway/topology/file/simple-descriptor-five.json",
 +                                System.currentTimeMillis());
 +        dm.onFileChange(simpleDesc);
 +        provider.reloadTopologies();
 +        topologies = provider.getTopologies();
 +        assertFalse(topologies.isEmpty());
 +        topology = topologies.iterator().next();
 +        assertEquals("deleteme", topology.getName());
 +        File topologyFile = new File(topologyDir, topology.getName() + ".xml");
 +        assertTrue(topologyFile.exists());
 +        topologyFile.delete();
 +        provider.reloadTopologies();
 +        assertFalse("Simple descriptor should have been deleted because the associated topology was.",
 +                    simpleDesc.exists());
 +
 +      } finally {
 +        provCfgFile.delete();
 +      }
 +    } finally {
 +      FileUtils.deleteQuietly(dir);
 +    }
 +  }
 +
 +  /**
 +   * KNOX-1014
 +   *
 +   * Test the lifecycle relationship between provider configuration files, simple descriptors, and topology files.
 +   *
 +   * N.B. This test depends on the DummyServiceDiscovery extension being configured:
 +   *        org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscovery
 +   */
 +  @Test
 +  public void testTopologiesUpdateFromProviderConfigChange() throws Exception {
 +    File dir = createDir();
 +    File topologyDir = new File(dir, "topologies");
 +    topologyDir.mkdirs();
 +
 +    File descriptorsDir = new File(dir, "descriptors");
 +    descriptorsDir.mkdirs();
 +
 +    File sharedProvidersDir = new File(dir, "shared-providers");
 +    sharedProvidersDir.mkdirs();
 +
 +    try {
 +      TestTopologyListener topoListener = new TestTopologyListener();
 +      FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
 +
 +      TopologyService ts = new DefaultTopologyService();
 +      Map<String, String> c = new HashMap<>();
 +
 +      GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
 +      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
 +      EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
 +      EasyMock.replay(config);
 +
 +      ts.init(config, c);
 +      ts.addTopologyChangeListener(topoListener);
 +      ts.reloadTopologies();
 +
 +      java.lang.reflect.Field dmField = ts.getClass().getDeclaredField("descriptorsMonitor");
 +      dmField.setAccessible(true);
 +      DefaultTopologyService.DescriptorsMonitor dm = (DefaultTopologyService.DescriptorsMonitor) dmField.get(ts);
 +
 +      // Write out the referenced provider configs first
 +      createFile(sharedProvidersDir,
 +                 "provider-config-one.xml",
 +                 "org/apache/knox/gateway/topology/file/provider-config-one.xml",
 +                 System.currentTimeMillis());
 +
 +      // Create the simple descriptor, which depends on provider-config-one.xml
 +      File simpleDesc = createFile(descriptorsDir,
 +                                   "six.json",
 +                                   "org/apache/knox/gateway/topology/file/simple-descriptor-six.json",
 +                                   System.currentTimeMillis());
 +
 +      // "Notice" the simple descriptor change, and generate a topology based on it
 +      dm.onFileChange(simpleDesc);
 +
 +      // Load the generated topology
 +      ts.reloadTopologies();
 +      Collection<Topology> topologies = ts.getTopologies();
 +      assertThat(topologies.size(), is(1));
 +      Iterator<Topology> iterator = topologies.iterator();
 +      Topology topology = iterator.next();
 +      assertFalse("The Shiro provider is disabled in provider-config-one.xml",
 +                  topology.getProvider("authentication", "ShiroProvider").isEnabled());
 +
 +      // Overwrite the referenced provider configuration with a different ShiroProvider config, and check that the
 +      // changes are propagated to the associated topology
 +      File providerConfig = createFile(sharedProvidersDir,
 +                                       "provider-config-one.xml",
 +                                       "org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml",
 +                                       System.currentTimeMillis());
 +
 +      // "Notice" the simple descriptor change as a result of the referenced config change
 +      dm.onFileChange(simpleDesc);
 +
 +      // Load the generated topology
 +      ts.reloadTopologies();
 +      topologies = ts.getTopologies();
 +      assertFalse(topologies.isEmpty());
 +      topology = topologies.iterator().next();
 +      assertTrue("The Shiro provider is enabled in ambari-cluster-policy.xml",
 +              topology.getProvider("authentication", "ShiroProvider").isEnabled());
 +
 +      // Delete the provider configuration, and make sure that the associated topology file is unaffected.
 +      // The topology file should not be affected because the simple descriptor handling will fail to resolve the
 +      // referenced provider configuration.
 +      providerConfig.delete();     // Delete the file
 +      dm.onFileChange(simpleDesc); // The provider config deletion will trigger a descriptor change notification
 +      ts.reloadTopologies();
 +      topologies = ts.getTopologies();
 +      assertFalse(topologies.isEmpty());
 +      assertTrue("The Shiro provider is enabled in ambari-cluster-policy.xml",
 +              topology.getProvider("authentication", "ShiroProvider").isEnabled());
 +
 +    } finally {
 +      FileUtils.deleteQuietly(dir);
 +    }
 +  }
 +
 +  /**
 +   * KNOX-1039
 +   */
 +  @Test
 +  public void testConfigurationCRUDAPI() throws Exception {
 +    File dir = createDir();
 +    File topologyDir = new File(dir, "topologies");
 +    topologyDir.mkdirs();
 +
 +    File descriptorsDir = new File(dir, "descriptors");
 +    descriptorsDir.mkdirs();
 +
 +    File sharedProvidersDir = new File(dir, "shared-providers");
 +    sharedProvidersDir.mkdirs();
 +
 +    try {
 +      TestTopologyListener topoListener = new TestTopologyListener();
 +      FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
 +
 +      TopologyService ts = new DefaultTopologyService();
 +      Map<String, String> c = new HashMap<>();
 +
 +      GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
 +      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
 +      EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
 +      EasyMock.replay(config);
 +
 +      ts.init(config, c);
 +      ts.addTopologyChangeListener(topoListener);
 +      ts.reloadTopologies();
 +
 +      java.lang.reflect.Field dmField = ts.getClass().getDeclaredField("descriptorsMonitor");
 +      dmField.setAccessible(true);
 +      DefaultTopologyService.DescriptorsMonitor dm = (DefaultTopologyService.DescriptorsMonitor) dmField.get(ts);
 +
 +      final String simpleDescName  = "six.json";
 +      final String provConfOne     = "provider-config-one.xml";
 +      final String provConfTwo     = "ambari-cluster-policy.xml";
 +
 +      // "Deploy" the referenced provider configs first
 +      boolean isDeployed =
 +        ts.deployProviderConfiguration(provConfOne,
 +                FileUtils.readFileToString(new File(ClassLoader.getSystemResource(
 +                    "org/apache/knox/gateway/topology/file/provider-config-one.xml").toURI())));
 +      assertTrue(isDeployed);
 +      File provConfOneFile = new File(sharedProvidersDir, provConfOne);
 +      assertTrue(provConfOneFile.exists());
 +
 +      isDeployed =
 +        ts.deployProviderConfiguration(provConfTwo,
 +                FileUtils.readFileToString(new File(ClassLoader.getSystemResource(
 +                    "org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml").toURI())));
 +      assertTrue(isDeployed);
 +      File provConfTwoFile = new File(sharedProvidersDir, provConfTwo);
 +      assertTrue(provConfTwoFile.exists());
 +
 +      // Validate the provider configurations known by the topology service
 +      Collection<File> providerConfigurations = ts.getProviderConfigurations();
 +      assertNotNull(providerConfigurations);
 +      assertEquals(2, providerConfigurations.size());
 +      assertTrue(providerConfigurations.contains(provConfOneFile));
 +      assertTrue(providerConfigurations.contains(provConfTwoFile));
 +
 +      // "Deploy" the simple descriptor, which depends on provConfOne
 +      isDeployed =
 +        ts.deployDescriptor(simpleDescName,
 +            FileUtils.readFileToString(new File(ClassLoader.getSystemResource(
 +                "org/apache/knox/gateway/topology/file/simple-descriptor-six.json").toURI())));
 +      assertTrue(isDeployed);
 +      File simpleDesc = new File(descriptorsDir, simpleDescName);
 +      assertTrue(simpleDesc.exists());
 +
 +      // Validate the simple descriptors known by the topology service
 +      Collection<File> descriptors = ts.getDescriptors();
 +      assertNotNull(descriptors);
 +      assertEquals(1, descriptors.size());
 +      assertTrue(descriptors.contains(simpleDesc));
 +
 +      // "Notice" the simple descriptor, so the provider configuration dependency relationship is recorded
 +      dm.onFileChange(simpleDesc);
 +
 +      // Attempt to delete the referenced provConfOne
 +      assertFalse("Should not be able to delete a provider configuration that is referenced by one or more descriptors",
 +                  ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfOne)));
 +
 +      // Overwrite the simple descriptor with content that changes the provider config reference to provConfTwo
 +      isDeployed =
 +        ts.deployDescriptor(simpleDescName,
 +              FileUtils.readFileToString(new File(ClassLoader.getSystemResource(
 +                  "org/apache/knox/gateway/topology/file/simple-descriptor-five.json").toURI())));
 +      assertTrue(isDeployed);
 +      assertTrue(simpleDesc.exists());
 +      ts.getProviderConfigurations();
 +
 +      // "Notice" the simple descriptor, so the provider configuration dependency relationship is updated
 +      dm.onFileChange(simpleDesc);
 +
 +      // Attempt to delete the referenced provConfOne
 +      assertTrue("Should be able to delete the provider configuration, now that it's not referenced by any descriptors",
 +                 ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfOne)));
 +
 +      // Re-validate the provider configurations known by the topology service
 +      providerConfigurations = ts.getProviderConfigurations();
 +      assertNotNull(providerConfigurations);
 +      assertEquals(1, providerConfigurations.size());
 +      assertFalse(providerConfigurations.contains(provConfOneFile));
 +      assertTrue(providerConfigurations.contains(provConfTwoFile));
 +
 +      // Attempt to delete the referenced provConfTwo
 +      assertFalse("Should not be able to delete a provider configuration that is referenced by one or more descriptors",
 +                  ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfTwo)));
 +
 +      // Delete the referencing simple descriptor
 +      assertTrue(ts.deleteDescriptor(FilenameUtils.getBaseName(simpleDescName)));
 +      assertFalse(simpleDesc.exists());
 +
 +      // Re-validate the simple descriptors known by the topology service
 +      descriptors = ts.getDescriptors();
 +      assertNotNull(descriptors);
 +      assertTrue(descriptors.isEmpty());
 +
 +      // "Notice" the simple descriptor, so the provider configuration dependency relationship is updated
 +      dm.onFileDelete(simpleDesc);
 +
 +      // Attempt to delete the referenced provConfTwo
 +      assertTrue("Should be able to delete the provider configuration, now that it's not referenced by any descriptors",
 +                 ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfTwo)));
 +
 +      // Re-validate the provider configurations known by the topology service
 +      providerConfigurations = ts.getProviderConfigurations();
 +      assertNotNull(providerConfigurations);
 +      assertTrue(providerConfigurations.isEmpty());
 +
 +    } finally {
 +      FileUtils.deleteQuietly(dir);
 +    }
 +  }
 +
 +  private void kickMonitor(FileAlterationMonitor monitor) {
 +    for (FileAlterationObserver observer : monitor.getObservers()) {
 +      observer.checkAndNotify();
 +    }
 +  }
 +
 +
 +  @Test
 +  public void testProviderParamsOrderIsPreserved() {
 +
 +    Provider provider = new Provider();
 +    String names[] = {"ldapRealm=",
 +        "ldapContextFactory",
 +        "ldapRealm.contextFactory",
 +        "ldapGroupRealm",
 +        "ldapGroupRealm.contextFactory",
 +        "ldapGroupRealm.contextFactory.systemAuthenticationMechanism"
 +    };
 +
 +    Param param = null;
 +    for (String name : names) {
 +      param = new Param();
 +      param.setName(name);
 +      param.setValue(name);
 +      provider.addParam(param);
 +
 +    }
 +    Map<String, String> params = provider.getParams();
 +    Set<String> keySet = params.keySet();
 +    Iterator<String> iter = keySet.iterator();
 +    int i = 0;
 +    while (iter.hasNext()) {
 +      assertTrue(iter.next().equals(names[i++]));
 +    }
 +
 +  }
 +
 +  private class TestTopologyListener implements TopologyListener {
 +
 +    ArrayList<List<TopologyEvent>> events = new ArrayList<List<TopologyEvent>>();
 +
 +    @Override
 +    public void handleTopologyEvent(List<TopologyEvent> events) {
 +      this.events.add(events);
 +    }
 +
 +  }
 +
 +
 +  private class TestTopologyDeleteListener implements TopologyListener {
 +
 +    FileAlterationListener delegate;
 +
 +    TestTopologyDeleteListener(FileAlterationListener delegate) {
 +      this.delegate = delegate;
 +    }
 +
 +    @Override
 +    public void handleTopologyEvent(List<TopologyEvent> events) {
 +      for (TopologyEvent event : events) {
 +        if (event.getType().equals(TopologyEvent.Type.DELETED)) {
 +          delegate.onFileDelete(new File(event.getTopology().getUri()));
 +        }
 +      }
 +    }
 +
 +  }
 +
 +}
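The provider-configuration lifecycle tests above reference shared provider files such as provider-config-one.xml and ambari-cluster-policy.xml. A minimal sketch of what such a shared provider configuration could contain follows; only the authentication role, the ShiroProvider name, and the enabled flag reflect what the tests assert, and the overall layout is an assumption about the shared-providers format.

<!-- Hypothetical shared provider configuration fragment. -->
<gateway>
    <provider>
        <role>authentication</role>
        <name>ShiroProvider</name>
        <enabled>true</enabled>
    </provider>
</gateway>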

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
index df31f3d,0000000..2622f13
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
@@@ -1,681 -1,0 +1,690 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.simple;
 +
 +import java.io.File;
 +import java.io.FileWriter;
 +import java.io.Writer;
 +import java.util.*;
 +
 +import org.junit.Test;
 +import static org.junit.Assert.*;
 +
 +
 +public class SimpleDescriptorFactoryTest {
 +
 +    private enum FileType {
 +        JSON,
-         YAML
++        YAML,
++        YML
 +    }
 +
 +    @Test
 +    public void testParseJSONSimpleDescriptor() throws Exception {
 +        testParseSimpleDescriptor(FileType.JSON);
 +    }
 +
 +    @Test
 +    public void testParseYAMLSimpleDescriptor() throws Exception {
++        testParseSimpleDescriptor(FileType.YML);
 +        testParseSimpleDescriptor(FileType.YAML);
 +    }
 +
 +    @Test
 +    public void testParseJSONSimpleDescriptorWithServiceParams() throws Exception {
 +        testParseSimpleDescriptorWithServiceParams(FileType.JSON);
 +    }
 +
 +    @Test
 +    public void testParseYAMLSimpleDescriptorWithServiceParams() throws Exception {
++        testParseSimpleDescriptorWithServiceParams(FileType.YML);
 +        testParseSimpleDescriptorWithServiceParams(FileType.YAML);
 +    }
 +
 +    @Test
 +    public void testParseJSONSimpleDescriptorWithApplications() throws Exception {
 +        testParseSimpleDescriptorWithApplications(FileType.JSON);
 +    }
 +
 +    @Test
 +    public void testParseYAMLSimpleDescriptorApplications() throws Exception {
++        testParseSimpleDescriptorWithApplications(FileType.YML);
 +        testParseSimpleDescriptorWithApplications(FileType.YAML);
 +    }
 +
 +
 +    @Test
 +    public void testParseJSONSimpleDescriptorWithServicesAndApplications() throws Exception {
 +        testParseSimpleDescriptorWithServicesAndApplications(FileType.JSON);
 +    }
 +
 +    @Test
 +    public void testParseYAMLSimpleDescriptorWithServicesAndApplications() throws Exception {
++        testParseSimpleDescriptorWithServicesAndApplications(FileType.YML);
 +        testParseSimpleDescriptorWithServicesAndApplications(FileType.YAML);
 +    }
 +
 +
 +    private void testParseSimpleDescriptor(FileType type) throws Exception {
 +        final String   discoveryType    = "AMBARI";
 +        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
 +        final String   discoveryUser    = "joeblow";
 +        final String   providerConfig   = "ambari-cluster-policy.xml";
 +        final String   clusterName      = "myCluster";
 +
 +        final Map<String, List<String>> services = new HashMap<>();
 +        services.put("NODEMANAGER", null);
 +        services.put("JOBTRACKER", null);
 +        services.put("RESOURCEMANAGER", null);
 +        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
 +        services.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
 +
 +        String fileName = "test-topology." + getFileExtensionForType(type);
 +        File testFile = null;
 +        try {
 +            testFile = writeDescriptorFile(type,
 +                                           fileName,
 +                                           discoveryType,
 +                                           discoveryAddress,
 +                                           discoveryUser,
 +                                           providerConfig,
 +                                           clusterName,
 +                                           services);
 +            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testFile.getAbsolutePath());
 +            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
 +        } catch (Exception e) {
 +            e.printStackTrace();
 +        } finally {
 +            if (testFile != null) {
 +                try {
 +                    testFile.delete();
 +                } catch (Exception e) {
 +                    // Ignore
 +                }
 +            }
 +        }
 +    }
 +
 +    private void testParseSimpleDescriptorWithServiceParams(FileType type) throws Exception {
 +
 +        final String   discoveryType    = "AMBARI";
 +        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
 +        final String   discoveryUser    = "admin";
 +        final String   providerConfig   = "ambari-cluster-policy.xml";
 +        final String   clusterName      = "myCluster";
 +
 +        final Map<String, List<String>> services = new HashMap<>();
 +        services.put("NODEMANAGER", null);
 +        services.put("JOBTRACKER", null);
 +        services.put("RESOURCEMANAGER", null);
 +        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
 +        services.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
 +        services.put("KNOXSSO", null);
 +        services.put("KNOXTOKEN", null);
 +        services.put("CustomRole", Collections.singletonList("http://c6402.ambari.apache.org:1234"));
 +
 +        final Map<String, Map<String, String>> serviceParams = new HashMap<>();
 +        Map<String, String> knoxSSOParams = new HashMap<>();
 +        knoxSSOParams.put("knoxsso.cookie.secure.only", "true");
 +        knoxSSOParams.put("knoxsso.token.ttl", "100000");
 +        serviceParams.put("KNOXSSO", knoxSSOParams);
 +
 +        Map<String, String> knoxTokenParams = new HashMap<>();
 +        knoxTokenParams.put("knox.token.ttl", "36000000");
 +        knoxTokenParams.put("knox.token.audiences", "tokenbased");
 +        knoxTokenParams.put("knox.token.target.url", "https://localhost:8443/gateway/tokenbased");
 +        serviceParams.put("KNOXTOKEN", knoxTokenParams);
 +
 +        Map<String, String> customRoleParams = new HashMap<>();
 +        customRoleParams.put("custom.param.1", "value1");
 +        customRoleParams.put("custom.param.2", "value2");
 +        serviceParams.put("CustomRole", customRoleParams);
 +
 +        String fileName = "test-topology." + getFileExtensionForType(type);
 +        File testFile = null;
 +        try {
 +            testFile = writeDescriptorFile(type,
 +                                           fileName,
 +                                           discoveryType,
 +                                           discoveryAddress,
 +                                           discoveryUser,
 +                                           providerConfig,
 +                                           clusterName,
 +                                           services,
 +                                           serviceParams);
 +            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testFile.getAbsolutePath());
 +            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services, serviceParams);
 +        } finally {
 +            if (testFile != null) {
 +                try {
 +                    testFile.delete();
 +                } catch (Exception e) {
 +                    // Ignore
 +                }
 +            }
 +        }
 +    }
 +
 +    private void testParseSimpleDescriptorWithApplications(FileType type) throws Exception {
 +
 +        final String   discoveryType    = "AMBARI";
 +        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
 +        final String   discoveryUser    = "admin";
 +        final String   providerConfig   = "ambari-cluster-policy.xml";
 +        final String   clusterName      = "myCluster";
 +
 +        final Map<String, List<String>> apps = new HashMap<>();
 +        apps.put("app-one", null);
 +        apps.put("appTwo", null);
 +        apps.put("thirdApps", null);
 +        apps.put("appfour", Arrays.asList("http://host1:1234", "http://host2:5678", "http://host1:1357"));
 +        apps.put("AppFive", Collections.singletonList("http://host5:8080"));
 +
 +        final Map<String, Map<String, String>> appParams = new HashMap<>();
 +        Map<String, String> oneParams = new HashMap<>();
 +        oneParams.put("appone.cookie.secure.only", "true");
 +        oneParams.put("appone.token.ttl", "100000");
 +        appParams.put("app-one", oneParams);
 +        Map<String, String> fiveParams = new HashMap<>();
 +        fiveParams.put("myproperty", "true");
 +        fiveParams.put("anotherparam", "100000");
 +        appParams.put("AppFive", fiveParams);
 +
 +        String fileName = "test-topology." + getFileExtensionForType(type);
 +        File testFile = null;
 +        try {
 +            testFile = writeDescriptorFile(type,
 +                                           fileName,
 +                                           discoveryType,
 +                                           discoveryAddress,
 +                                           discoveryUser,
 +                                           providerConfig,
 +                                           clusterName,
 +                                           null,
 +                                           null,
 +                                           apps,
 +                                           appParams);
 +            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testFile.getAbsolutePath());
 +            validateSimpleDescriptor(sd,
 +                                     discoveryType,
 +                                     discoveryAddress,
 +                                     providerConfig,
 +                                     clusterName,
 +                                     null,
 +                                     null,
 +                                     apps,
 +                                     appParams);
 +        } finally {
 +            if (testFile != null) {
 +                try {
 +                    testFile.delete();
 +                } catch (Exception e) {
 +                    // Ignore
 +                }
 +            }
 +        }
 +    }
 +
 +    private void testParseSimpleDescriptorWithServicesAndApplications(FileType type) throws Exception {
 +
 +        final String   discoveryType    = "AMBARI";
 +        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
 +        final String   discoveryUser    = "admin";
 +        final String   providerConfig   = "ambari-cluster-policy.xml";
 +        final String   clusterName      = "myCluster";
 +
 +        final Map<String, List<String>> services = new HashMap<>();
 +        services.put("NODEMANAGER", null);
 +        services.put("JOBTRACKER", null);
 +        services.put("RESOURCEMANAGER", null);
 +        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
 +        services.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
 +        services.put("KNOXSSO", null);
 +        services.put("KNOXTOKEN", null);
 +        services.put("CustomRole", Collections.singletonList("http://c6402.ambari.apache.org:1234"));
 +
 +        final Map<String, Map<String, String>> serviceParams = new HashMap<>();
 +        Map<String, String> knoxSSOParams = new HashMap<>();
 +        knoxSSOParams.put("knoxsso.cookie.secure.only", "true");
 +        knoxSSOParams.put("knoxsso.token.ttl", "100000");
 +        serviceParams.put("KNOXSSO", knoxSSOParams);
 +
 +        Map<String, String> knoxTokenParams = new HashMap<>();
 +        knoxTokenParams.put("knox.token.ttl", "36000000");
 +        knoxTokenParams.put("knox.token.audiences", "tokenbased");
 +        knoxTokenParams.put("knox.token.target.url", "https://localhost:8443/gateway/tokenbased");
 +        serviceParams.put("KNOXTOKEN", knoxTokenParams);
 +
 +        Map<String, String> customRoleParams = new HashMap<>();
 +        customRoleParams.put("custom.param.1", "value1");
 +        customRoleParams.put("custom.param.2", "value2");
 +        serviceParams.put("CustomRole", customRoleParams);
 +
 +        final Map<String, List<String>> apps = new HashMap<>();
 +        apps.put("app-one", null);
 +        apps.put("appTwo", null);
 +        apps.put("thirdApps", null);
 +        apps.put("appfour", Arrays.asList("http://host1:1234", "http://host2:5678", "http://host1:1357"));
 +        apps.put("AppFive", Collections.singletonList("http://host5:8080"));
 +
 +        final Map<String, Map<String, String>> appParams = new HashMap<>();
 +        Map<String, String> oneParams = new HashMap<>();
 +        oneParams.put("appone.cookie.secure.only", "true");
 +        oneParams.put("appone.token.ttl", "100000");
 +        appParams.put("app-one", oneParams);
 +        Map<String, String> fiveParams = new HashMap<>();
 +        fiveParams.put("myproperty", "true");
 +        fiveParams.put("anotherparam", "100000");
 +        appParams.put("AppFive", fiveParams);
 +
 +        String fileName = "test-topology." + getFileExtensionForType(type);
 +        File testFile = null;
 +        try {
 +            testFile = writeDescriptorFile(type,
 +                                           fileName,
 +                                           discoveryType,
 +                                           discoveryAddress,
 +                                           discoveryUser,
 +                                           providerConfig,
 +                                           clusterName,
 +                                           services,
 +                                           serviceParams,
 +                                           apps,
 +                                           appParams);
 +            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testFile.getAbsolutePath());
 +            validateSimpleDescriptor(sd,
 +                                     discoveryType,
 +                                     discoveryAddress,
 +                                     providerConfig,
 +                                     clusterName,
 +                                     services,
 +                                     serviceParams,
 +                                     apps,
 +                                     appParams);
 +        } finally {
 +            if (testFile != null) {
 +                try {
 +                    testFile.delete();
 +                } catch (Exception e) {
 +                    // Ignore
 +                }
 +            }
 +        }
 +    }
 +
 +    private String getFileExtensionForType(FileType type) {
 +        String extension = null;
 +        switch (type) {
 +            case JSON:
 +                extension = "json";
 +                break;
-             case YAML:
++            case YML:
 +                extension = "yml";
 +                break;
++            case YAML:
++                extension = "yaml";
++                break;
 +        }
 +        return extension;
 +    }
 +
 +    private File writeDescriptorFile(FileType type,
 +                                     String                           path,
 +                                     String                           discoveryType,
 +                                     String                           discoveryAddress,
 +                                     String                           discoveryUser,
 +                                     String                           providerConfig,
 +                                     String                           clusterName,
 +                                     Map<String, List<String>>        services) throws Exception {
 +        return writeDescriptorFile(type,
 +                                   path,
 +                                   discoveryType,
 +                                   discoveryAddress,
 +                                   discoveryUser,
 +                                   providerConfig,
 +                                   clusterName,
 +                                   services,
 +                                   null);
 +    }
 +
 +    private File writeDescriptorFile(FileType type,
 +                                     String                           path,
 +                                     String                           discoveryType,
 +                                     String                           discoveryAddress,
 +                                     String                           discoveryUser,
 +                                     String                           providerConfig,
 +                                     String                           clusterName,
 +                                     Map<String, List<String>>        services,
 +                                     Map<String, Map<String, String>> serviceParams) throws Exception {
 +        return writeDescriptorFile(type,
 +                                   path,
 +                                   discoveryType,
 +                                   discoveryAddress,
 +                                   discoveryUser,
 +                                   providerConfig,
 +                                   clusterName,
 +                                   services,
 +                                   serviceParams,
 +                                   null,
 +                                   null);
 +    }
 +
 +
 +    private File writeDescriptorFile(FileType type,
 +                                     String                           path,
 +                                     String                           discoveryType,
 +                                     String                           discoveryAddress,
 +                                     String                           discoveryUser,
 +                                     String                           providerConfig,
 +                                     String                           clusterName,
 +                                     Map<String, List<String>>        services,
 +                                     Map<String, Map<String, String>> serviceParams,
 +                                     Map<String, List<String>>        apps,
 +                                     Map<String, Map<String, String>> appParams) throws Exception {
 +        File result = null;
 +        switch (type) {
 +            case JSON:
 +                result = writeJSON(path,
 +                                   discoveryType,
 +                                   discoveryAddress,
 +                                   discoveryUser,
 +                                   providerConfig,
 +                                   clusterName,
 +                                   services,
 +                                   serviceParams,
 +                                   apps,
 +                                   appParams);
 +                break;
 +            case YAML:
++            case YML:
 +                result = writeYAML(path,
 +                                   discoveryType,
 +                                   discoveryAddress,
 +                                   discoveryUser,
 +                                   providerConfig,
 +                                   clusterName,
 +                                   services,
 +                                   serviceParams,
 +                                   apps,
 +                                   appParams);
 +                break;
 +        }
 +        return result;
 +    }
 +
 +
 +    private File writeJSON(String path,
 +                           String discoveryType,
 +                           String discoveryAddress,
 +                           String discoveryUser,
 +                           String providerConfig,
 +                           String clusterName,
 +                           Map<String, List<String>> services,
 +                           Map<String, Map<String, String>> serviceParams,
 +                           Map<String, List<String>> apps,
 +                           Map<String, Map<String, String>> appParams) throws Exception {
 +        File f = new File(path);
 +
 +        Writer fw = new FileWriter(f);
 +        fw.write("{" + "\n");
 +        fw.write("\"discovery-type\":\"" + discoveryType + "\",\n");
 +        fw.write("\"discovery-address\":\"" + discoveryAddress + "\",\n");
 +        fw.write("\"discovery-user\":\"" + discoveryUser + "\",\n");
 +        fw.write("\"provider-config-ref\":\"" + providerConfig + "\",\n");
 +        fw.write("\"cluster\":\"" + clusterName + "\"");
 +
 +        if (services != null && !services.isEmpty()) {
 +            fw.write(",\n\"services\":[\n");
 +            writeServiceOrApplicationJSON(fw, services, serviceParams);
 +            fw.write("]\n");
 +        }
 +
 +        if (apps != null && !apps.isEmpty()) {
 +            fw.write(",\n\"applications\":[\n");
 +            writeServiceOrApplicationJSON(fw, apps, appParams);
 +            fw.write("]\n");
 +        }
 +
 +        fw.write("}\n");
 +        fw.flush();
 +        fw.close();
 +
 +        return f;
 +    }
 +
 +    private void writeServiceOrApplicationJSON(Writer fw,
 +                                               Map<String, List<String>> elementURLs,
 +                                               Map<String, Map<String, String>> elementParams) throws Exception {
 +        if (elementURLs != null) {
 +            int i = 0;
 +            for (String name : elementURLs.keySet()) {
 +                fw.write("{\"name\":\"" + name + "\"");
 +
 +                // Service params
 +                if (elementParams != null && !elementParams.isEmpty()) {
 +                    Map<String, String> params = elementParams.get(name);
 +                    if (params != null && !params.isEmpty()) {
 +                        fw.write(",\n\"params\":{\n");
 +                        Iterator<String> paramNames = params.keySet().iterator();
 +                        while (paramNames.hasNext()) {
 +                            String paramName = paramNames.next();
 +                            String paramValue = params.get(paramName);
 +                            fw.write("\"" + paramName + "\":\"" + paramValue + "\"");
 +                            fw.write(paramNames.hasNext() ? ",\n" : "");
 +                        }
 +                        fw.write("\n}");
 +                    }
 +                }
 +
 +                // Service URLs
 +                List<String> urls = elementURLs.get(name);
 +                if (urls != null) {
 +                    fw.write(",\n\"urls\":[");
 +                    Iterator<String> urlIter = urls.iterator();
 +                    while (urlIter.hasNext()) {
 +                        fw.write("\"" + urlIter.next() + "\"");
 +                        if (urlIter.hasNext()) {
 +                            fw.write(", ");
 +                        }
 +                    }
 +                    fw.write("]\n");
 +                }
 +
 +                fw.write("}");
 +                if (i++ < elementURLs.size() - 1) {
 +                    fw.write(",");
 +                }
 +                fw.write("\n");
 +            }
 +        }
 +    }
 +
 +    private File writeYAML(String                           path,
 +                           String                           discoveryType,
 +                           String                           discoveryAddress,
 +                           String                           discoveryUser,
 +                           String                           providerConfig,
 +                           String                           clusterName,
 +                           Map<String, List<String>>        services,
 +                           Map<String, Map<String, String>> serviceParams,
 +                           Map<String, List<String>>        apps,
 +                           Map<String, Map<String, String>> appParams) throws Exception {
 +
 +        File f = new File(path);
 +
 +        Writer fw = new FileWriter(f);
 +        fw.write("---" + "\n");
 +        fw.write("discovery-type: " + discoveryType + "\n");
 +        fw.write("discovery-address: " + discoveryAddress + "\n");
 +        fw.write("discovery-user: " + discoveryUser + "\n");
 +        fw.write("provider-config-ref: " + providerConfig + "\n");
 +        fw.write("cluster: " + clusterName+ "\n");
 +
 +        if (services != null && !services.isEmpty()) {
 +            fw.write("services:\n");
 +            writeServiceOrApplicationYAML(fw, services, serviceParams);
 +        }
 +
 +        if (apps != null && !apps.isEmpty()) {
 +            fw.write("applications:\n");
 +            writeServiceOrApplicationYAML(fw, apps, appParams);
 +        }
 +
 +        fw.flush();
 +        fw.close();
 +
 +        return f;
 +    }
 +
 +    private void writeServiceOrApplicationYAML(Writer                           fw,
 +                                               Map<String, List<String>>        elementURLs,
 +                                               Map<String, Map<String, String>> elementParams) throws Exception {
 +        for (String name : elementURLs.keySet()) {
 +            fw.write("    - name: " + name + "\n");
 +
 +            // Service params
 +            if (elementParams != null && !elementParams.isEmpty()) {
 +                if (elementParams.containsKey(name)) {
 +                    Map<String, String> params = elementParams.get(name);
 +                    fw.write("      params:\n");
 +                    for (String paramName : params.keySet()) {
 +                        fw.write("            " + paramName + ": " + params.get(paramName) + "\n");
 +                    }
 +                }
 +            }
 +
 +            // Service URLs
 +            List<String> urls = elementURLs.get(name);
 +            if (urls != null) {
 +                fw.write("      urls:\n");
 +                for (String url : urls) {
 +                    fw.write("          - " + url + "\n");
 +                }
 +            }
 +        }
 +    }
 +
 +
 +    private void validateSimpleDescriptor(SimpleDescriptor          sd,
 +                                          String                    discoveryType,
 +                                          String                    discoveryAddress,
 +                                          String                    providerConfig,
 +                                          String                    clusterName,
 +                                          Map<String, List<String>> expectedServices) {
 +        validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, expectedServices, null);
 +    }
 +
 +
 +    private void validateSimpleDescriptor(SimpleDescriptor                 sd,
 +                                          String                           discoveryType,
 +                                          String                           discoveryAddress,
 +                                          String                           providerConfig,
 +                                          String                           clusterName,
 +                                          Map<String, List<String>>        expectedServices,
 +                                          Map<String, Map<String, String>> expectedServiceParameters) {
 +        validateSimpleDescriptor(sd,
 +                                 discoveryType,
 +                                 discoveryAddress,
 +                                 providerConfig,
 +                                 clusterName,
 +                                 expectedServices,
 +                                 expectedServiceParameters,
 +                                 null,
 +                                 null);
 +    }
 +
 +    private void validateSimpleDescriptor(SimpleDescriptor                 sd,
 +                                          String                           discoveryType,
 +                                          String                           discoveryAddress,
 +                                          String                           providerConfig,
 +                                          String                           clusterName,
 +                                          Map<String, List<String>>        expectedServices,
 +                                          Map<String, Map<String, String>> expectedServiceParameters,
 +                                          Map<String, List<String>>        expectedApps,
 +                                          Map<String, Map<String, String>> expectedAppParameters) {
 +        assertNotNull(sd);
 +        assertEquals(discoveryType, sd.getDiscoveryType());
 +        assertEquals(discoveryAddress, sd.getDiscoveryAddress());
 +        assertEquals(providerConfig, sd.getProviderConfig());
 +        assertEquals(clusterName, sd.getClusterName());
 +
 +        List<SimpleDescriptor.Service> actualServices = sd.getServices();
 +
 +        if (expectedServices == null) {
 +            assertTrue(actualServices.isEmpty());
 +        } else {
 +            assertEquals(expectedServices.size(), actualServices.size());
 +
 +            for (SimpleDescriptor.Service actualService : actualServices) {
 +                assertTrue(expectedServices.containsKey(actualService.getName()));
 +                assertEquals(expectedServices.get(actualService.getName()), actualService.getURLs());
 +
 +                // Validate service parameters
 +                if (expectedServiceParameters != null) {
 +                    if (expectedServiceParameters.containsKey(actualService.getName())) {
 +                        Map<String, String> expectedParams = expectedServiceParameters.get(actualService.getName());
 +
 +                        Map<String, String> actualServiceParams = actualService.getParams();
 +                        assertNotNull(actualServiceParams);
 +
 +                        // Validate the size of the service parameter set
 +                        assertEquals(expectedParams.size(), actualServiceParams.size());
 +
 +                        // Validate the parameter contents
 +                        for (String paramName : actualServiceParams.keySet()) {
 +                            assertTrue(expectedParams.containsKey(paramName));
 +                            assertEquals(expectedParams.get(paramName), actualServiceParams.get(paramName));
 +                        }
 +                    }
 +                }
 +            }
 +        }
 +
 +        List<SimpleDescriptor.Application> actualApps = sd.getApplications();
 +
 +        if (expectedApps == null) {
 +            assertTrue(actualApps.isEmpty());
 +        } else {
 +            assertEquals(expectedApps.size(), actualApps.size());
 +
 +            for (SimpleDescriptor.Application actualApp : actualApps) {
 +                assertTrue(expectedApps.containsKey(actualApp.getName()));
 +                assertEquals(expectedApps.get(actualApp.getName()), actualApp.getURLs());
 +
 +                // Validate service parameters
 +                if (expectedServiceParameters != null) {
 +                    if (expectedAppParameters.containsKey(actualApp.getName())) {
 +                        Map<String, String> expectedParams = expectedAppParameters.get(actualApp.getName());
 +
 +                        Map<String, String> actualAppParams = actualApp.getParams();
 +                        assertNotNull(actualAppParams);
 +
 +                        // Validate the size of the service parameter set
 +                        assertEquals(expectedParams.size(), actualAppParams.size());
 +
 +                        // Validate the parameter contents
 +                        for (String paramName : actualAppParams.keySet()) {
 +                            assertTrue(expectedParams.containsKey(paramName));
 +                            assertEquals(expectedParams.get(paramName), actualAppParams.get(paramName));
 +                        }
 +                    }
 +                }
 +            }
 +        }
 +    }
 +
 +}


[28/49] knox git commit: KNOX-1013 - Monitor Ambari for Cluster Topology changes (Phil Zampino via lmccay)

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
index f6bb9b0..95ae1f2 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
@@ -170,6 +170,11 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   public static final String MIME_TYPES_TO_COMPRESS = GATEWAY_CONFIG_FILE_PREFIX
       + ".gzip.compress.mime.types";
 
+  public static final String CLUSTER_CONFIG_MONITOR_PREFIX = GATEWAY_CONFIG_FILE_PREFIX + ".cluster.config.monitor.";
+  public static final String CLUSTER_CONFIG_MONITOR_INTERVAL_SUFFIX = ".interval";
+  public static final String CLUSTER_CONFIG_MONITOR_ENABLED_SUFFIX = ".enabled";
+
+
   // These config property names are not inline with the convention of using the
   // GATEWAY_CONFIG_FILE_PREFIX as is done by those above. These are left for
   // backward compatibility. 
@@ -942,6 +947,16 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   }
 
   @Override
+  public int getClusterMonitorPollingInterval(String type) {
+    return getInt(CLUSTER_CONFIG_MONITOR_PREFIX + type.toLowerCase() + CLUSTER_CONFIG_MONITOR_INTERVAL_SUFFIX, -1);
+  }
+  
+  @Override
+  public boolean isClusterMonitorEnabled(String type) {
+    return getBoolean(CLUSTER_CONFIG_MONITOR_PREFIX + type.toLowerCase() + CLUSTER_CONFIG_MONITOR_ENABLED_SUFFIX, true);
+  }
+
+  @Override
   public List<String> getRemoteRegistryConfigurationNames() {
     List<String> result = new ArrayList<>();
 

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
index 9dca344..626cec0 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegist
 import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 import org.apache.hadoop.gateway.services.registry.impl.DefaultServiceDefinitionRegistry;
 import org.apache.hadoop.gateway.services.metrics.impl.DefaultMetricsService;
+import org.apache.hadoop.gateway.services.topology.impl.DefaultClusterConfigurationMonitorService;
 import org.apache.hadoop.gateway.services.topology.impl.DefaultTopologyService;
 import org.apache.hadoop.gateway.services.hostmap.impl.DefaultHostMapperService;
 import org.apache.hadoop.gateway.services.registry.impl.DefaultServiceRegistryService;
@@ -112,6 +113,11 @@ public class DefaultGatewayServices implements GatewayServices {
     registryClientService.init(config, options);
     services.put(REMOTE_REGISTRY_CLIENT_SERVICE, registryClientService);
 
+    DefaultClusterConfigurationMonitorService ccs = new DefaultClusterConfigurationMonitorService();
+    ccs.setAliasService(alias);
+    ccs.init(config, options);
+    services.put(CLUSTER_CONFIGURATION_MONITOR_SERVICE, ccs);
+
     DefaultTopologyService tops = new DefaultTopologyService();
     tops.setAliasService(alias);
     tops.init(  config, options  );
@@ -144,6 +150,8 @@ public class DefaultGatewayServices implements GatewayServices {
                             (RemoteConfigurationRegistryClientService)services.get(REMOTE_REGISTRY_CLIENT_SERVICE);
     clientService.start();
 
+    (services.get(CLUSTER_CONFIGURATION_MONITOR_SERVICE)).start();
+
     DefaultTopologyService tops = (DefaultTopologyService)services.get(TOPOLOGY_SERVICE);
     tops.start();
 
@@ -156,6 +164,8 @@ public class DefaultGatewayServices implements GatewayServices {
 
     ks.stop();
 
+    (services.get(CLUSTER_CONFIGURATION_MONITOR_SERVICE)).stop();
+
     DefaultAliasService alias = (DefaultAliasService) services.get(ALIAS_SERVICE);
     alias.stop();
 

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java
new file mode 100644
index 0000000..342ce11
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.services.topology.impl;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.topology.ClusterConfigurationMonitorService;
+import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
+import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.ServiceLoader;
+
+
+public class DefaultClusterConfigurationMonitorService implements ClusterConfigurationMonitorService {
+
+    private AliasService aliasService = null;
+
+    private Map<String, ClusterConfigurationMonitor> monitors = new HashMap<>();
+
+    @Override
+    public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
+        ServiceLoader<ClusterConfigurationMonitorProvider> providers =
+                                                        ServiceLoader.load(ClusterConfigurationMonitorProvider.class);
+        for (ClusterConfigurationMonitorProvider provider : providers) {
+            // Check the gateway configuration to determine if this type of monitor is enabled
+            if (config.isClusterMonitorEnabled(provider.getType())) {
+                ClusterConfigurationMonitor monitor = provider.newInstance(config, aliasService);
+                if (monitor != null) {
+                    monitors.put(provider.getType(), monitor);
+                }
+            }
+        }
+    }
+
+    @Override
+    public void start() {
+        for (ClusterConfigurationMonitor monitor : monitors.values()) {
+            monitor.start();
+        }
+    }
+
+    @Override
+    public void stop() {
+        for (ClusterConfigurationMonitor monitor : monitors.values()) {
+            monitor.stop();
+        }
+    }
+
+    @Override
+    public ClusterConfigurationMonitor getMonitor(String type) {
+        return monitors.get(type);
+    }
+
+    @Override
+    public void addListener(ClusterConfigurationMonitor.ConfigurationChangeListener listener) {
+        for (ClusterConfigurationMonitor monitor : monitors.values()) {
+            monitor.addListener(listener);
+        }
+    }
+
+    public void setAliasService(AliasService aliasService) {
+        this.aliasService = aliasService;
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
index 5fc3620..aded6cd 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
@@ -28,6 +28,7 @@ import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
 import org.apache.commons.io.monitor.FileAlterationMonitor;
 import org.apache.commons.io.monitor.FileAlterationObserver;
 import org.apache.hadoop.gateway.GatewayMessages;
+import org.apache.hadoop.gateway.GatewayServer;
 import org.apache.hadoop.gateway.audit.api.Action;
 import org.apache.hadoop.gateway.audit.api.ActionOutcome;
 import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
@@ -37,15 +38,18 @@ import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
 import org.apache.hadoop.gateway.config.GatewayConfig;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 import org.apache.hadoop.gateway.service.definition.ServiceDefinition;
+import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.ServiceLifecycleException;
 import org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.topology.TopologyService;
+import org.apache.hadoop.gateway.topology.ClusterConfigurationMonitorService;
 import org.apache.hadoop.gateway.topology.Topology;
 import org.apache.hadoop.gateway.topology.TopologyEvent;
 import org.apache.hadoop.gateway.topology.TopologyListener;
 import org.apache.hadoop.gateway.topology.TopologyMonitor;
 import org.apache.hadoop.gateway.topology.TopologyProvider;
 import org.apache.hadoop.gateway.topology.builder.TopologyBuilder;
+import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
 import org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitor;
 import org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorFactory;
 import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorHandler;
@@ -554,7 +558,10 @@ public class DefaultTopologyService
 
   @Override
   public void start() {
-
+    // Register a cluster configuration monitor listener for change notifications
+    ClusterConfigurationMonitorService ccms =
+                  GatewayServer.getGatewayServices().getService(GatewayServices.CLUSTER_CONFIGURATION_MONITOR_SERVICE);
+    ccms.addListener(new TopologyDiscoveryTrigger(this));
   }
 
   @Override
@@ -589,11 +596,17 @@ public class DefaultTopologyService
       // This happens prior to the start-up loading of the topologies.
       String[] descriptorFilenames =  descriptorsDirectory.list();
       if (descriptorFilenames != null) {
-          for (String descriptorFilename : descriptorFilenames) {
-              if (DescriptorsMonitor.isDescriptorFile(descriptorFilename)) {
-                  descriptorsMonitor.onFileChange(new File(descriptorsDirectory, descriptorFilename));
-              }
+        for (String descriptorFilename : descriptorFilenames) {
+          if (DescriptorsMonitor.isDescriptorFile(descriptorFilename)) {
+            // If there isn't a corresponding topology file, or if the descriptor has been modified since the
+            // corresponding topology file was generated, then trigger generation of one
+            File matchingTopologyFile = getExistingFile(topologiesDirectory, FilenameUtils.getBaseName(descriptorFilename));
+            if (matchingTopologyFile == null ||
+                    matchingTopologyFile.lastModified() < (new File(descriptorsDirectory, descriptorFilename)).lastModified()) {
+              descriptorsMonitor.onFileChange(new File(descriptorsDirectory, descriptorFilename));
+            }
           }
+        }
       }
 
       // Initialize the remote configuration monitor, if it has been configured
@@ -604,7 +617,6 @@ public class DefaultTopologyService
     }
   }
 
-
   /**
    * Utility method for listing the files in the specified directory.
    * This method is "nicer" than the File#listFiles() because it will not return null.
@@ -847,4 +859,37 @@ public class DefaultTopologyService
     }
   }
 
+  /**
+   * Listener for Ambari config change events, which will trigger re-generation (including re-discovery) of the
+   * affected topologies.
+   */
+  private static class TopologyDiscoveryTrigger implements ClusterConfigurationMonitor.ConfigurationChangeListener {
+
+    private TopologyService topologyService = null;
+
+    TopologyDiscoveryTrigger(TopologyService topologyService) {
+      this.topologyService = topologyService;
+    }
+
+    @Override
+    public void onConfigurationChange(String source, String clusterName) {
+      log.noticedClusterConfigurationChange(source, clusterName);
+      try {
+        // Identify any descriptors associated with the cluster configuration change
+        for (File descriptor : topologyService.getDescriptors()) {
+          String descriptorContent = FileUtils.readFileToString(descriptor);
+          if (descriptorContent.contains(source)) {
+            if (descriptorContent.contains(clusterName)) {
+              log.triggeringTopologyRegeneration(source, clusterName, descriptor.getAbsolutePath());
+              // 'Touch' the descriptor to trigger re-generation of the associated topology
+              descriptor.setLastModified(System.currentTimeMillis());
+            }
+          }
+        }
+      } catch (Exception e) {
+        log.errorRespondingToConfigChange(source, clusterName, e);
+      }
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
index c44710a..6b9df0d 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -54,6 +54,8 @@ public class SimpleDescriptorHandler {
 
     private static final SimpleDescriptorMessages log = MessagesFactory.get(SimpleDescriptorMessages.class);
 
+    private static Map<String, ServiceDiscovery> discoveryInstances = new HashMap<>();
+
     public static Map<String, File> handle(File desc) throws IOException {
         return handle(desc, NO_GATEWAY_SERVICES);
     }
@@ -89,7 +91,12 @@ public class SimpleDescriptorHandler {
             discoveryType = "AMBARI";
         }
 
-        ServiceDiscovery sd = ServiceDiscoveryFactory.get(discoveryType, gatewayServices);
+        // Use the cached discovery object for the required type, if it has already been loaded
+        ServiceDiscovery sd = discoveryInstances.get(discoveryType);
+        if (sd == null) {
+            sd = ServiceDiscoveryFactory.get(discoveryType, gatewayServices);
+            discoveryInstances.put(discoveryType, sd);
+        }
         ServiceDiscovery.Cluster cluster = sd.discover(sdc, desc.getClusterName());
 
         List<String> validServiceNames = new ArrayList<>();

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
index 5cfaf36..e45fd11 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
@@ -313,7 +313,23 @@ public interface GatewayConfig {
    * @return
    */
   boolean isGatewayServerHeaderEnabled();
-
+  
+  /**
+   *
+   * @param type The type of cluster configuration monitor for which the interval should be returned.
+   *
+   * @return The polling interval configuration value, or -1 if it has not been configured.
+   */
+  int getClusterMonitorPollingInterval(String type);
+  
+  /**
+   *
+   * @param type The type of cluster configuration monitor for which the enabled status should be returned.
+   *
+   * @return The enabled status of the specified type of cluster configuration monitor.
+   */
+  boolean isClusterMonitorEnabled(String type);
+  
   /**
    * @return The list of the names of any remote registry configurations defined herein.
    */

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/GatewayServices.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/GatewayServices.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/GatewayServices.java
index 2894bbc..222b1f0 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/GatewayServices.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/GatewayServices.java
@@ -41,6 +41,8 @@ public interface GatewayServices extends Service, ProviderDeploymentContributor
 
   String REMOTE_REGISTRY_CLIENT_SERVICE = "RemoteConfigRegistryClientService";
 
+  String CLUSTER_CONFIGURATION_MONITOR_SERVICE = "ClusterConfigurationMonitorService";
+
   public abstract Collection<String> getServiceNames();
 
   public abstract <T> T getService( String serviceName );

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/ClusterConfigurationMonitorService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/ClusterConfigurationMonitorService.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/ClusterConfigurationMonitorService.java
new file mode 100644
index 0000000..961f2e5
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/ClusterConfigurationMonitorService.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology;
+
+import org.apache.hadoop.gateway.services.Service;
+import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
+
+/**
+ * Gateway service for managing cluster configuration monitors.
+ */
+public interface ClusterConfigurationMonitorService extends Service {
+
+    /**
+     *
+     * @param type The type of monitor (e.g., Ambari)
+     *
+     * @return The monitor associated with the specified type, or null if there is no such monitor.
+     */
+    ClusterConfigurationMonitor getMonitor(String type);
+
+
+    /**
+     * Register for configuration change notifications from <em>any</em> of the monitors managed by this service.
+     *
+     * @param listener The listener to register.
+     */
+    void addListener(ClusterConfigurationMonitor.ConfigurationChangeListener listener);
+
+}
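
A hedged usage sketch of this service, mirroring what DefaultTopologyService.start() does elsewhere in this commit: look the service up from the gateway services registry and register a listener that will receive change notifications from every managed monitor. The wrapper class and method names are illustrative only:

import org.apache.hadoop.gateway.GatewayServer;
import org.apache.hadoop.gateway.services.GatewayServices;
import org.apache.hadoop.gateway.topology.ClusterConfigurationMonitorService;
import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;

public class MonitorListenerRegistrationExample {

    public static void registerListener() {
        ClusterConfigurationMonitorService ccms =
            GatewayServer.getGatewayServices().getService(GatewayServices.CLUSTER_CONFIGURATION_MONITOR_SERVICE);

        ccms.addListener(new ClusterConfigurationMonitor.ConfigurationChangeListener() {
            @Override
            public void onConfigurationChange(String source, String clusterName) {
                // DefaultTopologyService reacts here by "touching" descriptors that reference
                // the source and cluster, which triggers regeneration of the affected topologies.
                System.out.println("Configuration change from " + source + " for cluster " + clusterName);
            }
        });
    }
}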

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitor.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitor.java
new file mode 100644
index 0000000..fc3614d
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitor.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+public interface ClusterConfigurationMonitor {
+
+    /**
+     * Start the monitor.
+     */
+    void start();
+
+    /**
+     * Stop the monitor.
+     */
+    void stop();
+
+    /**
+     *
+     * @param interval The polling interval, in seconds
+     */
+    void setPollingInterval(int interval);
+
+    /**
+     * Register for notifications from the monitor.
+     */
+    void addListener(ConfigurationChangeListener listener);
+
+    /**
+     * Monitor listener interface for receiving notifications that a configuration has changed.
+     */
+    interface ConfigurationChangeListener {
+        void onConfigurationChange(String source, String clusterName);
+    }
+}
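
A minimal, hypothetical implementation sketch of the interface above (not part of Knox): it polls on a daemon thread and notifies registered listeners when a change is detected. The class name, the placeholder change check, and the example source/cluster values are all illustrative; the real Ambari monitor introduced by this change compares actual cluster configuration state instead:

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;

public class PollingClusterConfigurationMonitor implements ClusterConfigurationMonitor {

    private final List<ConfigurationChangeListener> listeners = new CopyOnWriteArrayList<>();
    private volatile int intervalSeconds = 60; // default, overridden via setPollingInterval()
    private volatile boolean running = false;

    @Override
    public void start() {
        running = true;
        Thread poller = new Thread(() -> {
            while (running) {
                if (checkForChanges()) {
                    // Notify every registered listener; the source and cluster values are illustrative
                    for (ConfigurationChangeListener listener : listeners) {
                        listener.onConfigurationChange("http://example-manager:8080", "exampleCluster");
                    }
                }
                try {
                    Thread.sleep(intervalSeconds * 1000L);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        });
        poller.setDaemon(true);
        poller.start();
    }

    @Override
    public void stop() {
        running = false;
    }

    @Override
    public void setPollingInterval(int interval) {
        this.intervalSeconds = interval;
    }

    @Override
    public void addListener(ConfigurationChangeListener listener) {
        listeners.add(listener);
    }

    private boolean checkForChanges() {
        return false; // stand-in; a real monitor would query the cluster manager here
    }
}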

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java
new file mode 100644
index 0000000..a8d5f30
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.security.AliasService;
+
+public interface ClusterConfigurationMonitorProvider {
+
+    String getType();
+
+    ClusterConfigurationMonitor newInstance(GatewayConfig config, AliasService aliasService);
+}
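
A matching hypothetical provider sketch, showing the kind of implementation DefaultClusterConfigurationMonitorService's ServiceLoader lookup would discover. To be picked up it would also need to be listed in a META-INF/services/org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider file. The "EXAMPLE" type string and the PollingClusterConfigurationMonitor class from the earlier sketch are illustrative, not part of Knox:

import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.services.security.AliasService;
import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider;

public class ExampleMonitorProvider implements ClusterConfigurationMonitorProvider {

    @Override
    public String getType() {
        // This type name keys the monitor's enabled/interval configuration lookups (lowercased there).
        return "EXAMPLE";
    }

    @Override
    public ClusterConfigurationMonitor newInstance(GatewayConfig config, AliasService aliasService) {
        // A real provider might hand the AliasService to the monitor for credential lookups.
        PollingClusterConfigurationMonitor monitor = new PollingClusterConfigurationMonitor();

        // Honor the configured polling interval, if any (-1 means "not configured").
        int interval = config.getClusterMonitorPollingInterval(getType());
        if (interval > 0) {
            monitor.setPollingInterval(interval);
        }
        return monitor;
    }
}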

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
index f7ea633..e04c581 100644
--- a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
+++ b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -640,4 +640,14 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
   public String getRemoteConfigurationMonitorClientName() {
     return null;
   }
+
+  @Override
+  public int getClusterMonitorPollingInterval(String type) {
+    return 600;
+  }
+
+  @Override
+  public boolean isClusterMonitorEnabled(String type) {
+    return false;
+  }
 }


[24/49] knox git commit: KNOX-1129 - Remote Configuration Monitor Should Define The Entries It Monitors If They're Not Yet Defined (Phil Zampino via lmccay)

Posted by mo...@apache.org.
KNOX-1129 - Remote Configuration Monitor Should Define The Entries It Monitors If They're Not Yet Defined (Phil Zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/e482e2e9
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/e482e2e9
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/e482e2e9

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: e482e2e93687d8601f94f8134544ebe1e0a04108
Parents: 828ea38
Author: Larry McCay <lm...@hortonworks.com>
Authored: Mon Dec 4 17:15:57 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Mon Dec 4 17:16:44 2017 -0500

----------------------------------------------------------------------
 .../apache/hadoop/gateway/GatewayMessages.java  |   8 +
 .../DefaultRemoteConfigurationMonitor.java      |  63 +++++
 ...emoteConfigurationRegistryClientService.java |  20 ++
 .../remote/RemoteConfigurationMessages.java     |   3 +
 .../config/remote/zk/CuratorClientService.java  |  48 +++-
 .../RemoteConfigurationRegistryClient.java      |   6 +
 .../monitor/RemoteConfigurationMonitorTest.java | 260 +++++++++++++++++--
 7 files changed, 377 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/e482e2e9/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
index d78ef71..ab0ab39 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -588,4 +588,12 @@ public interface GatewayMessages {
   @Message( level = MessageLevel.DEBUG, text = "Removed descriptor {0} reference to provider configuration {1}." )
   void removedProviderConfigurationReference(String descriptorName, String providerConfigurationName);
 
+  @Message( level = MessageLevel.WARN,
+            text = "The permissions for the remote configuration registry entry \"{0}\" are such that its content may not be trustworthy." )
+  void suspectWritableRemoteConfigurationEntry(String entryPath);
+
+  @Message( level = MessageLevel.WARN,
+            text = "Correcting the suspect permissions for the remote configuration registry entry \"{0}\"." )
+  void correctingSuspectWritableRemoteConfigurationEntry(String entryPath);
+
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/e482e2e9/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
index 03bbf16..af60058 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
@@ -24,9 +24,12 @@ import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegis
 import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.EntryListener;
 import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
 import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.zookeeper.ZooDefs;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 
 
@@ -39,6 +42,32 @@ class DefaultRemoteConfigurationMonitor implements RemoteConfigurationMonitor {
 
     private static GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
 
+    // N.B. This is ZooKeeper-specific, and should be abstracted when another registry is supported
+    private static final RemoteConfigurationRegistryClient.EntryACL AUTHENTICATED_USERS_ALL;
+    static {
+        AUTHENTICATED_USERS_ALL = new RemoteConfigurationRegistryClient.EntryACL() {
+            public String getId() {
+                return "";
+            }
+
+            public String getType() {
+                return "auth";
+            }
+
+            public Object getPermissions() {
+                return ZooDefs.Perms.ALL;
+            }
+
+            public boolean canRead() {
+                return true;
+            }
+
+            public boolean canWrite() {
+                return true;
+            }
+        };
+    }
+
     private RemoteConfigurationRegistryClient client = null;
 
     private File providersDir;
@@ -75,6 +104,9 @@ class DefaultRemoteConfigurationMonitor implements RemoteConfigurationMonitor {
         final String monitorSource = client.getAddress();
         log.startingRemoteConfigurationMonitor(monitorSource);
 
+        // Ensure the existence of the expected entries and their associated ACLs
+        ensureEntries();
+
         // Confirm access to the remote provider configs directory znode
         List<String> providerConfigs = client.listChildEntries(NODE_KNOX_PROVIDERS);
         if (providerConfigs == null) {
@@ -105,6 +137,37 @@ class DefaultRemoteConfigurationMonitor implements RemoteConfigurationMonitor {
         client.removeEntryListener(NODE_KNOX_DESCRIPTORS);
     }
 
+    private void ensureEntries() {
+        ensureEntry(NODE_KNOX);
+        ensureEntry(NODE_KNOX_CONFIG);
+        ensureEntry(NODE_KNOX_PROVIDERS);
+        ensureEntry(NODE_KNOX_DESCRIPTORS);
+    }
+
+    private void ensureEntry(String name) {
+        if (!client.entryExists(name)) {
+            client.createEntry(name);
+        } else {
+            // Validate the ACL
+            List<RemoteConfigurationRegistryClient.EntryACL> entryACLs = client.getACL(name);
+            for (RemoteConfigurationRegistryClient.EntryACL entryACL : entryACLs) {
+                // N.B. This is ZooKeeper-specific, and should be abstracted when another registry is supported
+                // For now, check for ZooKeeper world:anyone with ANY permissions (even read-only)
+                if (entryACL.getType().equals("world") && entryACL.getId().equals("anyone")) {
+                    log.suspectWritableRemoteConfigurationEntry(name);
+
+                    // If the client is authenticated, but "anyone" can write the content, then the content may not
+                    // be trustworthy.
+                    if (client.isAuthenticationConfigured()) {
+                        log.correctingSuspectWritableRemoteConfigurationEntry(name);
+
+                        // Replace the existing ACL with one that permits only authenticated users
+                        client.setACL(name, Collections.singletonList(AUTHENTICATED_USERS_ALL));
+                    }
+                }
+            }
+        }
+    }
 
     private static class ConfigDirChildEntryListener implements ChildEntryListener {
         File localDir;
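
Read as raw Curator calls, the ensureEntry() logic above amounts to: create the znode if it is absent; otherwise inspect its ACL and, when the client is SASL-authenticated, replace any world:anyone entry with an auth ACL granting all permissions to authenticated users. A standalone sketch of that sequence, assuming an already-started CuratorFramework and using only Curator calls that appear elsewhere in this commit, is below; it is an illustration, not the monitor's actual code path (which goes through RemoteConfigurationRegistryClient).

import java.util.Collections;
import java.util.List;

import org.apache.curator.framework.CuratorFramework;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Id;

class EnsureEntrySketch {

    static void ensureEntry(CuratorFramework curator, boolean saslConfigured, String path) throws Exception {
        if (curator.checkExists().forPath(path) == null) {
            // Missing entry: create it.
            curator.create().forPath(path);
            return;
        }
        for (ACL acl : curator.getACL().forPath(path)) {
            Id id = acl.getId();
            if ("world".equals(id.getScheme()) && "anyone".equals(id.getId()) && saslConfigured) {
                // Suspect ACL: replace it with "any authenticated user, all permissions".
                List<ACL> corrected =
                        Collections.singletonList(new ACL(ZooDefs.Perms.ALL, new Id("auth", "")));
                curator.setACL().withACL(corrected).forPath(path);
                break;
            }
        }
    }
}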

http://git-wip-us.apache.org/repos/asf/knox/blob/e482e2e9/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
index 161c201..0bfc39a 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
@@ -129,6 +129,16 @@ public class LocalFileSystemRemoteConfigurationRegistryClientService implements
                             public Object getPermissions() {
                                 return collected.get(id).toString();
                             }
+
+                            @Override
+                            public boolean canRead() {
+                                return true;
+                            }
+
+                            @Override
+                            public boolean canWrite() {
+                                return true;
+                            }
                         };
                         result.add(acl);
                     }
@@ -216,6 +226,16 @@ public class LocalFileSystemRemoteConfigurationRegistryClientService implements
             }
 
             @Override
+            public boolean isAuthenticationConfigured() {
+                return false;
+            }
+
+            @Override
+            public void setACL(String path, List<EntryACL> acls) {
+                //
+            }
+
+            @Override
             public void deleteEntry(String path) {
                 File entry = new File(root, path);
                 if (entry.exists()) {

http://git-wip-us.apache.org/repos/asf/knox/blob/e482e2e9/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java
index 22e622d..7cd1324 100644
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java
@@ -42,5 +42,8 @@ public interface RemoteConfigurationMessages {
     void errorHandlingRemoteConfigACL(final String path,
                                       @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
+    @Message(level = MessageLevel.ERROR, text = "An error occurred setting the ACL for remote configuration {0} : {1}")
+    void errorSettingEntryACL(final String path,
+                              @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/e482e2e9/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
index 0908252..0000f48 100644
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
@@ -45,6 +45,7 @@ import org.apache.zookeeper.data.Stat;
 
 import java.nio.charset.Charset;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -115,7 +116,7 @@ class CuratorClientService implements ZooKeeperClientService {
         ACLProvider aclProvider;
         if (config.isSecureRegistry()) {
             configureSasl(config);
-            aclProvider = new SASLOwnerACLProvider(config.getPrincipal());
+            aclProvider = new SASLOwnerACLProvider();
         } else {
             // Clear SASL system property
             System.clearProperty(LOGIN_CONTEXT_NAME_PROPERTY);
@@ -163,6 +164,11 @@ class CuratorClientService implements ZooKeeperClientService {
         }
 
         @Override
+        public boolean isAuthenticationConfigured() {
+            return config.isSecureRegistry();
+        }
+
+        @Override
         public boolean entryExists(String path) {
             Stat s = null;
             try {
@@ -191,6 +197,30 @@ class CuratorClientService implements ZooKeeperClientService {
         }
 
         @Override
+        public void setACL(String path, List<EntryACL> entryACLs) {
+            // Translate the abstract ACLs into ZooKeeper ACLs
+            List<ACL> delegateACLs = new ArrayList<>();
+            for (EntryACL entryACL : entryACLs) {
+                String scheme = entryACL.getType();
+                String id = entryACL.getId();
+                int permissions = 0;
+                if (entryACL.canWrite()) {
+                    permissions = ZooDefs.Perms.ALL;
+                } else if (entryACL.canRead()){
+                    permissions = ZooDefs.Perms.READ;
+                }
+                delegateACLs.add(new ACL(permissions, new Id(scheme, id)));
+            }
+
+            try {
+                // Set the ACLs for the path
+                delegate.setACL().withACL(delegateACLs).forPath(path);
+            } catch (Exception e) {
+                log.errorSettingEntryACL(path, e);
+            }
+        }
+
+        @Override
         public List<String> listChildEntries(String path) {
             List<String> result = null;
             try {
@@ -305,8 +335,8 @@ class CuratorClientService implements ZooKeeperClientService {
 
         private final List<ACL> saslACL;
 
-        private SASLOwnerACLProvider(String principal) {
-            this.saslACL = Collections.singletonList(new ACL(ZooDefs.Perms.ALL, new Id("sasl", principal)));
+        private SASLOwnerACLProvider() {
+            this.saslACL = ZooDefs.Ids.CREATOR_ALL_ACL; // All permissions for any authenticated user
         }
 
         @Override
@@ -396,7 +426,7 @@ class CuratorClientService implements ZooKeeperClientService {
     private static final class ZooKeeperACLAdapter implements RemoteConfigurationRegistryClient.EntryACL {
         private String type;
         private String id;
-        private Object permissions;
+        private int permissions;
 
         ZooKeeperACLAdapter(ACL acl) {
             this.permissions = acl.getPerms();
@@ -418,6 +448,16 @@ class CuratorClientService implements ZooKeeperClientService {
         public Object getPermissions() {
             return permissions;
         }
+
+        @Override
+        public boolean canRead() {
+            return (permissions >= ZooDefs.Perms.READ);
+        }
+
+        @Override
+        public boolean canWrite() {
+            return (permissions >= ZooDefs.Perms.WRITE);
+        }
     }
 
 }
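
One detail worth keeping in mind when reading canRead()/canWrite() above: ZooKeeper permissions are a bitmask (READ=1, WRITE=2, CREATE=4, DELETE=8, ADMIN=16, ALL=31), so an individual right is normally tested with a bitwise AND rather than a numeric comparison. The sketch below illustrates the bitmask model using only org.apache.zookeeper.ZooDefs; it is background on ZooKeeper permissions, not code from this commit. Relatedly, the switch to ZooDefs.Ids.CREATOR_ALL_ACL in SASLOwnerACLProvider grants ALL to the authenticated identity of the creating session (the "auth" scheme), which matches the monitor's AUTHENTICATED_USERS_ALL entry.

import org.apache.zookeeper.ZooDefs;

class ZkPermsSketch {

    // Each right is a single bit; ALL is the union of the five bits (value 31).
    static boolean canRead(int perms) {
        return (perms & ZooDefs.Perms.READ) != 0;
    }

    static boolean canWrite(int perms) {
        return (perms & ZooDefs.Perms.WRITE) != 0;
    }

    public static void main(String[] args) {
        int createOnly = ZooDefs.Perms.CREATE;            // 4: may create children, may not write data
        System.out.println(canWrite(createOnly));         // false with a bitwise check
        System.out.println(canWrite(ZooDefs.Perms.ALL));  // true
    }
}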

http://git-wip-us.apache.org/repos/asf/knox/blob/e482e2e9/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java
index 6fbf410..bfb4518 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java
@@ -22,10 +22,14 @@ public interface RemoteConfigurationRegistryClient {
 
     String getAddress();
 
+    boolean isAuthenticationConfigured();
+
     boolean entryExists(String path);
 
     List<EntryACL> getACL(String path);
 
+    void setACL(String path, List<EntryACL> acls);
+
     List<String> listChildEntries(String path);
 
     String getEntryData(String path);
@@ -69,6 +73,8 @@ public interface RemoteConfigurationRegistryClient {
         String getId();
         String getType();
         Object getPermissions();
+        boolean canRead();
+        boolean canWrite();
     }
 
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/e482e2e9/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
index 14d98a9..dd75028 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
@@ -33,13 +33,16 @@ import org.apache.zookeeper.ZooDefs;
 import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.Id;
 import org.easymock.EasyMock;
+import org.junit.After;
 import org.junit.AfterClass;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 import java.io.File;
 import java.io.FileWriter;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -48,6 +51,7 @@ import java.util.Map;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -66,9 +70,16 @@ public class RemoteConfigurationMonitorTest {
     private static final String PATH_KNOX_PROVIDERS = PATH_KNOX_CONFIG + "/shared-providers";
     private static final String PATH_KNOX_DESCRIPTORS = PATH_KNOX_CONFIG + "/descriptors";
 
+    private static final String PATH_AUTH_TEST = "/auth_test/child_node";
+
+
+    private static final String ALT_USERNAME = "notyou";
     private static final String ZK_USERNAME = "testsasluser";
     private static final String ZK_PASSWORD = "testsaslpwd";
 
+    private static final ACL ANY_AUTHENTICATED_USER_ALL = new ACL(ZooDefs.Perms.ALL, new Id("auth", ""));
+    private static final ACL SASL_TESTUSER_ALL = new ACL(ZooDefs.Perms.ALL, new Id("sasl", ZK_USERNAME));
+
     private static File testTmp;
     private static File providersDir;
     private static File descriptorsDir;
@@ -80,13 +91,36 @@ public class RemoteConfigurationMonitorTest {
     @BeforeClass
     public static void setupSuite() throws Exception {
         testTmp = TestUtils.createTempDir(RemoteConfigurationMonitorTest.class.getName());
-        File confDir   = TestUtils.createTempDir(testTmp + "/conf");
-        providersDir   = TestUtils.createTempDir(confDir + "/shared-providers");
+        File confDir = TestUtils.createTempDir(testTmp + "/conf");
+        providersDir = TestUtils.createTempDir(confDir + "/shared-providers");
         descriptorsDir = TestUtils.createTempDir(confDir + "/descriptors");
+    }
 
+    @AfterClass
+    public static void tearDownSuite() throws Exception {
+        // Delete the working dir
+        testTmp.delete();
+    }
+
+    @Before
+    public void setupTest() throws Exception {
         configureAndStartZKCluster();
     }
 
+    @After
+    public void tearDownTest() throws Exception {
+        // Clean up the ZK nodes, and close the client
+        if (client != null) {
+            if (client.checkExists().forPath(PATH_KNOX) != null) {
+                client.delete().deletingChildrenIfNeeded().forPath(PATH_KNOX);
+            }
+            client.close();
+        }
+
+        // Shutdown the ZK cluster
+        zkCluster.close();
+    }
+
     /**
      * Create and persist a JAAS configuration file, defining the SASL config for both the ZooKeeper cluster instances
      * and ZooKeeper clients.
@@ -102,7 +136,7 @@ public class RemoteConfigurationMonitorTest {
         fw.write("Server {\n" +
                 "    org.apache.zookeeper.server.auth.DigestLoginModule required\n" +
                 "    user_" + username + " =\"" + password + "\";\n" +
-                "};\n"+
+                "};\n" +
                 "Client {\n" +
                 "    org.apache.zookeeper.server.auth.DigestLoginModule required\n" +
                 "    username=\"" + username + "\"\n" +
@@ -141,42 +175,187 @@ public class RemoteConfigurationMonitorTest {
 
         // Create the client for the test cluster
         client = CuratorFrameworkFactory.builder()
-                .connectString(zkCluster.getConnectString())
-                .retryPolicy(new ExponentialBackoffRetry(100, 3))
-                .build();
+                                        .connectString(zkCluster.getConnectString())
+                                        .retryPolicy(new ExponentialBackoffRetry(100, 3))
+                                        .build();
         assertNotNull(client);
         client.start();
 
-        // Create the knox config paths with an ACL for the sasl user configured for the client
-        List<ACL> acls = new ArrayList<>();
-        acls.add(new ACL(ZooDefs.Perms.ALL, new Id("sasl", ZK_USERNAME)));
+        // Create test config nodes with an ACL for a sasl user that is NOT configured for the test client
+        List<ACL> acls = Arrays.asList(new ACL(ZooDefs.Perms.ALL, new Id("sasl", ALT_USERNAME)),
+                                       new ACL(ZooDefs.Perms.READ, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_AUTH_TEST);
+        assertNotNull("Failed to create node:" + PATH_AUTH_TEST,
+                      client.checkExists().forPath(PATH_AUTH_TEST));
+    }
 
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
-        assertNotNull("Failed to create node:" + PATH_KNOX_DESCRIPTORS,
-                client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
-        assertNotNull("Failed to create node:" + PATH_KNOX_PROVIDERS,
-                client.checkExists().forPath(PATH_KNOX_PROVIDERS));
+
+    private static void validateKnoxConfigNodeACLs(List<ACL> expectedACLS, List<ACL> actualACLs) throws Exception {
+        assertEquals(expectedACLS.size(), actualACLs.size());
+        int matchedCount = 0;
+        for (ACL expected : expectedACLS) {
+            for (ACL actual : actualACLs) {
+                Id expectedId = expected.getId();
+                Id actualId = actual.getId();
+                if (actualId.getScheme().equals(expectedId.getScheme()) && actualId.getId().equals(expectedId.getId())) {
+                    matchedCount++;
+                    assertEquals(expected.getPerms(), actual.getPerms());
+                    break;
+                }
+            }
+        }
+        assertEquals("ACL mismatch despite being same quantity.", expectedACLS.size(), matchedCount);
     }
 
-    @AfterClass
-    public static void tearDownSuite() throws Exception {
-        // Clean up the ZK nodes, and close the client
-        if (client != null) {
-            client.delete().deletingChildrenIfNeeded().forPath(PATH_KNOX);
-            client.close();
+
+    @Test
+    public void testZooKeeperConfigMonitorSASLNodesExistWithUnacceptableACL() throws Exception {
+        final String configMonitorName = "zkConfigClient";
+        final String alias = "zkPass";
+
+        // Setup the base GatewayConfig mock
+        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
+        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
+                .andReturn(Collections.singletonList(configMonitorName))
+                .anyTimes();
+        final String registryConfig =
+                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + ZK_USERNAME + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=Digest;" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + alias;
+        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
+                .andReturn(registryConfig).anyTimes();
+        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
+        EasyMock.replay(gc);
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(alias))
+                .andReturn(ZK_PASSWORD.toCharArray())
+                .anyTimes();
+        EasyMock.replay(aliasService);
+
+        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
+        clientService.setAliasService(aliasService);
+        clientService.init(gc, Collections.emptyMap());
+        clientService.start();
+
+        RemoteConfigurationMonitorFactory.setClientService(clientService);
+
+        RemoteConfigurationMonitor cm = RemoteConfigurationMonitorFactory.get(gc);
+        assertNotNull("Failed to load RemoteConfigurationMonitor", cm);
+
+        final ACL ANY_AUTHENTICATED_USER_ALL = new ACL(ZooDefs.Perms.ALL, new Id("auth", ""));
+        List<ACL> acls = Arrays.asList(ANY_AUTHENTICATED_USER_ALL, new ACL(ZooDefs.Perms.WRITE, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_CONFIG);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
+
+        // Make sure both ACLs were applied
+        List<ACL> preACLs = client.getACL().forPath(PATH_KNOX);
+        assertEquals(2, preACLs.size());
+
+        // Check that the config nodes really do exist (the monitor will NOT create them if they're present)
+        assertNotNull(client.checkExists().forPath(PATH_KNOX));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_CONFIG));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_PROVIDERS));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
+
+        try {
+            cm.start();
+        } catch (Exception e) {
+            fail("Failed to start monitor: " + e.getMessage());
         }
 
-        // Shutdown the ZK cluster
-        zkCluster.close();
+        // Validate the expected ACLs on the Knox config znodes (make sure the monitor removed the world:anyone ACL)
+        List<ACL> expectedACLs = Collections.singletonList(SASL_TESTUSER_ALL);
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_CONFIG));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_PROVIDERS));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_DESCRIPTORS));
+    }
 
-        // Delete the working dir
-        testTmp.delete();
+
+    @Test
+    public void testZooKeeperConfigMonitorSASLNodesExistWithAcceptableACL() throws Exception {
+        final String configMonitorName = "zkConfigClient";
+        final String alias = "zkPass";
+
+        // Setup the base GatewayConfig mock
+        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
+        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
+                .andReturn(Collections.singletonList(configMonitorName))
+                .anyTimes();
+        final String registryConfig =
+                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + ZK_USERNAME + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=Digest;" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + alias;
+        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
+                .andReturn(registryConfig).anyTimes();
+        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
+        EasyMock.replay(gc);
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(alias))
+                .andReturn(ZK_PASSWORD.toCharArray())
+                .anyTimes();
+        EasyMock.replay(aliasService);
+
+        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
+        clientService.setAliasService(aliasService);
+        clientService.init(gc, Collections.emptyMap());
+        clientService.start();
+
+        RemoteConfigurationMonitorFactory.setClientService(clientService);
+
+        RemoteConfigurationMonitor cm = RemoteConfigurationMonitorFactory.get(gc);
+        assertNotNull("Failed to load RemoteConfigurationMonitor", cm);
+
+        List<ACL> acls = Arrays.asList(ANY_AUTHENTICATED_USER_ALL);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_CONFIG);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
+
+        // Check that the config nodes really do exist (the monitor will NOT create them if they're present)
+        assertNotNull(client.checkExists().forPath(PATH_KNOX));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_CONFIG));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_PROVIDERS));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
+
+        try {
+            cm.start();
+        } catch (Exception e) {
+            fail("Failed to start monitor: " + e.getMessage());
+        }
+
+        // Test auth violation
+        clientService.get(configMonitorName).createEntry("/auth_test/child_node/test1");
+        assertNull("Creation should have been prevented since write access is not granted to the test client.",
+                client.checkExists().forPath("/auth_test/child_node/test1"));
+        assertTrue("Creation should have been prevented since write access is not granted to the test client.",
+                client.getChildren().forPath("/auth_test/child_node").isEmpty());
+
+        // Validate the expected ACLs on the Knox config znodes (make sure the monitor didn't change them)
+        List<ACL> expectedACLs = Collections.singletonList(SASL_TESTUSER_ALL);
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_CONFIG));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_PROVIDERS));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_DESCRIPTORS));
     }
 
+
     @Test
-    public void testZooKeeperConfigMonitorSASL() throws Exception {
+    public void testZooKeeperConfigMonitorSASLCreateNodes() throws Exception {
         final String configMonitorName = "zkConfigClient";
+        final String alias = "zkPass";
 
         // Setup the base GatewayConfig mock
         GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
@@ -187,13 +366,19 @@ public class RemoteConfigurationMonitorTest {
                 .anyTimes();
         final String registryConfig =
                             GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + ZK_USERNAME + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=Digest;" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + alias;
         EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
                 .andReturn(registryConfig).anyTimes();
         EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
         EasyMock.replay(gc);
 
         AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(alias))
+                .andReturn(ZK_PASSWORD.toCharArray())
+                .anyTimes();
         EasyMock.replay(aliasService);
 
         RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
@@ -206,12 +391,33 @@ public class RemoteConfigurationMonitorTest {
         RemoteConfigurationMonitor cm = RemoteConfigurationMonitorFactory.get(gc);
         assertNotNull("Failed to load RemoteConfigurationMonitor", cm);
 
+        // Check that the config nodes really don't yet exist (the monitor will create them if they're not present)
+        assertNull(client.checkExists().forPath(PATH_KNOX));
+        assertNull(client.checkExists().forPath(PATH_KNOX_CONFIG));
+        assertNull(client.checkExists().forPath(PATH_KNOX_PROVIDERS));
+        assertNull(client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
+
         try {
             cm.start();
         } catch (Exception e) {
             fail("Failed to start monitor: " + e.getMessage());
         }
 
+        // Test auth violation
+        clientService.get(configMonitorName).createEntry("/auth_test/child_node/test1");
+        assertNull("Creation should have been prevented since write access is not granted to the test client.",
+                   client.checkExists().forPath("/auth_test/child_node/test1"));
+        assertTrue("Creation should have been prevented since write access is not granted to the test client.",
+                   client.getChildren().forPath("/auth_test/child_node").isEmpty());
+
+        // Validate the expected ACLs on the Knox config znodes (make sure the monitor created them correctly)
+        List<ACL> expectedACLs = Collections.singletonList(SASL_TESTUSER_ALL);
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_CONFIG));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_PROVIDERS));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_DESCRIPTORS));
+
+        // Test the Knox config nodes, for which authentication should be sufficient for access
         try {
             final String pc_one_znode = getProviderPath("providers-config1.xml");
             final File pc_one         = new File(providersDir, "providers-config1.xml");


[19/49] knox git commit: Update CHANGES for v0.14.0

Posted by mo...@apache.org.
Update CHANGES for v0.14.0

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/844506f2
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/844506f2
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/844506f2

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 844506f28204ef382b492c72645cf053190a115a
Parents: 55be159
Author: Larry McCay <lm...@hortonworks.com>
Authored: Fri Dec 1 08:37:16 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Fri Dec 1 08:37:48 2017 -0500

----------------------------------------------------------------------
 CHANGES | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/844506f2/CHANGES
----------------------------------------------------------------------
diff --git a/CHANGES b/CHANGES
index 8815d3e..3fe8edd 100644
--- a/CHANGES
+++ b/CHANGES
@@ -21,7 +21,7 @@ Release Notes - Apache Knox - Version 0.14.0
    * [KNOX-1046] - Add Client Cert Wanted Capability with Configurable Validation that Checks for It
    * [KNOX-1072] - Add Client Cert Required Capability to KnoxToken
    * [KNOX-1107] - Remote Configuration Registry Client Service (Phil Zampino via lmccay)
-
+   * [KNOX-1128] - Readonly protection for generated topologies in Knox Admin UI
 ** Improvement
    * [KNOX-921] - Httpclient max connections are always set to default values
    * [KNOX-1106] - Tighten the rewrite rule on oozieui to reduce false positives (Wei Han via Sandeep More)
@@ -76,6 +76,8 @@ Release Notes - Apache Knox - Version 0.14.0
    * [KNOX-993] - The samples README for knoxshell references the 0.5.0 userguide
    * [KNOX-992] - Add README for the knoxshell distribution (Colm O hEigeartaigh and lmccay via lmccay)
    * [KNOX-1119] - Pac4J OAuth/OpenID Principal Needs to be Configurable (Andreas Hildebrandt via lmccay)
+   * [KNOX-1120] - Pac4J Stop Using ConfigSingleton
+   * [KNOX-1128] - Readonly protection for generated topologies in Knox Admin UI
 ** Bug
    * [KNOX-1003] - Fix the rewrite rules for Zeppelin 0.7.2 UI
    * [KNOX-1004] - Failing (flaky) Knox unit tests
@@ -85,6 +87,7 @@ Release Notes - Apache Knox - Version 0.14.0
    * [KNOX-1079] - Regression: proxy for Atlas fails with j_spring_security_check during login (Madhan Neethiraj via lmccay)
    * [KNOX-1022] - Configuring knox token ttl to higher value generates an access token which is not valid
    * [KNOX-1118] - Remove POC Service Definition for AmbariUI
+   * [KNOX-1134] - Regression due to KNOX-1119
 
 ------------------------------------------------------------------------------
 Release Notes - Apache Knox - Version 0.13.0


[04/49] knox git commit: KNOX-1107 - Remote Configuration Registry Client Service (Phil Zampino via lmccay)

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java
new file mode 100644
index 0000000..9fed589
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.config;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+public class RemoteConfigurationRegistriesAccessor {
+
+    // System property for specifying a reference to an XML configuration external to the gateway config
+    private static final String XML_CONFIG_REFERENCE_SYSTEM_PROPERTY_NAME =
+                                                                "org.apache.knox.gateway.remote.registry.config.file";
+
+
+    public static List<RemoteConfigurationRegistryConfig> getRemoteRegistryConfigurations(GatewayConfig gatewayConfig) {
+        List<RemoteConfigurationRegistryConfig> result = new ArrayList<>();
+
+        boolean useReferencedFile = false;
+
+        // First check for the system property pointing to a valid XML config for the remote registries
+        String remoteConfigRegistryConfigFilename = System.getProperty(XML_CONFIG_REFERENCE_SYSTEM_PROPERTY_NAME);
+        if (remoteConfigRegistryConfigFilename != null) {
+            File remoteConfigRegistryConfigFile = new File(remoteConfigRegistryConfigFilename);
+            if (remoteConfigRegistryConfigFile.exists()) {
+                useReferencedFile = true;
+                // Parse the file, and build the registry config set
+                result.addAll(RemoteConfigurationRegistriesParser.getConfig(remoteConfigRegistryConfigFilename));
+            }
+        }
+
+        // If the system property was not set to a valid reference to another config file, then try to derive the
+        // registry configurations from the gateway config.
+        if (!useReferencedFile) {
+            RemoteConfigurationRegistries remoteConfigRegistries =
+                                                            new DefaultRemoteConfigurationRegistries(gatewayConfig);
+            result.addAll(remoteConfigRegistries.getRegistryConfigurations());
+        }
+
+        return result;
+    }
+
+}
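
The accessor above gives an externally referenced XML file precedence over the gateway configuration: if the system property points at an existing file, only that file is parsed; otherwise the registry definitions are derived from the GatewayConfig. A short sketch of exercising that override follows; the file path is a hypothetical example, while the property name and the classes used are the ones introduced in this commit.

import java.util.List;

import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
import org.apache.hadoop.gateway.service.config.remote.config.RemoteConfigurationRegistriesAccessor;

class RegistriesLookupSketch {

    static List<RemoteConfigurationRegistryConfig> resolve(GatewayConfig gatewayConfig) {
        // Hypothetical file path; when the file exists, the accessor parses it with JAXB
        // and the gateway configuration is not consulted for registry definitions.
        System.setProperty("org.apache.knox.gateway.remote.registry.config.file",
                           "/etc/knox/conf/remote-registries.xml");

        return RemoteConfigurationRegistriesAccessor.getRemoteRegistryConfigurations(gatewayConfig);
    }
}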

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java
new file mode 100644
index 0000000..3ea71ef
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.config;
+
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+class RemoteConfigurationRegistriesParser {
+
+    static List<RemoteConfigurationRegistryConfig> getConfig(String configFilename) {
+        List<RemoteConfigurationRegistryConfig> result = new ArrayList<>();
+
+        File file = new File(configFilename);
+
+        try {
+            JAXBContext jaxbContext = JAXBContext.newInstance(RemoteConfigurationRegistries.class);
+            Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
+            RemoteConfigurationRegistries parsedContent = (RemoteConfigurationRegistries) jaxbUnmarshaller.unmarshal(file);
+            if (parsedContent != null) {
+                result.addAll(parsedContent.getRegistryConfigurations());
+            }
+        } catch (JAXBException e) {
+            e.printStackTrace();
+        }
+
+        return result;
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistry.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistry.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistry.java
new file mode 100644
index 0000000..f3e7dbd
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistry.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.config;
+
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+
+import javax.xml.bind.annotation.XmlElement;
+
+class RemoteConfigurationRegistry implements RemoteConfigurationRegistryConfig {
+
+    private String name;
+    private String type;
+    private String connectionString;
+    private String namespace;
+    private String authType;
+    private String principal;
+    private String credentialAlias;
+    private String keyTab;
+    private boolean useKeyTab;
+    private boolean useTicketCache;
+
+    RemoteConfigurationRegistry() {
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public void setRegistryType(String type) {
+        this.type = type;
+    }
+
+    public void setConnectionString(String connectionString) {
+        this.connectionString = connectionString;
+    }
+
+    public void setNamespace(String namespace) {
+        this.namespace = namespace;
+    }
+
+    public void setAuthType(String authType) {
+        this.authType = authType;
+    }
+
+    public void setPrincipal(String principal) {
+        this.principal = principal;
+    }
+
+    public void setCredentialAlias(String alias) {
+        this.credentialAlias = alias;
+    }
+
+    public void setUseTicketCache(boolean useTicketCache) {
+        this.useTicketCache = useTicketCache;
+    }
+
+    public void setUseKeytab(boolean useKeytab) {
+        this.useKeyTab = useKeytab;
+    }
+
+    public void setKeytab(String keytab) {
+        this.keyTab = keytab;
+    }
+
+    @XmlElement(name="name")
+    public String getName() {
+        return name;
+    }
+
+    @XmlElement(name="type")
+    public String getRegistryType() {
+        return type;
+    }
+
+    @XmlElement(name="auth-type")
+    public String getAuthType() {
+        return authType;
+    }
+
+    @XmlElement(name="principal")
+    public String getPrincipal() {
+        return principal;
+    }
+
+    @XmlElement(name="credential-alias")
+    public String getCredentialAlias() {
+        return credentialAlias;
+    }
+
+    @Override
+    @XmlElement(name="address")
+    public String getConnectionString() {
+        return connectionString;
+    }
+
+    @Override
+    @XmlElement(name="namespace")
+    public String getNamespace() {
+        return namespace;
+    }
+
+    @Override
+    @XmlElement(name="use-ticket-cache")
+    public boolean isUseTicketCache() {
+        return useTicketCache;
+    }
+
+    @Override
+    @XmlElement(name="use-key-tab")
+    public boolean isUseKeyTab() {
+        return useKeyTab;
+    }
+
+    @Override
+    @XmlElement(name="keytab")
+    public String getKeytab() {
+        return keyTab;
+    }
+
+    @Override
+    public boolean isSecureRegistry() {
+        return (getAuthType() != null);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
new file mode 100644
index 0000000..0908252
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
@@ -0,0 +1,423 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.zk;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.framework.api.ACLProvider;
+import org.apache.curator.framework.imps.DefaultACLProvider;
+import org.apache.curator.framework.recipes.cache.ChildData;
+import org.apache.curator.framework.recipes.cache.NodeCache;
+import org.apache.curator.framework.recipes.cache.NodeCacheListener;
+import org.apache.curator.framework.recipes.cache.PathChildrenCache;
+import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
+import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationMessages;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.ChildEntryListener;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.EntryListener;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+import org.apache.hadoop.gateway.service.config.remote.config.RemoteConfigurationRegistriesAccessor;
+import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.client.ZooKeeperSaslClient;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+import org.apache.zookeeper.data.Stat;
+
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * RemoteConfigurationRegistryClientService implementation that employs the Curator ZooKeeper client framework.
+ */
+class CuratorClientService implements ZooKeeperClientService {
+
+    private static final String LOGIN_CONTEXT_NAME_PROPERTY = ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY;
+
+    private static final String DEFAULT_LOGIN_CONTEXT_NAME = "Client";
+
+    private static final RemoteConfigurationMessages log =
+                                                        MessagesFactory.get(RemoteConfigurationMessages.class);
+
+    private Map<String, RemoteConfigurationRegistryClient> clients = new HashMap<>();
+
+    private AliasService aliasService = null;
+
+
+    @Override
+    public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
+
+        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
+
+        // Load the remote registry configurations
+        registryConfigs.addAll(RemoteConfigurationRegistriesAccessor.getRemoteRegistryConfigurations(config));
+
+        // Configure registry authentication
+        RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, aliasService);
+
+        if (registryConfigs.size() > 1) {
+            // Warn about current limit on number of supported client configurations
+            log.multipleRemoteRegistryConfigurations();
+        }
+
+        // Create the clients
+        for (RemoteConfigurationRegistryConfig registryConfig : registryConfigs) {
+            if (TYPE.equalsIgnoreCase(registryConfig.getRegistryType())) {
+                RemoteConfigurationRegistryClient registryClient = createClient(registryConfig);
+                clients.put(registryConfig.getName(), registryClient);
+            }
+        }
+    }
+
+    @Override
+    public void setAliasService(AliasService aliasService) {
+        this.aliasService = aliasService;
+    }
+
+    @Override
+    public void start() throws ServiceLifecycleException {
+    }
+
+    @Override
+    public void stop() throws ServiceLifecycleException {
+    }
+
+    @Override
+    public RemoteConfigurationRegistryClient get(String name) {
+        return clients.get(name);
+    }
+
+
+    private RemoteConfigurationRegistryClient createClient(RemoteConfigurationRegistryConfig config) {
+        ACLProvider aclProvider;
+        if (config.isSecureRegistry()) {
+            configureSasl(config);
+            aclProvider = new SASLOwnerACLProvider(config.getPrincipal());
+        } else {
+            // Clear SASL system property
+            System.clearProperty(LOGIN_CONTEXT_NAME_PROPERTY);
+            aclProvider = new DefaultACLProvider();
+        }
+
+        CuratorFramework client = CuratorFrameworkFactory.builder()
+                                                         .connectString(config.getConnectionString())
+                                                         .retryPolicy(new ExponentialBackoffRetry(1000, 3))
+                                                         .aclProvider(aclProvider)
+                                                         .build();
+        client.start();
+
+        return (new ClientAdapter(client, config));
+    }
+
+
+    private void configureSasl(RemoteConfigurationRegistryConfig config) {
+        String registryName = config.getName();
+        if (registryName == null) {
+            registryName = DEFAULT_LOGIN_CONTEXT_NAME;
+        }
+        System.setProperty(LOGIN_CONTEXT_NAME_PROPERTY, registryName);
+    }
+
+
+    private static final class ClientAdapter implements RemoteConfigurationRegistryClient {
+
+        private static final String DEFAULT_ENCODING = "UTF-8";
+
+        private CuratorFramework delegate;
+
+        private RemoteConfigurationRegistryConfig config;
+
+        private Map<String, NodeCache> entryNodeCaches = new HashMap<>();
+
+        ClientAdapter(CuratorFramework delegate, RemoteConfigurationRegistryConfig config) {
+            this.delegate = delegate;
+            this.config = config;
+        }
+
+        @Override
+        public String getAddress() {
+            return config.getConnectionString();
+        }
+
+        @Override
+        public boolean entryExists(String path) {
+            Stat s = null;
+            try {
+                s = delegate.checkExists().forPath(path);
+            } catch (Exception e) {
+                // Ignore
+            }
+            return (s != null);
+        }
+
+        @Override
+        public List<RemoteConfigurationRegistryClient.EntryACL> getACL(String path) {
+            List<RemoteConfigurationRegistryClient.EntryACL> acl = new ArrayList<>();
+            try {
+                List<ACL> zkACL = delegate.getACL().forPath(path);
+                if (zkACL != null) {
+                    for (ACL aclEntry : zkACL) {
+                        RemoteConfigurationRegistryClient.EntryACL entryACL = new ZooKeeperACLAdapter(aclEntry);
+                        acl.add(entryACL);
+                    }
+                }
+            } catch (Exception e) {
+                log.errorHandlingRemoteConfigACL(path, e);
+            }
+            return acl;
+        }
+
+        @Override
+        public List<String> listChildEntries(String path) {
+            List<String> result = null;
+            try {
+                result = delegate.getChildren().forPath(path);
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+            return result;
+        }
+
+        @Override
+        public void addChildEntryListener(String path, ChildEntryListener listener) throws Exception {
+            // Note: the started PathChildrenCache is not retained, so a child entry
+            // listener cannot currently be removed or closed once registered.
+            PathChildrenCache childCache = new PathChildrenCache(delegate, path, false);
+            childCache.getListenable().addListener(new ChildEntryListenerAdapter(this, listener));
+            childCache.start();
+        }
+
+        @Override
+        public void addEntryListener(String path, EntryListener listener) throws Exception {
+            NodeCache nodeCache = new NodeCache(delegate, path);
+            nodeCache.getListenable().addListener(new EntryListenerAdapter(this, nodeCache, listener));
+            nodeCache.start();
+            entryNodeCaches.put(path, nodeCache);
+        }
+
+        @Override
+        public void removeEntryListener(String path) throws Exception {
+            NodeCache nodeCache = entryNodeCaches.remove(path);
+            if (nodeCache != null) {
+                nodeCache.close();
+            }
+        }
+
+        @Override
+        public String getEntryData(String path) {
+            return getEntryData(path, DEFAULT_ENCODING);
+        }
+
+        @Override
+        public String getEntryData(String path, String encoding) {
+            String result = null;
+            try {
+                byte[] data = delegate.getData().forPath(path);
+                if (data != null) {
+                    result = new String(data, Charset.forName(encoding));
+                }
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+            return result;
+        }
+
+        @Override
+        public void createEntry(String path) {
+            try {
+                if (delegate.checkExists().forPath(path) == null) {
+                    delegate.create().forPath(path);
+                }
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+        }
+
+        @Override
+        public void createEntry(String path, String data) {
+            createEntry(path, data, DEFAULT_ENCODING);
+        }
+
+        @Override
+        public void createEntry(String path, String data, String encoding) {
+            try {
+                createEntry(path);
+                setEntryData(path, data, encoding);
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+        }
+
+        @Override
+        public int setEntryData(String path, String data) {
+            return setEntryData(path, data, DEFAULT_ENCODING);
+        }
+
+        @Override
+        public int setEntryData(String path, String data, String encoding) {
+            int version = 0;
+            try {
+                Stat s = delegate.setData().forPath(path, data.getBytes(Charset.forName(encoding)));
+                if (s != null) {
+                    version = s.getVersion();
+                }
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+            return version;
+        }
+
+        @Override
+        public void deleteEntry(String path) {
+            try {
+                delegate.delete().forPath(path);
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+        }
+    }
+
+    /**
+     * SASL ACLProvider
+     */
+    private static class SASLOwnerACLProvider implements ACLProvider {
+
+        private final List<ACL> saslACL;
+
+        private SASLOwnerACLProvider(String principal) {
+            this.saslACL = Collections.singletonList(new ACL(ZooDefs.Perms.ALL, new Id("sasl", principal)));
+        }
+
+        @Override
+        public List<ACL> getDefaultAcl() {
+            return saslACL;
+        }
+
+        @Override
+        public List<ACL> getAclForPath(String path) {
+            return getDefaultAcl();
+        }
+    }
+
+
+    private static final class ChildEntryListenerAdapter implements PathChildrenCacheListener {
+
+        private RemoteConfigurationRegistryClient client;
+        private ChildEntryListener delegate;
+
+        ChildEntryListenerAdapter(RemoteConfigurationRegistryClient client, ChildEntryListener delegate) {
+            this.client = client;
+            this.delegate = delegate;
+        }
+
+        @Override
+        public void childEvent(CuratorFramework curatorFramework, PathChildrenCacheEvent pathChildrenCacheEvent)
+                throws Exception {
+            ChildData childData = pathChildrenCacheEvent.getData();
+            if (childData != null) {
+                delegate.childEvent(client,
+                                    adaptType(pathChildrenCacheEvent.getType()),
+                                    childData.getPath());
+            }
+        }
+
+        private ChildEntryListener.Type adaptType(PathChildrenCacheEvent.Type type) {
+            ChildEntryListener.Type adapted = null;
+
+            switch(type) {
+                case CHILD_ADDED:
+                    adapted = ChildEntryListener.Type.ADDED;
+                    break;
+                case CHILD_REMOVED:
+                    adapted = ChildEntryListener.Type.REMOVED;
+                    break;
+                case CHILD_UPDATED:
+                    adapted = ChildEntryListener.Type.UPDATED;
+                    break;
+            }
+
+            return adapted;
+        }
+    }
+
+    private static final class EntryListenerAdapter implements NodeCacheListener {
+
+        private RemoteConfigurationRegistryClient client;
+        private EntryListener delegate;
+        private NodeCache nodeCache;
+
+        EntryListenerAdapter(RemoteConfigurationRegistryClient client, NodeCache nodeCache, EntryListener delegate) {
+            this.client = client;
+            this.nodeCache = nodeCache;
+            this.delegate = delegate;
+        }
+
+        @Override
+        public void nodeChanged() throws Exception {
+            String path = null;
+            byte[] data = null;
+
+            ChildData cd = nodeCache.getCurrentData();
+            if (cd != null) {
+                path = cd.getPath();
+                data = cd.getData();
+            }
+
+            if (path != null) {
+                delegate.entryChanged(client, path, data);
+            }
+        }
+    }
+
+    /**
+     * ACL adapter
+     */
+    private static final class ZooKeeperACLAdapter implements RemoteConfigurationRegistryClient.EntryACL {
+        private String type;
+        private String id;
+        private Object permissions;
+
+        ZooKeeperACLAdapter(ACL acl) {
+            this.permissions = acl.getPerms();
+            this.type = acl.getId().getScheme();
+            this.id = acl.getId().getId();
+        }
+
+        @Override
+        public String getId() {
+            return id;
+        }
+
+        @Override
+        public String getType() {
+            return type;
+        }
+
+        @Override
+        public Object getPermissions() {
+            return permissions;
+        }
+    }
+
+}
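
A brief usage sketch (not part of the commit) of the client service and adapter above; the
registry name "sandbox", the znode paths, and the gatewayConfig/aliasService variables are
illustrative, and the call sequence mirrors the tests later in this change:

    RemoteConfigurationRegistryClientService service =
            RemoteConfigurationRegistryClientServiceFactory.newInstance(gatewayConfig);
    service.setAliasService(aliasService);
    service.init(gatewayConfig, null);
    service.start();

    // Look up the client by its configured registry name, then manage entries through it.
    RemoteConfigurationRegistryClient client = service.get("sandbox");
    if (!client.entryExists("/knox/config/descriptors")) {
        client.createEntry("/knox/config/descriptors");
    }
    client.createEntry("/knox/config/descriptors/mycluster.json", "{ \"cluster\" : \"mycluster\" }");
    String data = client.getEntryData("/knox/config/descriptors/mycluster.json");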

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
new file mode 100644
index 0000000..d51d7d5
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.zk;
+
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationMessages;
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.AliasServiceException;
+
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Configuration decorator that adds SASL JAAS configuration to whatever JAAS config is already applied.
+ */
+class RemoteConfigurationRegistryJAASConfig extends Configuration {
+
+    // Underlying SASL mechanisms supported
+    enum SASLMechanism {
+        Kerberos,
+        Digest
+    }
+
+    static final Map<String, String> digestLoginModules = new HashMap<>();
+    static {
+        digestLoginModules.put("ZOOKEEPER", "org.apache.zookeeper.server.auth.DigestLoginModule");
+    }
+
+    private static final RemoteConfigurationMessages log = MessagesFactory.get(RemoteConfigurationMessages.class);
+
+    // Cache the current JAAS configuration
+    private Configuration delegate = Configuration.getConfiguration();
+
+    private AliasService aliasService;
+
+    private Map<String, AppConfigurationEntry[]> contextEntries =  new HashMap<>();
+
+    static RemoteConfigurationRegistryJAASConfig configure(List<RemoteConfigurationRegistryConfig> configs, AliasService aliasService) {
+        return new RemoteConfigurationRegistryJAASConfig(configs, aliasService);
+    }
+
+    private RemoteConfigurationRegistryJAASConfig(List<RemoteConfigurationRegistryConfig> configs, AliasService aliasService) {
+        this.aliasService = aliasService;
+
+        // Populate context entries
+        List<AppConfigurationEntry> appConfigEntries = new ArrayList<>();
+        for (RemoteConfigurationRegistryConfig config : configs) {
+            if (config.isSecureRegistry()) {
+                contextEntries.put(config.getName(), createEntries(config));
+            }
+        }
+
+        // If there is at least one context entry, then set this as the client configuration
+        if (!contextEntries.isEmpty()) {
+            // TODO: PJZ: ZooKeeper 3.6.0 will have per-client JAAS Configuration support; Upgrade ASAP!!
+            // For now, set this as the static JAAS configuration
+            Configuration.setConfiguration(this);
+        }
+    }
+
+    @Override
+    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+        AppConfigurationEntry[] result = null;
+
+        // First, try the delegate's context entries
+        result = delegate.getAppConfigurationEntry(name);
+        if (result == null || result.length < 1) {
+            // Try our additional context entries
+            result = contextEntries.get(name);
+        }
+
+        return result;
+    }
+
+    private AppConfigurationEntry[] createEntries(RemoteConfigurationRegistryConfig config) {
+        // Only supporting a single app config entry per configuration/context
+        AppConfigurationEntry[] result = new AppConfigurationEntry[1];
+        result[0] = createEntry(config);
+        return result;
+    }
+
+    private AppConfigurationEntry createEntry(RemoteConfigurationRegistryConfig config) {
+        AppConfigurationEntry entry = null;
+
+        Map<String, String> opts = new HashMap<>();
+        SASLMechanism saslMechanism = getSASLMechanism(config.getAuthType());
+        switch (saslMechanism) {
+            case Digest:
+                // Digest auth options
+                opts.put("username", config.getPrincipal());
+
+                char[] credential = null;
+                if (aliasService != null) {
+                    try {
+                        credential = aliasService.getPasswordFromAliasForGateway(config.getCredentialAlias());
+                    } catch (AliasServiceException e) {
+                        log.unresolvedCredentialAlias(config.getCredentialAlias());
+                    }
+                } else {
+                    throw new IllegalArgumentException("The AliasService is required to resolve credential aliases.");
+                }
+
+                if (credential != null) {
+                    opts.put("password", new String(credential));
+                }
+                break;
+            case Kerberos:
+                opts.put("isUseTicketCache", String.valueOf(config.isUseTicketCache()));
+                opts.put("isUseKeyTab", String.valueOf(config.isUseKeyTab()));
+                opts.put("keyTab", config.getKeytab());
+                opts.put("principal", config.getPrincipal());
+        }
+
+        entry = new AppConfigurationEntry(getLoginModuleName(config.getRegistryType(), saslMechanism),
+                                          AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                                          opts);
+
+        return entry;
+    }
+
+    private static String getLoginModuleName(String registryType, SASLMechanism saslMechanism) {
+        String loginModuleName = null;
+
+        switch (saslMechanism) {
+            case Kerberos:
+                if (System.getProperty("java.vendor").contains("IBM")) {
+                    loginModuleName = "com.ibm.security.auth.module.Krb5LoginModule";
+                } else {
+                    loginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
+                }
+                break;
+            case Digest:
+                loginModuleName = digestLoginModules.get(registryType.toUpperCase());
+        }
+        return loginModuleName;
+    }
+
+    private static SASLMechanism getSASLMechanism(String authType) {
+        SASLMechanism result = null;
+        for (SASLMechanism at : SASLMechanism.values()) {
+            if (at.name().equalsIgnoreCase(authType)) {
+                result = at;
+                break;
+            }
+        }
+        return result;
+    }
+
+
+}
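
A minimal sketch (assuming hypothetical registryConfigs and aliasService instances, plus the
registry name "testDigest" used in the tests below) of how the JAAS decorator above is installed
and later consulted: contexts already known to the pre-existing JAAS configuration are served by
the cached delegate, while names it does not recognize, such as secure registry names, are served
from the per-registry entries built here.

    RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, aliasService);

    // ZooKeeper's SASL client resolves its login context by name at connection time.
    AppConfigurationEntry[] entries =
            Configuration.getConfiguration().getAppConfigurationEntry("testDigest");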

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientService.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientService.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientService.java
new file mode 100644
index 0000000..c4add4a
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientService.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.zk;
+
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+public interface ZooKeeperClientService extends RemoteConfigurationRegistryClientService {
+
+    String TYPE = "ZooKeeper";
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java
new file mode 100644
index 0000000..f30d3da
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.zk;
+
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider;
+
+
+public class ZooKeeperClientServiceProvider implements RemoteConfigurationRegistryClientServiceProvider {
+
+    @Override
+    public String getType() {
+        return ZooKeeperClientService.TYPE;
+    }
+
+    @Override
+    public ZooKeeperClientService newInstance() {
+        return new CuratorClientService();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider b/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
new file mode 100644
index 0000000..7f2312a
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.service.config.remote.zk.ZooKeeperClientServiceProvider
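
The registration above is what java.util.ServiceLoader discovers at runtime. A hedged sketch of
how a factory could resolve the ZooKeeper client service from it (the actual
RemoteConfigurationRegistryClientServiceFactory implementation is not shown in this excerpt):

    ServiceLoader<RemoteConfigurationRegistryClientServiceProvider> loader =
            ServiceLoader.load(RemoteConfigurationRegistryClientServiceProvider.class);
    for (RemoteConfigurationRegistryClientServiceProvider provider : loader) {
        if (ZooKeeperClientService.TYPE.equalsIgnoreCase(provider.getType())) {
            RemoteConfigurationRegistryClientService service = provider.newInstance();
            // configure, init, and start the service as the tests below demonstrate
        }
    }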

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java
new file mode 100644
index 0000000..a33fcc2
--- /dev/null
+++ b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.config;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+public class DefaultRemoteConfigurationRegistriesTest {
+
+    /**
+     * Test a single registry configuration with digest auth configuration.
+     */
+    @Test
+    public void testPropertiesRemoteConfigurationRegistriesSingleDigest() throws Exception {
+        Map<String, Properties> testProperties = new HashMap<>();
+        Properties p = new Properties();
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "hostx:2181");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "zkDigestUser");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "digest");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS, "zkDigestAlias");
+        testProperties.put("testDigest", p);
+
+        doTestPropertiesRemoteConfigurationRegistries(testProperties);
+    }
+
+
+    /**
+     * Test a single registry configuration with kerberos auth configuration.
+     */
+    @Test
+    public void testPropertiesRemoteConfigurationRegistriesSingleKerberos() throws Exception {
+        Map<String, Properties> testProperties = new HashMap<>();
+        Properties p = new Properties();
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "hostx:2181");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "zkUser");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "kerberos");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB, "/home/user/remoteregistry.keytab");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB, "true");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE, "false");
+        testProperties.put("testKerb", p);
+
+        doTestPropertiesRemoteConfigurationRegistries(testProperties);
+    }
+
+    /**
+     * Test multiple registry configurations with varying auth settings.
+     */
+    @Test
+    public void testPropertiesRemoteConfigurationRegistriesMultipleMixed() throws Exception {
+        Map<String, Properties> testProperties = new HashMap<>();
+
+        Properties kerb = new Properties();
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "host1:2181");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_NAMESPACE, "/knox/config");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "kerbPrincipal");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "kerberos");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB, "/home/user/mykrb.keytab");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB, "true");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE, "false");
+        testProperties.put("testKerb1", kerb);
+
+        Properties digest = new Properties();
+        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
+        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "host2:2181");
+        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "digestPrincipal");
+        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "digest");
+        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS, "digestPwdAlias");
+        testProperties.put("testDigest1", digest);
+
+        Properties unsecured = new Properties();
+        unsecured.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
+        unsecured.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "host2:2181");
+        testProperties.put("testUnsecured", unsecured);
+
+        doTestPropertiesRemoteConfigurationRegistries(testProperties);
+    }
+
+
+    /**
+     * Perform the actual test.
+     *
+     * @param testProperties The test properties
+     */
+    private void doTestPropertiesRemoteConfigurationRegistries(Map<String, Properties> testProperties) throws Exception {
+        // Mock gateway config
+        GatewayConfig gc = mockGatewayConfig(testProperties);
+
+        // Create the RemoteConfigurationRegistries object to be tested from the GatewayConfig
+        RemoteConfigurationRegistries registries = new DefaultRemoteConfigurationRegistries(gc);
+
+        // Basic validation
+        assertNotNull(registries);
+        List<RemoteConfigurationRegistry> registryConfigs = registries.getRegistryConfigurations();
+        assertNotNull(registryConfigs);
+        assertEquals(testProperties.size(), registryConfigs.size());
+
+        // Validate the contents of the created object
+        for (RemoteConfigurationRegistry regConfig : registryConfigs) {
+            validateRemoteRegistryConfig(regConfig.getName(), testProperties.get(regConfig.getName()), regConfig);
+        }
+    }
+
+
+    /**
+     * Create a mock GatewayConfig based on the specified test properties.
+     *
+     * @param testProperties The test properties to set on the config
+     */
+    private GatewayConfig mockGatewayConfig(Map<String, Properties> testProperties) {
+        // Mock gateway config
+        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
+        List<String> configNames = new ArrayList<>();
+        for (String registryName : testProperties.keySet()) {
+            configNames.add(registryName);
+
+            String propertyValueString = "";
+            Properties props = testProperties.get(registryName);
+            Enumeration names = props.propertyNames();
+            while (names.hasMoreElements()) {
+                String propertyName = (String) names.nextElement();
+                propertyValueString += propertyName + "=" + props.get(propertyName);
+                if (names.hasMoreElements()) {
+                    propertyValueString += ";";
+                }
+            }
+            EasyMock.expect(gc.getRemoteRegistryConfiguration(registryName))
+                    .andReturn(propertyValueString)
+                    .anyTimes();
+        }
+        EasyMock.expect(gc.getRemoteRegistryConfigurationNames()).andReturn(configNames).anyTimes();
+        EasyMock.replay(gc);
+
+        return gc;
+    }
+
+
+    /**
+     * Validate the specified RemoteConfigurationRegistry based on the expected test properties.
+     */
+    private void validateRemoteRegistryConfig(String                      configName,
+                                              Properties                  expected,
+                                              RemoteConfigurationRegistry registryConfig) throws Exception {
+        assertEquals(configName, registryConfig.getName());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE), registryConfig.getRegistryType());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS), registryConfig.getConnectionString());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_NAMESPACE), registryConfig.getNamespace());
+        assertEquals(registryConfig.isSecureRegistry(), expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE) != null);
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE), registryConfig.getAuthType());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL), registryConfig.getPrincipal());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS), registryConfig.getCredentialAlias());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB), registryConfig.getKeytab());
+        assertEquals(Boolean.valueOf((String)expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB)), registryConfig.isUseKeyTab());
+        assertEquals(Boolean.valueOf((String)expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE)), registryConfig.isUseTicketCache());
+    }
+
+}
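
The tests above exercise the semicolon-delimited form in which a registry definition can be
embedded in the gateway configuration. A sketch of composing such a value from the GatewayConfig
property-name constants (the registry values shown are illustrative):

    String registryConfig =
            GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=ZooKeeper;" +
            GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=host1:2181,host2:2181;" +
            GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=digest;" +
            GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=knox;" +
            GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=zkCredential";
    // GatewayConfig.getRemoteRegistryConfiguration(<registry name>) is expected to return a
    // string of this shape for each name reported by getRemoteRegistryConfigurationNames().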

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java
new file mode 100644
index 0000000..386e332
--- /dev/null
+++ b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.config;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+import org.apache.hadoop.gateway.service.config.remote.util.RemoteRegistryConfigTestUtils;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import static org.apache.hadoop.gateway.service.config.remote.util.RemoteRegistryConfigTestUtils.*;
+
+public class RemoteConfigurationRegistryConfigParserTest {
+
+    @Test
+    public void testExternalXMLParsing() throws Exception {
+        final String CONN_STR = "http://my.zookeeper.host:2181";
+
+        Map<String, Map<String, String>> testRegistryConfigurations = new HashMap<>();
+
+        Map<String, String> config1 = new HashMap<>();
+        config1.put(PROPERTY_TYPE, "ZooKeeper");
+        config1.put(PROPERTY_NAME, "registry1");
+        config1.put(PROPERTY_ADDRESS, CONN_STR);
+        config1.put(PROPERTY_SECURE, "true");
+        config1.put(PROPERTY_AUTH_TYPE, "Digest");
+        config1.put(PROPERTY_PRINCIPAL, "knox");
+        config1.put(PROPERTY_CRED_ALIAS, "zkCredential");
+        testRegistryConfigurations.put(config1.get("name"), config1);
+
+        Map<String, String> config2 = new HashMap<>();
+        config2.put(PROPERTY_TYPE, "ZooKeeper");
+        config2.put(PROPERTY_NAME, "MyKerberos");
+        config2.put(PROPERTY_ADDRESS, CONN_STR);
+        config2.put(PROPERTY_SECURE, "true");
+        config2.put(PROPERTY_AUTH_TYPE, "Kerberos");
+        config2.put(PROPERTY_PRINCIPAL, "knox");
+        File myKeyTab = File.createTempFile("mytest", "keytab");
+        config2.put(PROPERTY_KEYTAB, myKeyTab.getAbsolutePath());
+        config2.put(PROPERTY_USE_KEYTAB, "false");
+        config2.put(PROPERTY_USE_TICKET_CACHE, "true");
+        testRegistryConfigurations.put(config2.get("name"), config2);
+
+        Map<String, String> config3 = new HashMap<>();
+        config3.put(PROPERTY_TYPE, "ZooKeeper");
+        config3.put(PROPERTY_NAME, "anotherRegistry");
+        config3.put(PROPERTY_ADDRESS, "whatever:1281");
+        testRegistryConfigurations.put(config3.get("name"), config3);
+
+        String configXML =
+                    RemoteRegistryConfigTestUtils.createRemoteConfigRegistriesXML(testRegistryConfigurations.values());
+
+        File registryConfigFile = File.createTempFile("remote-registries", "xml");
+        try {
+            FileUtils.writeStringToFile(registryConfigFile, configXML);
+
+            List<RemoteConfigurationRegistryConfig> configs =
+                                    RemoteConfigurationRegistriesParser.getConfig(registryConfigFile.getAbsolutePath());
+            assertNotNull(configs);
+            assertEquals(testRegistryConfigurations.keySet().size(), configs.size());
+
+            for (RemoteConfigurationRegistryConfig registryConfig : configs) {
+                Map<String, String> expected = testRegistryConfigurations.get(registryConfig.getName());
+                assertNotNull(expected);
+                validateParsedRegistryConfiguration(registryConfig, expected);
+            }
+        } finally {
+            registryConfigFile.delete();
+        }
+    }
+
+    private void validateParsedRegistryConfiguration(RemoteConfigurationRegistryConfig config,
+                                                     Map<String, String> expected) throws Exception {
+        assertEquals(expected.get(PROPERTY_TYPE), config.getRegistryType());
+        assertEquals(expected.get(PROPERTY_ADDRESS), config.getConnectionString());
+        assertEquals(expected.get(PROPERTY_NAME), config.getName());
+        assertEquals(expected.get(PROPERTY_NAMESAPCE), config.getNamespace());
+        assertEquals(Boolean.valueOf(expected.get(PROPERTY_SECURE)), config.isSecureRegistry());
+        assertEquals(expected.get(PROPERTY_AUTH_TYPE), config.getAuthType());
+        assertEquals(expected.get(PROPERTY_PRINCIPAL), config.getPrincipal());
+        assertEquals(expected.get(PROPERTY_CRED_ALIAS), config.getCredentialAlias());
+        assertEquals(expected.get(PROPERTY_KEYTAB), config.getKeytab());
+        assertEquals(Boolean.valueOf(expected.get(PROPERTY_USE_KEYTAB)), config.isUseKeyTab());
+        assertEquals(Boolean.valueOf(expected.get(PROPERTY_USE_TICKET_CACHE)), config.isUseTicketCache());
+    }
+
+}
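
For completeness, a short sketch of consuming an externally defined registry file through the
parser exercised above (the file path is illustrative):

    List<RemoteConfigurationRegistryConfig> configs =
            RemoteConfigurationRegistriesParser.getConfig("/etc/knox/conf/remote-registries.xml");
    for (RemoteConfigurationRegistryConfig c : configs) {
        System.out.println(c.getName() + " -> " + c.getConnectionString() +
                           " (secure=" + c.isSecureRegistry() + ")");
    }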

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java
new file mode 100644
index 0000000..35919d0
--- /dev/null
+++ b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.util;
+
+import java.util.Collection;
+import java.util.Map;
+
+public class RemoteRegistryConfigTestUtils {
+
+    public static final String PROPERTY_TYPE = "type";
+    public static final String PROPERTY_NAME = "name";
+    public static final String PROPERTY_ADDRESS = "address";
+    public static final String PROPERTY_NAMESAPCE = "namespace";
+    public static final String PROPERTY_SECURE = "secure";
+    public static final String PROPERTY_AUTH_TYPE = "authType";
+    public static final String PROPERTY_PRINCIPAL = "principal";
+    public static final String PROPERTY_CRED_ALIAS = "credentialAlias";
+    public static final String PROPERTY_KEYTAB = "keyTab";
+    public static final String PROPERTY_USE_KEYTAB = "useKeyTab";
+    public static final String PROPERTY_USE_TICKET_CACHE = "useTicketCache";
+
+    public static String createRemoteConfigRegistriesXML(Collection<Map<String, String>> configProperties) {
+        String result = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
+                        "<remote-configuration-registries>\n";
+
+        for (Map<String, String> props : configProperties) {
+            String authType = props.get(PROPERTY_AUTH_TYPE);
+            if ("Kerberos".equalsIgnoreCase(authType)) {
+                result +=
+                   createRemoteConfigRegistryXMLWithKerberosAuth(props.get(PROPERTY_TYPE),
+                                                                 props.get(PROPERTY_NAME),
+                                                                 props.get(PROPERTY_ADDRESS),
+                                                                 props.get(PROPERTY_PRINCIPAL),
+                                                                 props.get(PROPERTY_KEYTAB),
+                                                                 Boolean.valueOf(props.get(PROPERTY_USE_KEYTAB)),
+                                                                 Boolean.valueOf(props.get(PROPERTY_USE_TICKET_CACHE)));
+            } else if ("Digest".equalsIgnoreCase(authType)) {
+                result +=
+                    createRemoteConfigRegistryXMLWithDigestAuth(props.get(PROPERTY_TYPE),
+                                                                props.get(PROPERTY_NAME),
+                                                                props.get(PROPERTY_ADDRESS),
+                                                                props.get(PROPERTY_PRINCIPAL),
+                                                                props.get(PROPERTY_CRED_ALIAS));
+            } else {
+                result += createRemoteConfigRegistryXMLNoAuth(props.get(PROPERTY_TYPE),
+                                                              props.get(PROPERTY_NAME),
+                                                              props.get(PROPERTY_ADDRESS));
+            }
+        }
+
+        result += "</remote-configuration-registries>\n";
+
+        return result;
+    }
+
+    public static String createRemoteConfigRegistryXMLWithKerberosAuth(String type,
+                                                                       String name,
+                                                                       String address,
+                                                                       String principal,
+                                                                       String keyTab,
+                                                                       boolean useKeyTab,
+                                                                       boolean useTicketCache) {
+        return "  <remote-configuration-registry>\n" +
+               "    <name>" + name + "</name>\n" +
+               "    <type>" + type + "</type>\n" +
+               "    <address>" + address + "</address>\n" +
+               "    <secure>true</secure>\n" +
+               "    <auth-type>" + "Kerberos" + "</auth-type>\n" +
+               "    <principal>" + principal + "</principal>\n" +
+               "    <keytab>" + keyTab + "</keytab>\n" +
+               "    <use-keytab>" + String.valueOf(userKeyTab) + "</use-keytab>\n" +
+               "    <use-ticket-cache>" + String.valueOf(useTicketCache) + "</use-ticket-cache>\n" +
+               "  </remote-configuration-registry>\n";
+    }
+
+    public static String createRemoteConfigRegistryXMLWithDigestAuth(String type,
+                                                                     String name,
+                                                                     String address,
+                                                                     String principal,
+                                                                     String credentialAlias) {
+        return "  <remote-configuration-registry>\n" +
+               "    <name>" + name + "</name>\n" +
+               "    <type>" + type + "</type>\n" +
+               "    <address>" + address + "</address>\n" +
+               "    <secure>true</secure>\n" +
+               "    <auth-type>" + "Digest" + "</auth-type>\n" +
+               "    <principal>" + principal + "</principal>\n" +
+               "    <credential-alias>" + credentialAlias + "</credential-alias>\n" +
+               "  </remote-configuration-registry>\n";
+    }
+
+
+    public static String createRemoteConfigRegistryXMLNoAuth(String type,
+                                                             String name,
+                                                             String address) {
+        return "  <remote-configuration-registry>\n" +
+               "    <name>" + name + "</name>\n" +
+               "    <type>" + type + "</type>\n" +
+               "    <address>" + address + "</address>\n" +
+               "  </remote-configuration-registry>\n";
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java
new file mode 100644
index 0000000..0292ee3
--- /dev/null
+++ b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java
@@ -0,0 +1,424 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.zk;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.curator.test.InstanceSpec;
+import org.apache.curator.test.TestingCluster;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.ChildEntryListener;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceFactory;
+import org.apache.hadoop.gateway.service.config.remote.util.RemoteRegistryConfigTestUtils;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+public class RemoteConfigurationRegistryClientServiceTest {
+
+    /**
+     * Test a configuration for an unsecured remote registry, included in the gateway configuration.
+     */
+    @Test
+    public void testUnsecuredZooKeeperWithSimpleRegistryConfig() throws Exception {
+        final String REGISTRY_CLIENT_NAME = "unsecured-zk-registry-name";
+        final String PRINCIPAL = null;
+        final String PWD = null;
+        final String CRED_ALIAS = null;
+
+        // Configure and start a test ZK cluster (no SASL authentication is applied since the principal is null)
+        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
+
+        try {
+            // Create the setup client for the test cluster, and initialize the test znodes
+            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
+
+            // Mock configuration
+            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+            final String registryConfigValue =
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
+            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME))
+                    .andReturn(registryConfigValue)
+                    .anyTimes();
+            EasyMock.expect(config.getRemoteRegistryConfigurationNames())
+                    .andReturn(Collections.singletonList(REGISTRY_CLIENT_NAME)).anyTimes();
+            EasyMock.replay(config);
+
+            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME, config, CRED_ALIAS, PWD);
+        } finally {
+            zkCluster.stop();
+        }
+    }
+
+    /**
+     * Test multiple configurations for an unsecured remote registry.
+     */
+    @Test
+    public void testMultipleUnsecuredZooKeeperWithSimpleRegistryConfig() throws Exception {
+        final String REGISTRY_CLIENT_NAME_1 = "zkclient1";
+        final String REGISTRY_CLIENT_NAME_2 = "zkclient2";
+        final String PRINCIPAL = null;
+        final String PWD = null;
+        final String CRED_ALIAS = null;
+
+        // Configure and start a test ZK cluster (no SASL authentication is applied since the principal is null)
+        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
+
+        try {
+            // Create the setup client for the test cluster, and initialize the test znodes
+            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
+
+            // Mock configuration
+            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+            final String registryConfigValue1 =
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
+            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME_1))
+                    .andReturn(registryConfigValue1).anyTimes();
+            final String registryConfigValue2 =
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
+            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME_2))
+                    .andReturn(registryConfigValue2).anyTimes();
+            EasyMock.expect(config.getRemoteRegistryConfigurationNames())
+                    .andReturn(Arrays.asList(REGISTRY_CLIENT_NAME_1, REGISTRY_CLIENT_NAME_2)).anyTimes();
+            EasyMock.replay(config);
+
+            // Create the client service instance
+            RemoteConfigurationRegistryClientService clientService =
+                    RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
+            assertEquals("Wrong registry client service type.", clientService.getClass(), CuratorClientService.class);
+            clientService.setAliasService(null);
+            clientService.init(config, null);
+            clientService.start();
+
+            RemoteConfigurationRegistryClient client1 = clientService.get(REGISTRY_CLIENT_NAME_1);
+            assertNotNull(client1);
+
+            RemoteConfigurationRegistryClient client2 = clientService.get(REGISTRY_CLIENT_NAME_2);
+            assertNotNull(client2);
+
+            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME_1, clientService, false);
+            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME_2, clientService, false);
+        } finally {
+            zkCluster.stop();
+        }
+    }
+
+    /**
+     * Test a configuration for a secure remote registry, included in the gateway configuration.
+     */
+    @Test
+    public void testZooKeeperWithSimpleRegistryConfig() throws Exception {
+        final String AUTH_TYPE = "digest";
+        final String REGISTRY_CLIENT_NAME = "zk-registry-name";
+        final String PRINCIPAL = "knox";
+        final String PWD = "knoxtest";
+        final String CRED_ALIAS = "zkCredential";
+
+        // Configure and start a secure ZK cluster
+        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
+
+        try {
+            // Create the setup client for the test cluster, and initialize the test znodes
+            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
+
+            // Mock configuration
+            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+            final String registryConfigValue =
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=" + AUTH_TYPE + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + PRINCIPAL + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + CRED_ALIAS;
+            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME))
+                    .andReturn(registryConfigValue)
+                    .anyTimes();
+            EasyMock.expect(config.getRemoteRegistryConfigurationNames())
+                    .andReturn(Collections.singletonList(REGISTRY_CLIENT_NAME)).anyTimes();
+            EasyMock.replay(config);
+
+            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME, config, CRED_ALIAS, PWD);
+        } finally {
+            zkCluster.stop();
+        }
+    }
+
+    /**
+     * Test a remote registry configuration that is defined externally and referenced from the gateway configuration,
+     * using a secure client.
+     */
+    @Test
+    public void testZooKeeperWithSingleExternalRegistryConfig() throws Exception {
+        final String AUTH_TYPE = "digest";
+        final String REGISTRY_CLIENT_NAME = "my-zookeeper_registryNAME";
+        final String PRINCIPAL = "knox";
+        final String PWD = "knoxtest";
+        final String CRED_ALIAS = "zkCredential";
+
+        // Configure and start a secure ZK cluster
+        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
+
+        File tmpRegConfigFile = null;
+
+        try {
+            // Create the setup client for the test cluster, and initialize the test znodes
+            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
+
+            // Mock configuration
+            Map<String, String> registryConfigProps = new HashMap<>();
+            registryConfigProps.put("type", ZooKeeperClientService.TYPE);
+            registryConfigProps.put("name", REGISTRY_CLIENT_NAME);
+            registryConfigProps.put("address", zkCluster.getConnectString());
+            registryConfigProps.put("secure", "true");
+            registryConfigProps.put("authType", AUTH_TYPE);
+            registryConfigProps.put("principal", PRINCIPAL);
+            registryConfigProps.put("credentialAlias", CRED_ALIAS);
+            String registryConfigXML =
+                  RemoteRegistryConfigTestUtils.createRemoteConfigRegistriesXML(Collections.singleton(registryConfigProps));
+            tmpRegConfigFile = File.createTempFile("myRemoteRegistryConfig", "xml");
+            FileUtils.writeStringToFile(tmpRegConfigFile, registryConfigXML);
+
+            System.setProperty("org.apache.knox.gateway.remote.registry.config.file", tmpRegConfigFile.getAbsolutePath());
+
+            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+            EasyMock.replay(config);
+
+            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME, config, CRED_ALIAS, PWD);
+        } finally {
+            zkCluster.stop();
+            if (tmpRegConfigFile != null && tmpRegConfigFile.exists()) {
+                tmpRegConfigFile.delete();
+            }
+            System.clearProperty("org.apache.knox.gateway.remote.registry.config.file");
+        }
+    }
+
+    /**
+     * Set up and start a secure test ZooKeeper cluster.
+     */
+    private TestingCluster setupAndStartSecureTestZooKeeper(String principal, String digestPassword) throws Exception {
+        final boolean applyAuthentication = (principal != null);
+
+        // Configure security for the ZK cluster instances
+        Map<String, Object> customInstanceSpecProps = new HashMap<>();
+
+        if (applyAuthentication) {
+            customInstanceSpecProps.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
+            customInstanceSpecProps.put("requireClientAuthScheme", "sasl");
+        }
+
+        // Define the test cluster
+        List<InstanceSpec> instanceSpecs = new ArrayList<>();
+        for (int i = 0 ; i < 3 ; i++) {
+            InstanceSpec is = new InstanceSpec(null, -1, -1, -1, false, (i+1), -1, -1, customInstanceSpecProps);
+            instanceSpecs.add(is);
+        }
+        TestingCluster zkCluster = new TestingCluster(instanceSpecs);
+
+        if (applyAuthentication) {
+            // Setup ZooKeeper server SASL
+            Map<String, String> digestOptions = new HashMap<>();
+            digestOptions.put("user_" + principal, digestPassword);
+            final AppConfigurationEntry[] serverEntries =
+                    {new AppConfigurationEntry("org.apache.zookeeper.server.auth.DigestLoginModule",
+                            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                            digestOptions)};
+            Configuration.setConfiguration(new Configuration() {
+                @Override
+                public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+                    return ("Server".equalsIgnoreCase(name)) ? serverEntries : null;
+                }
+            });
+        }
+
+        // Start the cluster
+        zkCluster.start();
+
+        return zkCluster;
+    }
+
+    /**
+     * Create a ZooKeeper client with SASL digest auth configured, and initialize the test znodes.
+     */
+    private CuratorFramework initializeTestClientAndZNodes(TestingCluster zkCluster, String principal) throws Exception {
+        // Create the client for the test cluster
+        CuratorFramework setupClient = CuratorFrameworkFactory.builder()
+                                                              .connectString(zkCluster.getConnectString())
+                                                              .retryPolicy(new ExponentialBackoffRetry(100, 3))
+                                                              .build();
+        assertNotNull(setupClient);
+        setupClient.start();
+
+        List<ACL> acls = new ArrayList<>();
+        if (principal != null) {
+            acls.add(new ACL(ZooDefs.Perms.ALL, new Id("sasl", principal)));
+        } else {
+            acls.add(new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+        }
+        setupClient.create().creatingParentsIfNeeded().withACL(acls).forPath("/knox/config/descriptors");
+        setupClient.create().creatingParentsIfNeeded().withACL(acls).forPath("/knox/config/shared-providers");
+
+        List<ACL> negativeACLs = new ArrayList<>();
+        if (principal != null) {
+            negativeACLs.add(new ACL(ZooDefs.Perms.ALL, new Id("sasl", "notyou")));
+        } else {
+            negativeACLs.add(new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+        }
+        setupClient.create().creatingParentsIfNeeded().withACL(negativeACLs).forPath("/someotherconfig");
+
+        return setupClient;
+    }
+
+    private void doTestZooKeeperClient(final CuratorFramework setupClient,
+                                       final String           testClientName,
+                                       final GatewayConfig    config,
+                                       final String           credentialAlias,
+                                       final String           digestPassword) throws Exception {
+        boolean isSecureTest = (credentialAlias != null && digestPassword != null);
+
+        // Mock alias service
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(credentialAlias))
+                .andReturn(isSecureTest ? digestPassword.toCharArray() : null)
+                .anyTimes();
+        EasyMock.replay(aliasService);
+
+        // Create the client service instance
+        RemoteConfigurationRegistryClientService clientService =
+                RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
+        assertEquals("Wrong registry client service type.", clientService.getClass(), CuratorClientService.class);
+        clientService.setAliasService(aliasService);
+        clientService.init(config, null);
+        clientService.start();
+
+        doTestZooKeeperClient(setupClient, testClientName, clientService, isSecureTest);
+    }
+
+    /**
+     * Test ZooKeeper client interactions via the registry client service.
+     *
+     * @param setupClient    The client used for interacting with ZooKeeper independently of the registry client service.
+     * @param testClientName The name of the client to use from the registry client service.
+     * @param clientService  The RemoteConfigurationRegistryClientService
+     * @param isSecureTest   Flag to indicate whether this is a secure interaction test
+     */
+    private void doTestZooKeeperClient(final CuratorFramework                         setupClient,
+                                       final String                                   testClientName,
+                                       final RemoteConfigurationRegistryClientService clientService,
+                                       boolean                                        isSecureTest) throws Exception {
+
+        RemoteConfigurationRegistryClient client = clientService.get(testClientName);
+        assertNotNull(client);
+        List<String> descriptors = client.listChildEntries("/knox/config/descriptors");
+        assertNotNull(descriptors);
+        for (String descriptor : descriptors) {
+            System.out.println("Descriptor: " + descriptor);
+        }
+
+        List<String> providerConfigs = client.listChildEntries("/knox/config/shared-providers");
+        assertNotNull(providerConfigs);
+        for (String providerConfig : providerConfigs) {
+            System.out.println("Provider config: " + providerConfig);
+        }
+
+        List<String> someotherConfig = client.listChildEntries("/someotherconfig");
+        if (isSecureTest) {
+            assertNull("Expected null because of the ACL mismatch.", someotherConfig);
+        } else {
+            assertNotNull(someotherConfig);
+        }
+
+        // Test listeners
+        final String MY_NEW_ZNODE = "/clientServiceTestNode";
+        final String MY_NEW_DATA_ZNODE = MY_NEW_ZNODE + "/mydata";
+
+        if (setupClient.checkExists().forPath(MY_NEW_ZNODE) != null) {
+            setupClient.delete().deletingChildrenIfNeeded().forPath(MY_NEW_ZNODE);
+        }
+
+        final List<String> listenerLog = new ArrayList<>();
+        client.addChildEntryListener(MY_NEW_ZNODE, (c, type, path) -> {
+            listenerLog.add("EXTERNAL: " + type.toString() + ":" + path);
+            if (ChildEntryListener.Type.ADDED.equals(type)) {
+                try {
+                    c.addEntryListener(path, (cc, p, d) -> listenerLog.add("EXTERNAL: " + p + ":" + (d != null ? new String(d) : "null")));
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        });
+
+        client.createEntry(MY_NEW_ZNODE);
+        client.createEntry(MY_NEW_DATA_ZNODE, "more test data");
+        String testData = client.getEntryData(MY_NEW_DATA_ZNODE);
+        assertNotNull(testData);
+        assertEquals("more test data", testData);
+
+        assertTrue(client.entryExists(MY_NEW_DATA_ZNODE));
+        client.setEntryData(MY_NEW_DATA_ZNODE, "still more data");
+
+        try {
+            Thread.sleep(1000);
+        } catch (InterruptedException e) {
+            //
+        }
+
+        client.setEntryData(MY_NEW_DATA_ZNODE, "changed completely");
+
+        try {
+            Thread.sleep(1000);
+        } catch (InterruptedException e) {
+            //
+        }
+
+        client.deleteEntry(MY_NEW_DATA_ZNODE);
+
+        try {
+            Thread.sleep(1000);
+        } catch (InterruptedException e) {
+            //
+        }
+
+        assertFalse(listenerLog.isEmpty());
+    }
+
+}
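For orientation, the flow these tests exercise boils down to a few calls against the registry client service: create the service from the gateway configuration, wire in the AliasService that resolves the ZooKeeper digest credential alias, and then use a named client to read znodes. The sketch below is illustrative only; it assumes a GatewayConfig and AliasService supplied by the running gateway, and it reuses the client name and znode path from the tests above.

    // Illustrative sketch, mirroring the test flow above: create the registry client
    // service, wire in the AliasService that resolves the ZooKeeper digest credential,
    // and list the descriptor entries through a named client.
    void listRemoteDescriptors(GatewayConfig config, AliasService aliasService) throws Exception {
        RemoteConfigurationRegistryClientService clientService =
                RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
        clientService.setAliasService(aliasService);
        clientService.init(config, null);
        clientService.start();

        RemoteConfigurationRegistryClient client = clientService.get("zk-registry-name");
        for (String descriptor : client.listChildEntries("/knox/config/descriptors")) {
            System.out.println("Descriptor: " + descriptor);
        }
    }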


[38/49] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
index 38653f4,0000000..c6e373d
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
@@@ -1,818 -1,0 +1,895 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +
 +package org.apache.knox.gateway.services.topology.impl;
 +
 +
 +import org.apache.commons.digester3.Digester;
 +import org.apache.commons.digester3.binder.DigesterLoader;
 +import org.apache.commons.io.FileUtils;
 +import org.apache.commons.io.FilenameUtils;
 +import org.apache.commons.io.monitor.FileAlterationListener;
 +import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
 +import org.apache.commons.io.monitor.FileAlterationMonitor;
 +import org.apache.commons.io.monitor.FileAlterationObserver;
 +import org.apache.knox.gateway.GatewayMessages;
++import org.apache.knox.gateway.GatewayServer;
 +import org.apache.knox.gateway.audit.api.Action;
 +import org.apache.knox.gateway.audit.api.ActionOutcome;
 +import org.apache.knox.gateway.audit.api.AuditServiceFactory;
 +import org.apache.knox.gateway.audit.api.Auditor;
 +import org.apache.knox.gateway.audit.api.ResourceType;
 +import org.apache.knox.gateway.audit.log4j.audit.AuditConstants;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.apache.knox.gateway.service.definition.ServiceDefinition;
++import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
++import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.knox.gateway.services.topology.TopologyService;
++import org.apache.knox.gateway.topology.ClusterConfigurationMonitorService;
 +import org.apache.knox.gateway.topology.Topology;
 +import org.apache.knox.gateway.topology.TopologyEvent;
 +import org.apache.knox.gateway.topology.TopologyListener;
 +import org.apache.knox.gateway.topology.TopologyMonitor;
 +import org.apache.knox.gateway.topology.TopologyProvider;
 +import org.apache.knox.gateway.topology.builder.TopologyBuilder;
++import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitor;
++import org.apache.knox.gateway.topology.monitor.RemoteConfigurationMonitor;
++import org.apache.knox.gateway.topology.monitor.RemoteConfigurationMonitorFactory;
++import org.apache.knox.gateway.topology.simple.SimpleDescriptorHandler;
 +import org.apache.knox.gateway.topology.validation.TopologyValidator;
 +import org.apache.knox.gateway.topology.xml.AmbariFormatXmlTopologyRules;
 +import org.apache.knox.gateway.topology.xml.KnoxFormatXmlTopologyRules;
 +import org.apache.knox.gateway.util.ServiceDefinitionsLoader;
- import org.apache.knox.gateway.services.security.AliasService;
- import org.apache.knox.gateway.topology.simple.SimpleDescriptorHandler;
 +import org.eclipse.persistence.jaxb.JAXBContextProperties;
 +import org.xml.sax.SAXException;
 +
 +import javax.xml.bind.JAXBContext;
 +import javax.xml.bind.JAXBException;
 +import javax.xml.bind.Marshaller;
 +import java.io.File;
 +import java.io.FileFilter;
 +import java.io.IOException;
 +import java.net.URISyntaxException;
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.Collection;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Set;
 +
 +import static org.apache.commons.digester3.binder.DigesterLoader.newLoader;
 +
 +
 +public class DefaultTopologyService
 +    extends FileAlterationListenerAdaptor
 +    implements TopologyService, TopologyMonitor, TopologyProvider, FileFilter, FileAlterationListener {
 +
 +  private static Auditor auditor = AuditServiceFactory.getAuditService().getAuditor(
 +    AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
 +    AuditConstants.KNOX_COMPONENT_NAME);
 +
 +  private static final List<String> SUPPORTED_TOPOLOGY_FILE_EXTENSIONS = new ArrayList<String>();
 +  static {
 +    SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("xml");
 +    SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("conf");
 +  }
 +
 +  private static GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
 +  private static DigesterLoader digesterLoader = newLoader(new KnoxFormatXmlTopologyRules(), new AmbariFormatXmlTopologyRules());
 +  private List<FileAlterationMonitor> monitors = new ArrayList<>();
 +  private File topologiesDirectory;
 +  private File sharedProvidersDirectory;
 +  private File descriptorsDirectory;
 +
 +  private DescriptorsMonitor descriptorsMonitor;
 +
 +  private Set<TopologyListener> listeners;
 +  private volatile Map<File, Topology> topologies;
 +  private AliasService aliasService;
 +
++  private RemoteConfigurationMonitor remoteMonitor = null;
 +
 +  private Topology loadTopology(File file) throws IOException, SAXException, URISyntaxException, InterruptedException {
 +    final long TIMEOUT = 250; //ms
 +    final long DELAY = 50; //ms
 +    log.loadingTopologyFile(file.getAbsolutePath());
 +    Topology topology;
 +    long start = System.currentTimeMillis();
 +    while (true) {
 +      try {
 +        topology = loadTopologyAttempt(file);
 +        break;
 +      } catch (IOException e) {
 +        if (System.currentTimeMillis() - start < TIMEOUT) {
 +          log.failedToLoadTopologyRetrying(file.getAbsolutePath(), Long.toString(DELAY), e);
 +          Thread.sleep(DELAY);
 +        } else {
 +          throw e;
 +        }
 +      } catch (SAXException e) {
 +        if (System.currentTimeMillis() - start < TIMEOUT) {
 +          log.failedToLoadTopologyRetrying(file.getAbsolutePath(), Long.toString(DELAY), e);
 +          Thread.sleep(DELAY);
 +        } else {
 +          throw e;
 +        }
 +      }
 +    }
 +    return topology;
 +  }
 +
 +  private Topology loadTopologyAttempt(File file) throws IOException, SAXException, URISyntaxException {
 +    Topology topology;
 +    Digester digester = digesterLoader.newDigester();
 +    TopologyBuilder topologyBuilder = digester.parse(FileUtils.openInputStream(file));
 +    if (null == topologyBuilder) {
 +      return null;
 +    }
 +    topology = topologyBuilder.build();
 +    topology.setUri(file.toURI());
 +    topology.setName(FilenameUtils.removeExtension(file.getName()));
 +    topology.setTimestamp(file.lastModified());
 +    return topology;
 +  }
 +
 +  private void redeployTopology(Topology topology) {
 +    File topologyFile = new File(topology.getUri());
 +    try {
 +      TopologyValidator tv = new TopologyValidator(topology);
 +
 +      if(!tv.validateTopology()) {
 +        throw new SAXException(tv.getErrorString());
 +      }
 +
 +      long start = System.currentTimeMillis();
 +      long limit = 1000L; // One second.
 +      long elapsed = 1;
 +      while (elapsed <= limit) {
 +        try {
 +          long origTimestamp = topologyFile.lastModified();
 +          long setTimestamp = Math.max(System.currentTimeMillis(), topologyFile.lastModified() + elapsed);
 +          if(topologyFile.setLastModified(setTimestamp)) {
 +            long newTimestamp = topologyFile.lastModified();
 +            if(newTimestamp > origTimestamp) {
 +              break;
 +            } else {
 +              Thread.sleep(10);
 +              elapsed = System.currentTimeMillis() - start;
 +              continue;
 +            }
 +          } else {
 +            auditor.audit(Action.REDEPLOY, topology.getName(), ResourceType.TOPOLOGY,
 +                ActionOutcome.FAILURE);
 +            log.failedToRedeployTopology(topology.getName());
 +            break;
 +          }
 +        } catch (InterruptedException e) {
 +          auditor.audit(Action.REDEPLOY, topology.getName(), ResourceType.TOPOLOGY,
 +              ActionOutcome.FAILURE);
 +          log.failedToRedeployTopology(topology.getName(), e);
 +          e.printStackTrace();
 +        }
 +      }
 +    } catch (SAXException e) {
 +      auditor.audit(Action.REDEPLOY, topology.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
 +      log.failedToRedeployTopology(topology.getName(), e);
 +    }
 +  }
 +
 +  private List<TopologyEvent> createChangeEvents(
 +      Map<File, Topology> oldTopologies,
 +      Map<File, Topology> newTopologies) {
 +    ArrayList<TopologyEvent> events = new ArrayList<TopologyEvent>();
 +    // Go through the old topologies and find anything that was deleted.
 +    for (File file : oldTopologies.keySet()) {
 +      if (!newTopologies.containsKey(file)) {
 +        events.add(new TopologyEvent(TopologyEvent.Type.DELETED, oldTopologies.get(file)));
 +      }
 +    }
 +    // Go through the new topologies and figure out what was updated vs added.
 +    for (File file : newTopologies.keySet()) {
 +      if (oldTopologies.containsKey(file)) {
 +        Topology oldTopology = oldTopologies.get(file);
 +        Topology newTopology = newTopologies.get(file);
 +        if (newTopology.getTimestamp() > oldTopology.getTimestamp()) {
 +          events.add(new TopologyEvent(TopologyEvent.Type.UPDATED, newTopologies.get(file)));
 +        }
 +      } else {
 +        events.add(new TopologyEvent(TopologyEvent.Type.CREATED, newTopologies.get(file)));
 +      }
 +    }
 +    return events;
 +  }
 +
++  private File calculateAbsoluteProvidersConfigDir(GatewayConfig config) {
++    File pcDir = new File(config.getGatewayProvidersConfigDir());
++    return pcDir.getAbsoluteFile();
++  }
++
++  private File calculateAbsoluteDescriptorsDir(GatewayConfig config) {
++    File descDir = new File(config.getGatewayDescriptorsDir());
++    return descDir.getAbsoluteFile();
++  }
++
 +  private File calculateAbsoluteTopologiesDir(GatewayConfig config) {
 +    File topoDir = new File(config.getGatewayTopologyDir());
 +    topoDir = topoDir.getAbsoluteFile();
 +    return topoDir;
 +  }
 +
 +  private File calculateAbsoluteConfigDir(GatewayConfig config) {
-     File configDir = null;
++    File configDir;
 +
 +    String path = config.getGatewayConfDir();
 +    configDir = (path != null) ? new File(path) : (new File(config.getGatewayTopologyDir())).getParentFile();
 +
 +    return configDir.getAbsoluteFile();
 +  }
 +
 +  private void  initListener(FileAlterationMonitor  monitor,
 +                            File                   directory,
 +                            FileFilter             filter,
 +                            FileAlterationListener listener) {
 +    monitors.add(monitor);
 +    FileAlterationObserver observer = new FileAlterationObserver(directory, filter);
 +    observer.addListener(listener);
 +    monitor.addObserver(observer);
 +  }
 +
 +  private void initListener(File directory, FileFilter filter, FileAlterationListener listener) throws IOException, SAXException {
 +    // Increasing the monitoring interval to 5 seconds as profiling has shown
 +    // this is rather expensive in terms of generated garbage objects.
 +    initListener(new FileAlterationMonitor(5000L), directory, filter, listener);
 +  }
 +
 +  private Map<File, Topology> loadTopologies(File directory) {
 +    Map<File, Topology> map = new HashMap<>();
 +    if (directory.isDirectory() && directory.canRead()) {
 +      File[] existingTopologies = directory.listFiles(this);
 +      if (existingTopologies != null) {
 +        for (File file : existingTopologies) {
 +          try {
 +            Topology loadTopology = loadTopology(file);
 +            if (null != loadTopology) {
 +              map.put(file, loadTopology);
 +            } else {
 +              auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
 +                      ActionOutcome.FAILURE);
 +              log.failedToLoadTopology(file.getAbsolutePath());
 +            }
 +          } catch (IOException e) {
 +            // Maybe it makes sense to throw exception
 +            auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
 +                    ActionOutcome.FAILURE);
 +            log.failedToLoadTopology(file.getAbsolutePath(), e);
 +          } catch (SAXException e) {
 +            // Maybe it makes sense to throw exception
 +            auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
 +                    ActionOutcome.FAILURE);
 +            log.failedToLoadTopology(file.getAbsolutePath(), e);
 +          } catch (Exception e) {
 +            // Maybe it makes sense to throw exception
 +            auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
 +                    ActionOutcome.FAILURE);
 +            log.failedToLoadTopology(file.getAbsolutePath(), e);
 +          }
 +        }
 +      }
 +    }
 +    return map;
 +  }
 +
 +  public void setAliasService(AliasService as) {
 +    this.aliasService = as;
 +  }
 +
 +  public void deployTopology(Topology t){
 +
 +    try {
 +      File temp = new File(topologiesDirectory.getAbsolutePath() + "/" + t.getName() + ".xml.temp");
 +      Package topologyPkg = Topology.class.getPackage();
 +      String pkgName = topologyPkg.getName();
 +      String bindingFile = pkgName.replace(".", "/") + "/topology_binding-xml.xml";
 +
 +      Map<String, Object> properties = new HashMap<>(1);
 +      properties.put(JAXBContextProperties.OXM_METADATA_SOURCE, bindingFile);
 +      JAXBContext jc = JAXBContext.newInstance(pkgName, Topology.class.getClassLoader(), properties);
 +      Marshaller mr = jc.createMarshaller();
 +
 +      mr.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
 +      mr.marshal(t, temp);
 +
 +      File topology = new File(topologiesDirectory.getAbsolutePath() + "/" + t.getName() + ".xml");
 +      if(!temp.renameTo(topology)) {
 +        FileUtils.forceDelete(temp);
 +        throw new IOException("Could not rename temp file");
 +      }
 +
 +      // This code will check if the topology is valid, and retrieve the errors if it is not.
 +      TopologyValidator validator = new TopologyValidator( topology.getAbsolutePath() );
 +      if( !validator.validateTopology() ){
 +        throw new SAXException( validator.getErrorString() );
 +      }
 +
 +
 +    } catch (JAXBException e) {
 +      auditor.audit(Action.DEPLOY, t.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
 +      log.failedToDeployTopology(t.getName(), e);
 +    } catch (IOException io) {
 +      auditor.audit(Action.DEPLOY, t.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
 +      log.failedToDeployTopology(t.getName(), io);
 +    } catch (SAXException sx){
 +      auditor.audit(Action.DEPLOY, t.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
 +      log.failedToDeployTopology(t.getName(), sx);
 +    }
 +    reloadTopologies();
 +  }
 +
 +  public void redeployTopologies(String topologyName) {
 +
 +    for (Topology topology : getTopologies()) {
 +      if (topologyName == null || topologyName.equals(topology.getName())) {
 +        redeployTopology(topology);
 +      }
 +    }
 +
 +  }
 +
 +  public void reloadTopologies() {
 +    try {
 +      synchronized (this) {
 +        Map<File, Topology> oldTopologies = topologies;
 +        Map<File, Topology> newTopologies = loadTopologies(topologiesDirectory);
 +        List<TopologyEvent> events = createChangeEvents(oldTopologies, newTopologies);
 +        topologies = newTopologies;
 +        notifyChangeListeners(events);
 +      }
 +    } catch (Exception e) {
 +      // Maybe it makes sense to throw exception
 +      log.failedToReloadTopologies(e);
 +    }
 +  }
 +
 +  public void deleteTopology(Topology t) {
 +    File topoDir = topologiesDirectory;
 +
 +    if(topoDir.isDirectory() && topoDir.canRead()) {
 +      for (File f : listFiles(topoDir)) {
 +        String fName = FilenameUtils.getBaseName(f.getName());
 +        if(fName.equals(t.getName())) {
 +          f.delete();
 +        }
 +      }
 +    }
 +    reloadTopologies();
 +  }
 +
 +  private void notifyChangeListeners(List<TopologyEvent> events) {
 +    for (TopologyListener listener : listeners) {
 +      try {
 +        listener.handleTopologyEvent(events);
 +      } catch (RuntimeException e) {
 +        auditor.audit(Action.LOAD, "Topology_Event", ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
 +        log.failedToHandleTopologyEvents(e);
 +      }
 +    }
 +  }
 +
 +  public Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config) {
 +    File tFile = null;
 +    Map<String, List<String>> urls = new HashMap<>();
 +    if (topologiesDirectory.isDirectory() && topologiesDirectory.canRead()) {
 +      for (File f : listFiles(topologiesDirectory)) {
 +        if (FilenameUtils.removeExtension(f.getName()).equals(t.getName())) {
 +          tFile = f;
 +        }
 +      }
 +    }
 +    Set<ServiceDefinition> defs;
 +    if(tFile != null) {
 +      defs = ServiceDefinitionsLoader.getServiceDefinitions(new File(config.getGatewayServicesDir()));
 +
 +      for(ServiceDefinition def : defs) {
 +        urls.put(def.getRole(), def.getTestURLs());
 +      }
 +    }
 +    return urls;
 +  }
 +
 +  public Collection<Topology> getTopologies() {
 +    Map<File, Topology> map = topologies;
 +    return Collections.unmodifiableCollection(map.values());
 +  }
 +
 +  @Override
 +  public boolean deployProviderConfiguration(String name, String content) {
 +    return writeConfig(sharedProvidersDirectory, name, content);
 +  }
 +
 +  @Override
 +  public Collection<File> getProviderConfigurations() {
 +    List<File> providerConfigs = new ArrayList<>();
 +    for (File providerConfig : listFiles(sharedProvidersDirectory)) {
 +      if (SharedProviderConfigMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(providerConfig.getName()))) {
 +        providerConfigs.add(providerConfig);
 +      }
 +    }
 +    return providerConfigs;
 +  }
 +
 +  @Override
 +  public boolean deleteProviderConfiguration(String name) {
 +    boolean result = false;
 +
 +    File providerConfig = getExistingFile(sharedProvidersDirectory, name);
 +    if (providerConfig != null) {
 +      List<String> references = descriptorsMonitor.getReferencingDescriptors(providerConfig.getAbsolutePath());
 +      if (references.isEmpty()) {
 +        result = providerConfig.delete();
 +      } else {
 +        log.preventedDeletionOfSharedProviderConfiguration(providerConfig.getAbsolutePath());
 +      }
 +    } else {
 +      result = true; // If it already does NOT exist, then the delete effectively succeeded
 +    }
 +
 +    return result;
 +  }
 +
 +  @Override
 +  public boolean deployDescriptor(String name, String content) {
 +    return writeConfig(descriptorsDirectory, name, content);
 +  }
 +
 +  @Override
 +  public Collection<File> getDescriptors() {
 +    List<File> descriptors = new ArrayList<>();
 +    for (File descriptor : listFiles(descriptorsDirectory)) {
 +      if (DescriptorsMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(descriptor.getName()))) {
 +        descriptors.add(descriptor);
 +      }
 +    }
 +    return descriptors;
 +  }
 +
 +  @Override
 +  public boolean deleteDescriptor(String name) {
 +    File descriptor = getExistingFile(descriptorsDirectory, name);
 +    return (descriptor == null) || descriptor.delete();
 +  }
 +
 +  @Override
 +  public void addTopologyChangeListener(TopologyListener listener) {
 +    listeners.add(listener);
 +  }
 +
 +  @Override
 +  public void startMonitor() throws Exception {
++    // Start the local configuration monitors
 +    for (FileAlterationMonitor monitor : monitors) {
 +      monitor.start();
 +    }
++
++    // Start the remote configuration monitor, if it has been initialized
++    if (remoteMonitor != null) {
++      try {
++        remoteMonitor.start();
++      } catch (Exception e) {
++        log.remoteConfigurationMonitorStartFailure(remoteMonitor.getClass().getTypeName(), e.getLocalizedMessage(), e);
++      }
++    }
 +  }
 +
 +  @Override
 +  public void stopMonitor() throws Exception {
++    // Stop the local configuration monitors
 +    for (FileAlterationMonitor monitor : monitors) {
 +      monitor.stop();
 +    }
++
++    // Stop the remote configuration monitor, if it has been initialized
++    if (remoteMonitor != null) {
++      remoteMonitor.stop();
++    }
 +  }
 +
 +  @Override
 +  public boolean accept(File file) {
 +    boolean accept = false;
 +    if (!file.isDirectory() && file.canRead()) {
 +      String extension = FilenameUtils.getExtension(file.getName());
 +      if (SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.contains(extension)) {
 +        accept = true;
 +      }
 +    }
 +    return accept;
 +  }
 +
 +  @Override
 +  public void onFileCreate(File file) {
 +    onFileChange(file);
 +  }
 +
 +  @Override
 +  public void onFileDelete(java.io.File file) {
 +    // For full topology descriptors, we need to make sure to delete any corresponding simple descriptors to prevent
 +    // unintended subsequent generation of the topology descriptor
 +    for (String ext : DescriptorsMonitor.SUPPORTED_EXTENSIONS) {
 +      File simpleDesc =
 +              new File(descriptorsDirectory, FilenameUtils.getBaseName(file.getName()) + "." + ext);
 +      if (simpleDesc.exists()) {
 +        log.deletingDescriptorForTopologyDeletion(simpleDesc.getName(), file.getName());
 +        simpleDesc.delete();
 +      }
 +    }
 +
 +    onFileChange(file);
 +  }
 +
 +  @Override
 +  public void onFileChange(File file) {
 +    reloadTopologies();
 +  }
 +
 +  @Override
 +  public void stop() {
 +
 +  }
 +
 +  @Override
 +  public void start() {
- 
++    // Register a cluster configuration monitor listener for change notifications
++    ClusterConfigurationMonitorService ccms =
++                  GatewayServer.getGatewayServices().getService(GatewayServices.CLUSTER_CONFIGURATION_MONITOR_SERVICE);
++    ccms.addListener(new TopologyDiscoveryTrigger(this));
 +  }
 +
 +  @Override
 +  public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
 +
 +    try {
-       listeners = new HashSet<>();
++      listeners  = new HashSet<>();
 +      topologies = new HashMap<>();
 +
 +      topologiesDirectory = calculateAbsoluteTopologiesDir(config);
 +
 +      File configDirectory = calculateAbsoluteConfigDir(config);
 +      descriptorsDirectory = new File(configDirectory, "descriptors");
 +      sharedProvidersDirectory = new File(configDirectory, "shared-providers");
 +
 +      // Add support for conf/topologies
 +      initListener(topologiesDirectory, this, this);
 +
 +      // Add support for conf/descriptors
 +      descriptorsMonitor = new DescriptorsMonitor(topologiesDirectory, aliasService);
 +      initListener(descriptorsDirectory,
 +                   descriptorsMonitor,
 +                   descriptorsMonitor);
 +      log.monitoringDescriptorChangesInDirectory(descriptorsDirectory.getAbsolutePath());
 +
 +      // Add support for conf/shared-providers
 +      SharedProviderConfigMonitor spm = new SharedProviderConfigMonitor(descriptorsMonitor, descriptorsDirectory);
 +      initListener(sharedProvidersDirectory, spm, spm);
 +      log.monitoringProviderConfigChangesInDirectory(sharedProvidersDirectory.getAbsolutePath());
 +
 +      // For all the descriptors currently in the descriptors dir at start-up time, trigger topology generation.
 +      // This happens prior to the start-up loading of the topologies.
 +      String[] descriptorFilenames =  descriptorsDirectory.list();
 +      if (descriptorFilenames != null) {
-           for (String descriptorFilename : descriptorFilenames) {
-               if (DescriptorsMonitor.isDescriptorFile(descriptorFilename)) {
-                   descriptorsMonitor.onFileChange(new File(descriptorsDirectory, descriptorFilename));
-               }
++        for (String descriptorFilename : descriptorFilenames) {
++          if (DescriptorsMonitor.isDescriptorFile(descriptorFilename)) {
++            // If there isn't a corresponding topology file, or if the descriptor has been modified since the
++            // corresponding topology file was generated, then trigger generation of one
++            File matchingTopologyFile = getExistingFile(topologiesDirectory, FilenameUtils.getBaseName(descriptorFilename));
++            if (matchingTopologyFile == null ||
++                    matchingTopologyFile.lastModified() < (new File(descriptorsDirectory, descriptorFilename)).lastModified()) {
++              descriptorsMonitor.onFileChange(new File(descriptorsDirectory, descriptorFilename));
++            }
 +          }
++        }
 +      }
 +
++      // Initialize the remote configuration monitor, if it has been configured
++      remoteMonitor = RemoteConfigurationMonitorFactory.get(config);
++
 +    } catch (IOException | SAXException io) {
 +      throw new ServiceLifecycleException(io.getMessage());
 +    }
 +  }
 +
- 
 +  /**
 +   * Utility method for listing the files in the specified directory.
 +   * This method is "nicer" than File#listFiles() because it will not return null.
 +   *
 +   * @param directory The directory whose files should be returned.
 +   *
 +   * @return A List of the Files in the directory.
 +   */
 +  private static List<File> listFiles(File directory) {
-     List<File> result = null;
++    List<File> result;
 +    File[] files = directory.listFiles();
 +    if (files != null) {
 +      result = Arrays.asList(files);
 +    } else {
 +      result = Collections.emptyList();
 +    }
 +    return result;
 +  }
 +
 +  /**
 +   * Search for a file in the specified directory whose base name (filename without extension) matches the
 +   * specified basename.
 +   *
 +   * @param directory The directory in which to search.
 +   * @param basename  The basename of interest.
 +   *
 +   * @return The matching File, or null if no file in the directory has the specified basename.
 +   */
 +  private static File getExistingFile(File directory, String basename) {
 +    File match = null;
 +    for (File file : listFiles(directory)) {
 +      if (FilenameUtils.getBaseName(file.getName()).equals(basename)) {
 +        match = file;
 +        break;
 +      }
 +    }
 +    return match;
 +  }
 +
 +  /**
 +   * Write the specified content to a file.
 +   *
 +   * @param dest    The destination directory.
 +   * @param name    The name of the file.
 +   * @param content The contents of the file.
 +   *
 +   * @return true, if the write succeeds; otherwise, false.
 +   */
 +  private static boolean writeConfig(File dest, String name, String content) {
 +    boolean result = false;
 +
 +    File destFile = new File(dest, name);
 +    try {
 +      FileUtils.writeStringToFile(destFile, content);
 +      log.wroteConfigurationFile(destFile.getAbsolutePath());
 +      result = true;
 +    } catch (IOException e) {
 +      log.failedToWriteConfigurationFile(destFile.getAbsolutePath(), e);
 +    }
 +
 +    return result;
 +  }
 +
 +
 +  /**
 +   * Change handler for simple descriptors
 +   */
 +  public static class DescriptorsMonitor extends FileAlterationListenerAdaptor
 +                                          implements FileFilter {
 +
 +    static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<String>();
 +    static {
 +      SUPPORTED_EXTENSIONS.add("json");
 +      SUPPORTED_EXTENSIONS.add("yml");
 +      SUPPORTED_EXTENSIONS.add("yaml");
 +    }
 +
 +    private File topologiesDir;
 +
 +    private AliasService aliasService;
 +
 +    private Map<String, List<String>> providerConfigReferences = new HashMap<>();
 +
 +
 +    static boolean isDescriptorFile(String filename) {
 +      return SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(filename));
 +    }
 +
 +    public DescriptorsMonitor(File topologiesDir, AliasService aliasService) {
 +      this.topologiesDir  = topologiesDir;
 +      this.aliasService   = aliasService;
 +    }
 +
 +    List<String> getReferencingDescriptors(String providerConfigPath) {
 +      List<String> result = providerConfigReferences.get(FilenameUtils.normalize(providerConfigPath));
 +      if (result == null) {
 +        result = Collections.emptyList();
 +      }
 +      return result;
 +    }
 +
 +    @Override
 +    public void onFileCreate(File file) {
 +      onFileChange(file);
 +    }
 +
 +    @Override
 +    public void onFileDelete(File file) {
 +      // For simple descriptors, we need to make sure to delete any corresponding full topology descriptors to trigger undeployment
 +      for (String ext : DefaultTopologyService.SUPPORTED_TOPOLOGY_FILE_EXTENSIONS) {
 +        File topologyFile =
 +                new File(topologiesDir, FilenameUtils.getBaseName(file.getName()) + "." + ext);
 +        if (topologyFile.exists()) {
 +          log.deletingTopologyForDescriptorDeletion(topologyFile.getName(), file.getName());
 +          topologyFile.delete();
 +        }
 +      }
 +
 +      String normalizedFilePath = FilenameUtils.normalize(file.getAbsolutePath());
 +      String reference = null;
 +      for (Map.Entry<String, List<String>> entry : providerConfigReferences.entrySet()) {
 +        if (entry.getValue().contains(normalizedFilePath)) {
 +          reference = entry.getKey();
 +          break;
 +        }
 +      }
 +
 +      if (reference != null) {
 +        providerConfigReferences.get(reference).remove(normalizedFilePath);
 +        log.removedProviderConfigurationReference(normalizedFilePath, reference);
 +      }
 +    }
 +
 +    @Override
 +    public void onFileChange(File file) {
 +      try {
 +        // When a simple descriptor has been created or modified, generate the new topology descriptor
 +        Map<String, File> result = SimpleDescriptorHandler.handle(file, topologiesDir, aliasService);
 +        log.generatedTopologyForDescriptorChange(result.get("topology").getName(), file.getName());
 +
 +        // Add the provider config reference relationship for handling updates to the provider config
 +        String providerConfig = FilenameUtils.normalize(result.get("reference").getAbsolutePath());
 +        if (!providerConfigReferences.containsKey(providerConfig)) {
 +          providerConfigReferences.put(providerConfig, new ArrayList<String>());
 +        }
 +        List<String> refs = providerConfigReferences.get(providerConfig);
 +        String descriptorName = FilenameUtils.normalize(file.getAbsolutePath());
 +        if (!refs.contains(descriptorName)) {
 +          // Need to check if descriptor had previously referenced another provider config, so it can be removed
 +          for (List<String> descs : providerConfigReferences.values()) {
 +            if (descs.contains(descriptorName)) {
 +              descs.remove(descriptorName);
 +            }
 +          }
 +
 +          // Add the current reference relationship
 +          refs.add(descriptorName);
 +          log.addedProviderConfigurationReference(descriptorName, providerConfig);
 +        }
 +      } catch (Exception e) {
 +        log.simpleDescriptorHandlingError(file.getName(), e);
 +      }
 +    }
 +
 +    @Override
 +    public boolean accept(File file) {
 +      boolean accept = false;
 +      if (!file.isDirectory() && file.canRead()) {
 +        String extension = FilenameUtils.getExtension(file.getName());
 +        if (SUPPORTED_EXTENSIONS.contains(extension)) {
 +          accept = true;
 +        }
 +      }
 +      return accept;
 +    }
 +  }
 +
 +  /**
 +   * Change handler for shared provider configurations
 +   */
 +  public static class SharedProviderConfigMonitor extends FileAlterationListenerAdaptor
 +          implements FileFilter {
 +
 +    static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<>();
 +    static {
 +      SUPPORTED_EXTENSIONS.add("xml");
 +    }
 +
 +    private DescriptorsMonitor descriptorsMonitor;
 +    private File descriptorsDir;
 +
 +
 +    SharedProviderConfigMonitor(DescriptorsMonitor descMonitor, File descriptorsDir) {
 +      this.descriptorsMonitor = descMonitor;
 +      this.descriptorsDir     = descriptorsDir;
 +    }
 +
 +    @Override
 +    public void onFileCreate(File file) {
 +      onFileChange(file);
 +    }
 +
 +    @Override
 +    public void onFileDelete(File file) {
 +      onFileChange(file);
 +    }
 +
 +    @Override
 +    public void onFileChange(File file) {
 +      // For shared provider configuration, we need to update any simple descriptors that reference it
 +      for (File descriptor : getReferencingDescriptors(file)) {
 +        descriptor.setLastModified(System.currentTimeMillis());
 +      }
 +    }
 +
 +    private List<File> getReferencingDescriptors(File sharedProviderConfig) {
 +      List<File> references = new ArrayList<>();
 +
 +      for (File descriptor : listFiles(descriptorsDir)) {
 +        if (DescriptorsMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(descriptor.getName()))) {
 +          for (String reference : descriptorsMonitor.getReferencingDescriptors(FilenameUtils.normalize(sharedProviderConfig.getAbsolutePath()))) {
 +            references.add(new File(reference));
 +          }
 +        }
 +      }
 +
 +      return references;
 +    }
 +
 +    @Override
 +    public boolean accept(File file) {
 +      boolean accept = false;
 +      if (!file.isDirectory() && file.canRead()) {
 +        String extension = FilenameUtils.getExtension(file.getName());
 +        if (SUPPORTED_EXTENSIONS.contains(extension)) {
 +          accept = true;
 +        }
 +      }
 +      return accept;
 +    }
 +  }
 +
++  /**
++   * Listener for Ambari config change events, which will trigger re-generation (including re-discovery) of the
++   * affected topologies.
++   */
++  private static class TopologyDiscoveryTrigger implements ClusterConfigurationMonitor.ConfigurationChangeListener {
++
++    private TopologyService topologyService = null;
++
++    TopologyDiscoveryTrigger(TopologyService topologyService) {
++      this.topologyService = topologyService;
++    }
++
++    @Override
++    public void onConfigurationChange(String source, String clusterName) {
++      log.noticedClusterConfigurationChange(source, clusterName);
++      try {
++        // Identify any descriptors associated with the cluster configuration change
++        for (File descriptor : topologyService.getDescriptors()) {
++          String descriptorContent = FileUtils.readFileToString(descriptor);
++          if (descriptorContent.contains(source)) {
++            if (descriptorContent.contains(clusterName)) {
++              log.triggeringTopologyRegeneration(source, clusterName, descriptor.getAbsolutePath());
++              // 'Touch' the descriptor to trigger re-generation of the associated topology
++              descriptor.setLastModified(System.currentTimeMillis());
++            }
++          }
++        }
++      } catch (Exception e) {
++        log.errorRespondingToConfigChange(source, clusterName, e);
++      }
++    }
++  }
++
 +}
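Taken as a whole, DefaultTopologyService watches the local topologies, descriptors, and shared-providers directories, optionally a remote configuration registry, and notifies registered TopologyListeners of created, updated, and deleted topologies. A minimal consumer might look like the sketch below. It is only an outline: it assumes the service has already been initialized by the gateway, that TopologyListener is a single-method interface (only handleTopologyEvent is referenced in this diff), and that TopologyEvent exposes getType() and getTopology() accessors matching the constructor usage above.

    // Minimal sketch: react to topology lifecycle events from an already-initialized service.
    void watchTopologies(TopologyService topologyService) throws Exception {
        topologyService.addTopologyChangeListener(events -> {
            for (TopologyEvent event : events) {
                System.out.println(event.getType() + " -> " + event.getTopology().getName());
            }
        });
        // Starts the local FileAlterationMonitors and, if one was configured, the remote monitor.
        topologyService.startMonitor();
    }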

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactory.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactory.java
index 254dca1,0000000..4def2b7
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactory.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactory.java
@@@ -1,71 -1,0 +1,71 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.simple;
 +
 +import com.fasterxml.jackson.databind.ObjectMapper;
 +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
 +import org.apache.commons.io.FilenameUtils;
 +
 +import java.io.File;
 +import java.io.IOException;
 +
 +
 +public class SimpleDescriptorFactory {
 +
 +    /**
 +     * Create a SimpleDescriptor from the specified file.
 +     *
 +     * @param path The path to the file.
 +     * @return A SimpleDescriptor based on the contents of the file.
 +     *
 +     * @throws IOException if the descriptor file cannot be read or parsed.
 +     */
 +    public static SimpleDescriptor parse(String path) throws IOException {
 +        SimpleDescriptor sd;
 +
 +        if (path.endsWith(".json")) {
 +            sd = parseJSON(path);
-         } else if (path.endsWith(".yml")) {
++        } else if (path.endsWith(".yml") || path.endsWith(".yaml")) {
 +            sd = parseYAML(path);
 +        } else {
 +           throw new IllegalArgumentException("Unsupported simple descriptor format: " + path.substring(path.lastIndexOf('.')));
 +        }
 +
 +        return sd;
 +    }
 +
 +
 +    static SimpleDescriptor parseJSON(String path) throws IOException {
 +        final ObjectMapper mapper = new ObjectMapper();
 +        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
 +        if (sd != null) {
 +            sd.setName(FilenameUtils.getBaseName(path));
 +        }
 +        return sd;
 +    }
 +
 +
 +    static SimpleDescriptor parseYAML(String path) throws IOException {
 +        final ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
 +        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
 +        if (sd != null) {
 +            sd.setName(FilenameUtils.getBaseName(path));
 +        }
 +        return sd;
 +    }
 +
 +}
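In practice the factory is driven by SimpleDescriptorHandler (below) rather than called directly, but its contract is easy to illustrate. The sketch below is only an example: the file path is hypothetical, and the getters used are the ones SimpleDescriptorHandler itself relies on.

    // Illustrative sketch: parse a simple descriptor and inspect its discovery details.
    void describe() throws IOException {
        SimpleDescriptor descriptor = SimpleDescriptorFactory.parse("/tmp/example-descriptor.json"); // hypothetical path
        System.out.println("Cluster:           " + descriptor.getClusterName());
        System.out.println("Discovery type:    " + descriptor.getDiscoveryType());
        System.out.println("Discovery address: " + descriptor.getDiscoveryAddress());
        for (SimpleDescriptor.Service service : descriptor.getServices()) {
            System.out.println("Service " + service.getName() + " URLs: " + service.getURLs());
        }
    }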

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
index 2e3214d,0000000..30786dc
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
@@@ -1,316 -1,0 +1,382 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.simple;
 +
- import org.apache.knox.gateway.i18n.messages.MessagesFactory;
- import org.apache.knox.gateway.services.Service;
- import org.apache.knox.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
- import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
- import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryFactory;
 +import java.io.BufferedWriter;
 +import java.io.File;
 +import java.io.FileInputStream;
 +import java.io.FileWriter;
 +import java.io.InputStreamReader;
 +import java.io.IOException;
 +
 +import java.net.URI;
 +import java.net.URISyntaxException;
 +
 +import java.util.ArrayList;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
++import org.apache.knox.gateway.GatewayServer;
++import org.apache.knox.gateway.i18n.messages.MessagesFactory;
++import org.apache.knox.gateway.services.GatewayServices;
++import org.apache.knox.gateway.services.Service;
++import org.apache.knox.gateway.services.security.AliasService;
++import org.apache.knox.gateway.services.security.KeystoreService;
++import org.apache.knox.gateway.services.security.MasterService;
++import org.apache.knox.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
++import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
++import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryFactory;
 +
 +
 +/**
 + * Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the
 + * gateway.
 + */
 +public class SimpleDescriptorHandler {
 +
 +    private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
 +
 +    private static final SimpleDescriptorMessages log = MessagesFactory.get(SimpleDescriptorMessages.class);
 +
++    private static Map<String, ServiceDiscovery> discoveryInstances = new HashMap<>();
++
 +    public static Map<String, File> handle(File desc) throws IOException {
 +        return handle(desc, NO_GATEWAY_SERVICES);
 +    }
 +
 +    public static Map<String, File> handle(File desc, Service...gatewayServices) throws IOException {
 +        return handle(desc, desc.getParentFile(), gatewayServices);
 +    }
 +
 +    public static Map<String, File> handle(File desc, File destDirectory) throws IOException {
 +        return handle(desc, destDirectory, NO_GATEWAY_SERVICES);
 +    }
 +
 +    public static Map<String, File> handle(File desc, File destDirectory, Service...gatewayServices) throws IOException {
 +        return handle(SimpleDescriptorFactory.parse(desc.getAbsolutePath()), desc.getParentFile(), destDirectory, gatewayServices);
 +    }
 +
 +    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory) {
 +        return handle(desc, srcDirectory, destDirectory, NO_GATEWAY_SERVICES);
 +    }
 +
 +    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory, Service...gatewayServices) {
 +        Map<String, File> result = new HashMap<>();
 +
 +        File topologyDescriptor;
 +
 +        DefaultServiceDiscoveryConfig sdc = new DefaultServiceDiscoveryConfig(desc.getDiscoveryAddress());
 +        sdc.setUser(desc.getDiscoveryUser());
 +        sdc.setPasswordAlias(desc.getDiscoveryPasswordAlias());
 +
 +        // Use the discovery type from the descriptor. If it's unspecified, employ the default type.
 +        String discoveryType = desc.getDiscoveryType();
 +        if (discoveryType == null) {
 +            discoveryType = "AMBARI";
 +        }
 +
-         ServiceDiscovery sd = ServiceDiscoveryFactory.get(discoveryType, gatewayServices);
++        // Use the cached discovery object for the required type, if it has already been loaded
++        ServiceDiscovery sd = discoveryInstances.get(discoveryType);
++        if (sd == null) {
++            sd = ServiceDiscoveryFactory.get(discoveryType, gatewayServices);
++            discoveryInstances.put(discoveryType, sd);
++        }
 +        ServiceDiscovery.Cluster cluster = sd.discover(sdc, desc.getClusterName());
 +
 +        List<String> validServiceNames = new ArrayList<>();
 +
 +        Map<String, Map<String, String>> serviceParams = new HashMap<>();
 +        Map<String, List<String>>        serviceURLs   = new HashMap<>();
 +
 +        if (cluster != null) {
 +            for (SimpleDescriptor.Service descService : desc.getServices()) {
 +                String serviceName = descService.getName();
 +
 +                List<String> descServiceURLs = descService.getURLs();
 +                if (descServiceURLs == null || descServiceURLs.isEmpty()) {
 +                    descServiceURLs = cluster.getServiceURLs(serviceName);
 +                }
 +
 +                // Validate the discovered service URLs
 +                List<String> validURLs = new ArrayList<>();
 +                if (descServiceURLs != null && !descServiceURLs.isEmpty()) {
 +                    // Validate the URL(s)
 +                    for (String descServiceURL : descServiceURLs) {
 +                        if (validateURL(serviceName, descServiceURL)) {
 +                            validURLs.add(descServiceURL);
 +                        }
 +                    }
 +
 +                    if (!validURLs.isEmpty()) {
 +                        validServiceNames.add(serviceName);
 +                    }
 +                }
 +
 +                // If there is at least one valid URL associated with the service, then add it to the map
 +                if (!validURLs.isEmpty()) {
 +                    serviceURLs.put(serviceName, validURLs);
 +                } else {
 +                    log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
 +                }
 +
 +                // Service params
 +                if (descService.getParams() != null) {
 +                    serviceParams.put(serviceName, descService.getParams());
 +                    if (!validServiceNames.contains(serviceName)) {
 +                        validServiceNames.add(serviceName);
 +                    }
 +                }
 +            }
 +        } else {
 +            log.failedToDiscoverClusterServices(desc.getClusterName());
 +        }
 +
++        // Provision the query param encryption password here, rather than relying on the random password generated
++        // when the topology is deployed. This is to support Knox HA deployments, where multiple Knox instances are
++        // generating topologies based on a shared remote descriptor, and they must all be able to encrypt/decrypt
++        // query params with the same credentials. (KNOX-1136)
++        if (!provisionQueryParamEncryptionCredential(desc.getName())) {
++            log.unableCreatePasswordForEncryption(desc.getName());
++        }
++
 +        BufferedWriter fw = null;
 +        topologyDescriptor = null;
 +        File providerConfig;
 +        try {
 +            // Verify that the referenced provider configuration exists before attempting to read it
 +            providerConfig = resolveProviderConfigurationReference(desc.getProviderConfig(), srcDirectory);
 +            if (providerConfig == null) {
 +                log.failedToResolveProviderConfigRef(desc.getProviderConfig());
 +                throw new IllegalArgumentException("Unresolved provider configuration reference: " +
 +                                                   desc.getProviderConfig() + " ; Topology update aborted!");
 +            }
 +            result.put("reference", providerConfig);
 +
 +            // TODO: Should the contents of the provider config be validated before incorporating it into the topology?
 +
 +            String topologyFilename = desc.getName();
 +            if (topologyFilename == null) {
 +                topologyFilename = desc.getClusterName();
 +            }
 +            topologyDescriptor = new File(destDirectory, topologyFilename + ".xml");
 +
 +            fw = new BufferedWriter(new FileWriter(topologyDescriptor));
 +
 +            fw.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n");
 +
 +            fw.write("<!--==============================================-->\n");
 +            fw.write("<!-- DO NOT EDIT. This is an auto-generated file. -->\n");
 +            fw.write("<!--==============================================-->\n");
 +
 +            fw.write("<topology>\n");
 +
 +            // KNOX-1105 Indicate that this topology was auto-generated
 +            fw.write("    <generated>true</generated>\n");
 +
 +            // Copy the externalized provider configuration content into the topology descriptor in-line
 +            InputStreamReader policyReader = new InputStreamReader(new FileInputStream(providerConfig));
 +            char[] buffer = new char[1024];
 +            int count;
 +            while ((count = policyReader.read(buffer)) > 0) {
 +                fw.write(buffer, 0, count);
 +            }
 +            policyReader.close();
 +
 +            // Services
 +            // Sort the service names to write the services alphabetically
 +            List<String> serviceNames = new ArrayList<>(validServiceNames);
 +            Collections.sort(serviceNames);
 +
 +            // Write the service declarations
 +            for (String serviceName : serviceNames) {
++                fw.write("\n");
 +                fw.write("    <service>\n");
 +                fw.write("        <role>" + serviceName + "</role>\n");
 +
 +                // URLs
 +                List<String> urls = serviceURLs.get(serviceName);
 +                if (urls != null) {
 +                    for (String url : urls) {
 +                        fw.write("        <url>" + url + "</url>\n");
 +                    }
 +                }
 +
 +                // Params
 +                Map<String, String> svcParams = serviceParams.get(serviceName);
 +                if (svcParams != null) {
 +                    for (String paramName : svcParams.keySet()) {
 +                        fw.write("        <param>\n");
 +                        fw.write("            <name>" + paramName + "</name>\n");
 +                        fw.write("            <value>" + svcParams.get(paramName) + "</value>\n");
 +                        fw.write("        </param>\n");
 +                    }
 +                }
 +
 +                fw.write("    </service>\n");
 +            }
 +
 +            // Applications
 +            List<SimpleDescriptor.Application> apps = desc.getApplications();
 +            if (apps != null) {
 +                for (SimpleDescriptor.Application app : apps) {
 +                    fw.write("    <application>\n");
 +                    fw.write("        <name>" + app.getName() + "</name>\n");
 +
 +                    // URLs
 +                    List<String> urls = app.getURLs();
 +                    if (urls != null) {
 +                        for (String url : urls) {
 +                            fw.write("        <url>" + url + "</url>\n");
 +                        }
 +                    }
 +
 +                    // Params
 +                    Map<String, String> appParams = app.getParams();
 +                    if (appParams != null) {
 +                        for (String paramName : appParams.keySet()) {
 +                            fw.write("        <param>\n");
 +                            fw.write("            <name>" + paramName + "</name>\n");
 +                            fw.write("            <value>" + appParams.get(paramName) + "</value>\n");
 +                            fw.write("        </param>\n");
 +                        }
 +                    }
 +
 +                    fw.write("    </application>\n");
 +                }
 +            }
 +
 +            fw.write("</topology>\n");
 +
 +            fw.flush();
 +        } catch (IOException e) {
 +            log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e);
 +            topologyDescriptor.delete();
 +        } finally {
 +            if (fw != null) {
 +                try {
 +                    fw.close();
 +                } catch (IOException e) {
 +                    // ignore
 +                }
 +            }
 +        }
 +
 +        result.put("topology", topologyDescriptor);
 +        return result;
 +    }
 +
 +
++    /**
++     * KNOX-1136
++     *
++     * Provision the query string encryption password prior to it being randomly generated during the topology
++     * deployment.
++     *
++     * @param topologyName The name of the topology for which the credential will be provisioned.
++     *
++     * @return true if the credential was successfully provisioned; otherwise, false.
++     */
++    private static boolean provisionQueryParamEncryptionCredential(String topologyName) {
++        boolean result = false;
++
++        try {
++            GatewayServices services = GatewayServer.getGatewayServices();
++            if (services != null) {
++                MasterService ms = services.getService("MasterService");
++                if (ms != null) {
++                    KeystoreService ks = services.getService(GatewayServices.KEYSTORE_SERVICE);
++                    if (ks != null) {
++                        if (!ks.isCredentialStoreForClusterAvailable(topologyName)) {
++                            ks.createCredentialStoreForCluster(topologyName);
++                        }
++
++                        // If the credential store existed, or it was just successfully created
++                        if (ks.getCredentialStoreForCluster(topologyName) != null) {
++                            AliasService aliasService = services.getService(GatewayServices.ALIAS_SERVICE);
++                            if (aliasService != null) {
++                                // Derive and set the query param encryption password
++                                String queryEncryptionPass = new String(ms.getMasterSecret()) + topologyName;
++                                aliasService.addAliasForCluster(topologyName, "encryptQueryString", queryEncryptionPass);
++                                result = true;
++                            }
++                        }
++                    }
++                }
++            }
++        } catch (Exception e) {
++            log.exceptionCreatingPasswordForEncryption(topologyName, e);
++        }
++
++        return result;
++    }
++
++
 +    private static boolean validateURL(String serviceName, String url) {
 +        boolean result = false;
 +
 +        if (url != null && !url.isEmpty()) {
 +            try {
 +                new URI(url);
 +                result = true;
 +            } catch (URISyntaxException e) {
 +                log.serviceURLValidationFailed(serviceName, url, e);
 +            }
 +        }
 +
 +        return result;
 +    }
 +
 +
 +    private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
 +        File providerConfig;
 +
 +        // If the reference includes a path
 +        if (reference.contains(File.separator)) {
 +            // Check if it's an absolute path
 +            providerConfig = new File(reference);
 +            if (!providerConfig.exists()) {
 +                // If it's not an absolute path, try treating it as a relative path
 +                providerConfig = new File(srcDirectory, reference);
 +                if (!providerConfig.exists()) {
 +                    providerConfig = null;
 +                }
 +            }
 +        } else { // No file path, just a name
 +            // Check if it's co-located with the referencing descriptor
 +            providerConfig = new File(srcDirectory, reference);
 +            if (!providerConfig.exists()) {
 +                // Check the shared-providers config location
 +                File sharedProvidersDir = new File(srcDirectory, "../shared-providers");
 +                if (sharedProvidersDir.exists()) {
 +                    providerConfig = new File(sharedProvidersDir, reference);
 +                    if (!providerConfig.exists()) {
 +                        // Check if it's a valid name without the extension
 +                        providerConfig = new File(sharedProvidersDir, reference + ".xml");
 +                        if (!providerConfig.exists()) {
 +                            providerConfig = null;
 +                        }
 +                    }
 +                }
 +            }
 +        }
 +
 +        return providerConfig;
 +    }
 +
 +}
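
The credential provisioned above is derived rather than generated: the value stored under the "encryptQueryString" alias is the master secret concatenated with the topology name, which is why every Knox instance that shares a master secret and processes the same remote descriptor can decrypt query params encrypted by its peers. Below is a minimal, standalone sketch of that property; it is not part of the commit, the deriveQueryParamPassword helper is hypothetical, and MASTER_SECRET stands in for the value MasterService.getMasterSecret() would return.

    // Illustration only; mirrors the concatenation in provisionQueryParamEncryptionCredential().
    public class QueryParamCredentialSketch {

        // Hypothetical stand-in for MasterService.getMasterSecret()
        private static final char[] MASTER_SECRET = "example-master-secret".toCharArray();

        static String deriveQueryParamPassword(char[] masterSecret, String topologyName) {
            return new String(masterSecret) + topologyName;
        }

        public static void main(String[] args) {
            // Two HA instances with the same master secret derive the same alias value
            // for the same topology, so either can decrypt what the other encrypted (KNOX-1136).
            String instanceA = deriveQueryParamPassword(MASTER_SECRET, "sandbox");
            String instanceB = deriveQueryParamPassword(MASTER_SECRET, "sandbox");
            System.out.println(instanceA.equals(instanceB)); // prints: true
        }
    }

Note also the lookup order implemented in resolveProviderConfigurationReference(): a reference containing a path separator is tried as an absolute path and then relative to the descriptor's directory, while a bare name is tried next to the descriptor and then in the sibling shared-providers directory, first as-is and then with an .xml extension appended.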

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
index 07c4350,0000000..28962f9
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
@@@ -1,50 -1,0 +1,59 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + * <p>
 + * http://www.apache.org/licenses/LICENSE-2.0
 + * <p>
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.simple;
 +
 +import org.apache.knox.gateway.i18n.messages.Message;
 +import org.apache.knox.gateway.i18n.messages.MessageLevel;
 +import org.apache.knox.gateway.i18n.messages.Messages;
 +import org.apache.knox.gateway.i18n.messages.StackTrace;
 +
 +@Messages(logger="org.apache.gateway.topology.simple")
 +public interface SimpleDescriptorMessages {
 +
 +    @Message(level = MessageLevel.ERROR,
 +            text = "Service discovery for cluster {0} failed.")
 +    void failedToDiscoverClusterServices(final String cluster);
 +
 +    @Message(level = MessageLevel.ERROR,
 +            text = "No valid URLs were discovered for {0} in the {1} cluster.")
 +    void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName);
 +
 +    @Message(level = MessageLevel.ERROR,
 +            text = "Failed to resolve the referenced provider configuration {0}.")
 +    void failedToResolveProviderConfigRef(final String providerConfigRef);
 +
 +    @Message(level = MessageLevel.ERROR,
 +            text = "URL validation failed for {0} URL {1} : {2}")
 +    void serviceURLValidationFailed(final String serviceName,
 +                                    final String url,
 +                                    @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +    @Message(level = MessageLevel.ERROR,
 +            text = "Error generating topology {0} from simple descriptor: {1}")
 +    void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
 +                                                      @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
++    @Message( level = MessageLevel.ERROR,
++              text = "Error creating a password for query string encryption for {0}: {1}" )
++    void exceptionCreatingPasswordForEncryption(String topologyName,
++                                                @StackTrace( level = MessageLevel.DEBUG) Exception e);
++
++    @Message( level = MessageLevel.ERROR,
++            text = "Failed to create a password for query string encryption for {0}." )
++    void unableCreatePasswordForEncryption(String topologyName);
++
 +}
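
For context, the two new @Message methods above are invoked from SimpleDescriptorHandler when credential provisioning fails (unableCreatePasswordForEncryption) or throws (exceptionCreatingPasswordForEncryption). A rough sketch of how such an interface is typically wired up follows; it is not part of the commit, and the MessagesFactory class is assumed to live in the same i18n package as the annotations imported above.

    // Sketch only. Knox's i18n framework provides an implementation of the annotated
    // interface at runtime; callers obtain it once and invoke the methods directly.
    import org.apache.knox.gateway.i18n.messages.MessagesFactory;

    class SimpleDescriptorLoggingSketch {
        private static final SimpleDescriptorMessages log =
            MessagesFactory.get(SimpleDescriptorMessages.class);

        void reportCredentialFailure(String topologyName, Exception cause) {
            // {0} in the @Message text is filled from topologyName, {1} from the exception,
            // whose stack trace is emitted at DEBUG level per the @StackTrace annotation.
            log.exceptionCreatingPasswordForEncryption(topologyName, cause);
        }
    }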


[33/49] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
index 02be270,0000000..f6536d9
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
@@@ -1,4508 -1,0 +1,4508 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import java.io.ByteArrayOutputStream;
 +import java.io.File;
 +import java.io.FileFilter;
 +import java.io.FileNotFoundException;
 +import java.io.IOException;
 +import java.io.PrintStream;
 +import java.io.StringWriter;
 +import java.net.InetAddress;
 +import java.net.InetSocketAddress;
 +import java.net.URI;
 +import java.net.URISyntaxException;
 +import java.net.URL;
 +import java.nio.charset.Charset;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.Map.Entry;
 +import javax.ws.rs.core.MediaType;
 +
 +import io.restassured.RestAssured;
 +import io.restassured.http.ContentType;
 +import io.restassured.http.Cookie;
 +import io.restassured.http.Header;
 +import io.restassured.path.json.JsonPath;
 +import io.restassured.response.Response;
 +import io.restassured.specification.ResponseSpecification;
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.commons.io.filefilter.WildcardFileFilter;
 +import org.apache.commons.lang3.ArrayUtils;
 +import org.apache.knox.gateway.util.KnoxCLI;
 +import org.apache.knox.test.TestUtils;
 +import org.apache.knox.test.category.MediumTests;
 +import org.apache.knox.test.category.VerifyTest;
 +import org.apache.knox.test.mock.MockRequestMatcher;
 +import org.apache.http.HttpHost;
 +import org.apache.http.HttpResponse;
 +import org.apache.http.HttpStatus;
 +import org.apache.http.auth.AuthScope;
 +import org.apache.http.auth.UsernamePasswordCredentials;
 +import org.apache.http.client.AuthCache;
 +import org.apache.http.client.CredentialsProvider;
 +import org.apache.http.client.methods.HttpGet;
 +import org.apache.http.client.methods.HttpPost;
 +import org.apache.http.client.protocol.HttpClientContext;
 +import org.apache.http.entity.StringEntity;
 +import org.apache.http.impl.auth.BasicScheme;
 +import org.apache.http.impl.client.BasicAuthCache;
 +import org.apache.http.impl.client.BasicCredentialsProvider;
 +import org.apache.http.impl.client.CloseableHttpClient;
 +import org.apache.http.impl.client.HttpClientBuilder;
 +import org.apache.http.util.EntityUtils;
 +import org.apache.velocity.Template;
 +import org.apache.velocity.VelocityContext;
 +import org.apache.velocity.app.VelocityEngine;
 +import org.apache.velocity.runtime.RuntimeConstants;
 +import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
 +import org.hamcrest.CoreMatchers;
 +import org.hamcrest.Matcher;
 +import org.hamcrest.MatcherAssert;
 +import org.hamcrest.Matchers;
 +import org.junit.After;
 +import org.junit.AfterClass;
 +import org.junit.Assert;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import static io.restassured.RestAssured.given;
 +import static org.apache.knox.test.TestUtils.LOG_ENTER;
 +import static org.apache.knox.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.*;
 +import static org.hamcrest.Matchers.containsString;
 +import static org.hamcrest.Matchers.greaterThan;
 +import static org.hamcrest.text.IsEmptyString.isEmptyString;
 +import static org.junit.Assert.assertThat;
 +import static org.junit.Assert.assertTrue;
 +import static org.xmlmatchers.XmlMatchers.isEquivalentTo;
 +import static org.xmlmatchers.transform.XmlConverters.the;
 +import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
 +
 +@Category( { VerifyTest.class, MediumTests.class } )
 +public class GatewayBasicFuncTest {
 +
 +  private static final Charset UTF8 = Charset.forName("UTF-8");
 +
 +  // Uncomment to cause the test to hang after the gateway instance is set up.
 +  // This will allow the gateway instance to be hit directly via some external client.
 +//  @Test
 +//  public void hang() throws IOException {
 +//    System.out.println( "Server on port " + driver.gateway.getAddresses()[0].getPort() );
 +//    System.out.println();
 +//    System.in.read();
 +//  }
 +
 +  private static Logger log = LoggerFactory.getLogger( GatewayBasicFuncTest.class );
 +
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  // Controls the host name to which the gateway dispatches requests.  This may be the name of a sandbox VM
 +  // or an EC2 instance.  Currently only a single host is supported.
 +  private static final String TEST_HOST = "vm.local";
 +
 +  // Specifies if the test requests should go through the gateway or directly to the services.
 +  // This is frequently used to verify the behavior of the test both with and without the gateway.
 +  private static final boolean USE_GATEWAY = true;
 +
 +  // Specifies if the test requests should be sent to mock services or the real services.
 +  // This is frequently used to verify the behavior of the test both with and without mock services.
 +  private static final boolean USE_MOCK_SERVICES = true;
 +
 +  // Specifies if the GATEWAY_HOME created for the test should be deleted when the test suite is complete.
 +  // This is frequently used during debugging to keep the GATEWAY_HOME around for inspection.
 +  private static final boolean CLEANUP_TEST = true;
 +
 +//  private static final boolean USE_GATEWAY = false;
 +//  private static final boolean USE_MOCK_SERVICES = false;
 +//  private static final boolean CLEANUP_TEST = false;
 +
 +  /**
 +   * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
 +   * registry of sorts for all of the services that will be used by the test methods.
 +   * The createTopology method is used to create the topology file that would normally be read from disk.
 +   * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
 +   * @throws Exception Thrown if any failure occurs.
 +   */
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    //Log.setLog( new NoOpLogger() );
 +    LOG_ENTER();
 +    GatewayTestConfig config = new GatewayTestConfig();
 +    driver.setResourceBase(GatewayBasicFuncTest.class);
 +    driver.setupLdap(0);
 +    driver.setupService("WEBHDFS", "http://" + TEST_HOST + ":50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES);
 +    driver.setupService( "DATANODE", "http://" + TEST_HOST + ":50075/webhdfs", "/cluster/webhdfs/data", USE_MOCK_SERVICES );
 +    driver.setupService( "WEBHCAT", "http://" + TEST_HOST + ":50111/templeton", "/cluster/templeton", USE_MOCK_SERVICES );
 +    driver.setupService( "OOZIE", "http://" + TEST_HOST + ":11000/oozie", "/cluster/oozie", USE_MOCK_SERVICES );
 +    driver.setupService( "HIVE", "http://" + TEST_HOST + ":10000", "/cluster/hive", USE_MOCK_SERVICES );
 +    driver.setupService( "WEBHBASE", "http://" + TEST_HOST + ":60080", "/cluster/hbase", USE_MOCK_SERVICES );
 +    driver.setupService( "NAMENODE", "hdfs://" + TEST_HOST + ":8020", null, USE_MOCK_SERVICES );
 +    driver.setupService( "JOBTRACKER", "thrift://" + TEST_HOST + ":8021", null, USE_MOCK_SERVICES );
 +    driver.setupService( "RESOURCEMANAGER", "http://" + TEST_HOST + ":8088/ws", "/cluster/resourcemanager", USE_MOCK_SERVICES );
 +    driver.setupService( "FALCON", "http://" + TEST_HOST + ":15000", "/cluster/falcon", USE_MOCK_SERVICES );
 +    driver.setupService( "STORM", "http://" + TEST_HOST + ":8477", "/cluster/storm", USE_MOCK_SERVICES );
 +    driver.setupService( "STORM-LOGVIEWER", "http://" + TEST_HOST + ":8477", "/cluster/storm", USE_MOCK_SERVICES );
 +    driver.setupService( "SOLR", "http://" + TEST_HOST + ":8983", "/cluster/solr", USE_MOCK_SERVICES );
 +    driver.setupService( "KAFKA", "http://" + TEST_HOST + ":8477", "/cluster/kafka", USE_MOCK_SERVICES );
 +    driver.setupGateway( config, "cluster", createTopology(), USE_GATEWAY );
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    if( CLEANUP_TEST ) {
 +      driver.cleanup();
 +    }
 +    LOG_EXIT();
 +  }
 +
 +  @After
 +  public void cleanupTest() {
 +    driver.reset();
 +  }
 +
 +  /**
 +   * Creates a topology that is deployed to the gateway instance for the test suite.
 +   * Note that this topology is shared by all of the test methods in this suite.
 +   * @return A populated XML structure for a topology file.
 +   */
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +          .addTag( "gateway" )
 +            .addTag( "provider" )
 +              .addTag( "role" ).addText( "webappsec" )
 +              .addTag("name").addText("WebAppSec")
 +              .addTag("enabled").addText("true")
 +              .addTag( "param" )
 +                .addTag("name").addText("csrf.enabled")
 +                .addTag("value").addText("true").gotoParent().gotoParent()
 +            .addTag("provider")
 +              .addTag("role").addText("authentication")
 +              .addTag("name").addText("ShiroProvider")
 +              .addTag("enabled").addText("true")
 +              .addTag( "param" )
 +                .addTag("name").addText("main.ldapRealm")
 +                .addTag("value").addText("org.apache.knox.gateway.shirorealm.KnoxLdapRealm").gotoParent()
 +              .addTag( "param" )
 +                .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +                .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
 +              .addTag( "param" )
 +                .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +                .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
 +              .addTag( "param" )
 +                .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +                .addTag( "value" ).addText( "simple" ).gotoParent()
 +              .addTag( "param" )
 +                .addTag( "name" ).addText( "urls./**" )
 +                .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
 +            .addTag("provider")
 +              .addTag("role").addText("identity-assertion")
 +              .addTag("enabled").addText("true")
 +              .addTag("name").addText("Default").gotoParent()
 +            .addTag("provider")
 +              .addTag( "role" ).addText( "authorization" )
 +              .addTag( "enabled" ).addText( "true" )
 +              .addTag("name").addText("AclsAuthz").gotoParent()
 +              .addTag("param")
 +                .addTag("name").addText( "webhdfs-acl" )
 +                .addTag("value").addText( "hdfs;*;*" ).gotoParent()
 +          .gotoRoot()
 +          .addTag("service")
 +            .addTag("role").addText("WEBHDFS")
 +            .addTag("url").addText(driver.getRealUrl("WEBHDFS")).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "NAMENODE" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "NAMENODE" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "DATANODE" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "DATANODE" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "JOBTRACKER" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "JOBTRACKER" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "WEBHCAT" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "WEBHCAT" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "OOZIE" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "OOZIE" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "HIVE" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "HIVE" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "WEBHBASE" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "WEBHBASE" ) ).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("RESOURCEMANAGER")
 +            .addTag("url").addText(driver.getRealUrl("RESOURCEMANAGER")).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("FALCON")
 +            .addTag("url").addText(driver.getRealUrl("FALCON")).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("STORM")
 +            .addTag("url").addText(driver.getRealUrl("STORM")).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("STORM-LOGVIEWER")
 +            .addTag("url").addText(driver.getRealUrl("STORM-LOGVIEWER")).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("SOLR")
 +            .addTag("url").addText(driver.getRealUrl("SOLR")).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("KAFKA")
 +            .addTag("url").addText(driver.getRealUrl("KAFKA")).gotoParent()
 +        .addTag("service")
 +        .addTag("role").addText("SERVICE-TEST")
 +        .gotoRoot();
 +//     System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicJsonUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testBasicJsonUseCase";
 +    String username = "hdfs";
 +    String password = "hdfs-password";
 +    /* Create a directory.
 +    curl -i -X PUT "http://<HOST>:<PORT>/<PATH>?op=MKDIRS[&permission=<OCTAL>]"
 +
 +    The client receives a response with a boolean JSON object:
 +    HTTP/1.1 HttpStatus.SC_OK OK
 +    Content-Type: application/json
 +    Transfer-Encoding: chunked
 +
 +    {"boolean": true}
 +    */
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir" )
 +        .queryParam( "op", "MKDIRS" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "webhdfs-success.json" ) )
 +        .contentType( "application/json" );
 +    Cookie cookie = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "MKDIRS" )
 +        .then()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "application/json" )
 +        .body( "boolean", is( true ) )
 +        .when().put( driver.getUrl( "WEBHDFS" ) + "/v1" + root + "/dir" ).getDetailedCookie( "JSESSIONID" );
 +    assertThat( cookie.isSecured(), is( true ) );
 +    assertThat( cookie.isHttpOnly(), is( true ) );
 +    assertThat( cookie.getPath(), is( "/gateway/cluster" ) );
 +    assertThat( cookie.getValue().length(), greaterThan( 16 ) );
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicOutboundHeaderUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testBasicOutboundHeaderUseCase";
 +    String username = "hdfs";
 +    String password = "hdfs-password";
 +    InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
 +    String gatewayHostName = gatewayAddress.getHostName();
 +    String gatewayAddrName = InetAddress.getByName(gatewayHostName).getHostAddress();
 +
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir/file" )
 +        .header( "Host", driver.getRealAddr( "WEBHDFS" ) )
 +        .queryParam( "op", "CREATE" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .header("Location", driver.getRealUrl("DATANODE") + "/v1" + root + "/dir/file?op=CREATE&user.name=hdfs");
 +    Response response = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "CREATE" )
 +        .then()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .when().put( driver.getUrl("WEBHDFS") + "/v1" + root + "/dir/file" );
 +    String location = response.getHeader( "Location" );
 +    //System.out.println( location );
 +    log.debug( "Redirect location: " + response.getHeader( "Location" ) );
 +    if( driver.isUseGateway() ) {
 +      MatcherAssert.assertThat( location, anyOf(
 +          startsWith( "http://" + gatewayHostName + ":" + gatewayAddress.getPort() + "/" ),
 +          startsWith( "http://" + gatewayAddrName + ":" + gatewayAddress.getPort() + "/" ) ) );
 +      MatcherAssert.assertThat( location, containsString( "?_=" ) );
 +    }
 +    MatcherAssert.assertThat(location, not(containsString("host=")));
 +    MatcherAssert.assertThat(location, not(containsString("port=")));
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicOutboundEncodedHeaderUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testBasicOutboundHeaderUseCase";
 +    String username = "hdfs";
 +    String password = "hdfs-password";
 +
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir/fileレポー" )
 +        .header( "Host", driver.getRealAddr( "WEBHDFS" ) )
 +        .queryParam( "op", "CREATE" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .header("Location", driver.getRealUrl("DATANODE") + "/v1" + root + "/dir/file%E3%83%AC%E3%83%9D%E3%83%BC?op=CREATE&user.name=hdfs");
 +    Response response = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "CREATE" )
 +        .then()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .when().put( driver.getUrl("WEBHDFS") + "/v1" + root + "/dir/fileレポー" );
 +//        .when().put( driver.getUrl("WEBHDFS") + "/v1" + root + "/dir/file%E3%83%AC%E3%83%9D%E3%83%BC" );
 +    String location = response.getHeader( "Location" );
 +    //System.out.println( location );
 +    log.debug( "Redirect location: " + response.getHeader( "Location" ) );
 +    if( driver.isUseGateway() ) {
 +      MatcherAssert.assertThat( location, containsString("/dir/file%E3%83%AC%E3%83%9D%E3%83%BC") );
 +    }
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHdfsTildeUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testHdfsTildeUseCase";
 +    String username = "hdfs";
 +    String password = "hdfs-password";
 +
 +    // Attempt to delete the test directory in case a previous run failed.
 +    // Ignore any result.
 +    // Cleanup anything that might have been leftover because the test failed previously.
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "DELETE" )
 +        .from( "testHdfsTildeUseCase" )
 +        .pathInfo( "/v1/user/hdfs" + root )
 +        .queryParam( "op", "DELETE" )
 +        .queryParam( "user.name", username )
 +        .queryParam( "recursive", "true" )
 +        .respond()
 +        .status( HttpStatus.SC_OK );
 +
 +    try {
 +      // Need to turn off URL encoding here; otherwise the tilde gets encoded and the rewrite rules fail
 +      RestAssured.urlEncodingEnabled = false;
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .header("X-XSRF-Header", "jksdhfkhdsf")
 +          .queryParam( "op", "DELETE" )
 +          .queryParam( "recursive", "true" )
 +          .then()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK )
 +          .when().delete( driver.getUrl( "WEBHDFS" ) + "/v1/~" + root + ( driver.isUseGateway() ? "" : "?user.name=" + username ) );
 +      driver.assertComplete();
 +
 +      driver.getMock( "WEBHDFS" )
 +          .expect()
 +          .method( "PUT" )
 +          .pathInfo( "/v1/user/hdfs/dir" )
 +          .queryParam( "op", "MKDIRS" )
 +          .queryParam( "user.name", username )
 +          .respond()
 +          .status( HttpStatus.SC_OK )
 +          .content( driver.getResourceBytes( "webhdfs-success.json" ) )
 +          .contentType("application/json");
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .header("X-XSRF-Header", "jksdhfkhdsf")
 +          .queryParam( "op", "MKDIRS" )
 +          .then()
 +          //.log().all();
 +          .statusCode( HttpStatus.SC_OK )
 +          .contentType( "application/json" )
 +          .body( "boolean", is( true ) )
 +          .when().put( driver.getUrl( "WEBHDFS" ) + "/v1/~/dir" );
 +      driver.assertComplete();
 +    } finally {
 +      RestAssured.urlEncodingEnabled = true;
 +    }
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicHdfsUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testBasicHdfsUseCase";
 +    String username = "hdfs";
 +    String password = "hdfs-password";
 +    InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
 +    String gatewayHostName = gatewayAddress.getHostName();
 +    String gatewayAddrName = InetAddress.getByName( gatewayHostName ).getHostAddress();
 +
 +    // Attempt to delete the test directory in case a previous run failed.
 +    // Ignore any result.
 +    // Cleanup anything that might have been leftover because the test failed previously.
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "DELETE" )
 +        .from( "testBasicHdfsUseCase-1" )
 +        .pathInfo( "/v1" + root )
 +        .queryParam( "op", "DELETE" )
 +        .queryParam( "user.name", username )
 +        .queryParam( "recursive", "true" )
 +        .respond()
 +        .status( HttpStatus.SC_OK );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "DELETE" )
 +        .queryParam( "recursive", "true" )
 +        .then()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .when().delete( driver.getUrl( "WEBHDFS" ) + "/v1" + root + ( driver.isUseGateway() ? "" : "?user.name=" + username ) );
 +    driver.assertComplete();
 +
 +    /* Create a directory.
 +    curl -i -X PUT "http://<HOST>:<PORT>/<PATH>?op=MKDIRS[&permission=<OCTAL>]"
 +
 +    The client receives a response with a boolean JSON object:
 +    HTTP/1.1 HttpStatus.SC_OK OK
 +    Content-Type: application/json
 +    Transfer-Encoding: chunked
 +
 +    {"boolean": true}
 +    */
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir" )
 +        .queryParam( "op", "MKDIRS" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "webhdfs-success.json" ) )
 +        .contentType( "application/json" );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "MKDIRS" )
 +        .then()
 +        //.log().all();
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "application/json" )
 +        .body( "boolean", is( true ) )
 +        .when().put( driver.getUrl( "WEBHDFS" ) + "/v1" + root + "/dir" );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "GET" )
 +        .pathInfo( "/v1" + root )
 +        .queryParam( "op", "LISTSTATUS" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "webhdfs-liststatus-test.json" ) )
 +        .contentType( "application/json" );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "LISTSTATUS" )
 +        .then()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_OK )
 +        .body( "FileStatuses.FileStatus[0].pathSuffix", is( "dir" ) )
 +        .when().get( driver.getUrl( "WEBHDFS" ) + "/v1" + root );
 +    driver.assertComplete();
 +
 +    //NEGATIVE: Test a bad password.
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, "invalid-password" )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "LISTSTATUS" )
 +        .then()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +        .when().get( driver.getUrl( "WEBHDFS" ) + "/v1" + root );
 +    driver.assertComplete();
 +
 +    //NEGATIVE: Test a bad user.
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( "hdfs-user", "hdfs-password" )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "LISTSTATUS" )
 +        .then()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +        .when().get( driver.getUrl( "WEBHDFS" ) + "/v1" + root );
 +    driver.assertComplete();
 +
 +    //NEGATIVE: Test a valid but unauthorized user.
 +    given()
 +      //.log().all()
 +      .auth().preemptive().basic( "mapred-user", "mapred-password" )
 +      .header("X-XSRF-Header", "jksdhfkhdsf")
 +      .queryParam( "op", "LISTSTATUS" )
 +      .then()
 +      //.log().ifError()
 +      .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +      .when().get( driver.getUrl( "WEBHDFS" ) + "/v1" + root );
 +
 +    /* Add a file.
 +    curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CREATE
 +                       [&overwrite=<true|false>][&blocksize=<LONG>][&replication=<SHORT>]
 +                     [&permission=<OCTAL>][&buffersize=<INT>]"
 +
 +    The request is then redirected to a datanode where the file data is to be written:
 +    HTTP/1.1 307 TEMPORARY_REDIRECT
 +    Location: http://<DATANODE>:<PORT>/webhdfs/v1/<PATH>?op=CREATE...
 +    Content-Length: 0
 +
 +    Step 2: Submit another HTTP PUT request using the URL in the Location header with the file data to be written.
 +    curl -i -X PUT -T <LOCAL_FILE> "http://<DATANODE>:<PORT>/webhdfs/v1/<PATH>?op=CREATE..."
 +
 +    The client receives an HttpStatus.SC_CREATED Created response with zero content length and the WebHDFS URI of the file in the Location header:
 +    HTTP/1.1 HttpStatus.SC_CREATED Created
 +    Location: webhdfs://<HOST>:<PORT>/<PATH>
 +    Content-Length: 0
 +    */
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir/file" )
 +        .queryParam( "op", "CREATE" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .header( "Location", driver.getRealUrl( "DATANODE" ) + "/v1" + root + "/dir/file?op=CREATE&user.name=hdfs" );
 +    driver.getMock( "DATANODE" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir/file" )
 +        .queryParam( "op", "CREATE" )
 +        .queryParam( "user.name", username )
 +        .contentType( "text/plain" )
 +        .content( driver.getResourceBytes( "test.txt" ) )
 +            //.content( driver.getResourceBytes( "hadoop-examples.jar" ) )
 +        .respond()
 +        .status( HttpStatus.SC_CREATED )
 +        .header( "Location", "webhdfs://" + driver.getRealAddr( "DATANODE" ) + "/v1" + root + "/dir/file" );
 +    Response response = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "CREATE" )
 +        .then()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .when().put( driver.getUrl("WEBHDFS") + "/v1" + root + "/dir/file" );
 +    String location = response.getHeader( "Location" );
 +    log.debug( "Redirect location: " + response.getHeader( "Location" ) );
 +    if( driver.isUseGateway() ) {
 +      MatcherAssert.assertThat( location, anyOf(
 +          startsWith( "http://" + gatewayHostName + ":" + gatewayAddress.getPort() + "/" ),
 +          startsWith( "http://" + gatewayAddrName + ":" + gatewayAddress.getPort() + "/" ) ) );
 +      MatcherAssert.assertThat( location, containsString( "?_=" ) );
 +    }
 +    MatcherAssert.assertThat( location, not( containsString( "host=" ) ) );
 +    MatcherAssert.assertThat( location, not( containsString( "port=" ) ) );
 +    response = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "test.txt" ) )
 +        .contentType( "text/plain" )
 +        .then()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_CREATED )
 +        .when().put( location );
 +    location = response.getHeader( "Location" );
 +    log.debug( "Created location: " + location );
 +    if( driver.isUseGateway() ) {
 +      MatcherAssert.assertThat( location, anyOf(
 +          startsWith( "http://" + gatewayHostName + ":" + gatewayAddress.getPort() + "/" ),
 +          startsWith( "http://" + gatewayAddrName + ":" + gatewayAddress.getPort() + "/" ) ) );
 +    }
 +    driver.assertComplete();
 +
 +    /* Get the file.
 +    curl -i -L "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=OPEN
 +                       [&offset=<LONG>][&length=<LONG>][&buffersize=<INT>]"
 +
 +    The request is then redirected to a datanode where the file data can be read:
 +    HTTP/1.1 307 TEMPORARY_REDIRECT
 +    Location: http://<DATANODE>:<PORT>/webhdfs/v1/<PATH>?op=OPEN...
 +    Content-Length: 0
 +
 +    The client follows the redirect to the datanode and receives the file data:
 +    HTTP/1.1 HttpStatus.SC_OK OK
 +    Content-Type: application/octet-stream
 +    Content-Length: 22
 +
 +    Hello, webhdfs user!
 +    */
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "GET" )
 +        .pathInfo( "/v1" + root + "/dir/file" )
 +        .queryParam( "op", "OPEN" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .header( "Location", driver.getRealUrl( "DATANODE" ) + "/v1" + root + "/dir/file?op=OPEN&user.name=hdfs" );
 +    driver.getMock( "DATANODE" )
 +        .expect()
 +        .method( "GET" )
 +        .pathInfo( "/v1" + root + "/dir/file" )
 +        .queryParam( "op", "OPEN" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .content( driver.getResourceBytes( "test.txt" ) );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "OPEN" )
 +        .then()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_OK )
 +        .body( is( "TEST" ) )
 +        .when().get( driver.getUrl("WEBHDFS") + "/v1" + root + "/dir/file" );
 +    driver.assertComplete();
 +
 +    /* Delete the directory.
 +    curl -i -X DELETE "http://<host>:<port>/webhdfs/v1/<path>?op=DELETE
 +                                 [&recursive=<true|false>]"
 +
 +    The client receives a response with a boolean JSON object:
 +    HTTP/1.1 HttpStatus.SC_OK OK
 +    Content-Type: application/json
 +    Transfer-Encoding: chunked
 +
 +    {"boolean": true}
 +    */
 +    // Mock the interaction with the namenode.
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .from( "testBasicHdfsUseCase-1" )
 +        .method( "DELETE" )
 +        .pathInfo( "/v1" + root )
 +        .queryParam( "op", "DELETE" )
 +        .queryParam( "user.name", username )
 +        .queryParam( "recursive", "true" )
 +        .respond()
 +        .status( HttpStatus.SC_OK );
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "DELETE" )
 +        .queryParam( "recursive", "true" )
 +        .then()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_OK )
 +        .when().delete( driver.getUrl( "WEBHDFS" ) + "/v1" + root );
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
 +
 +  // User hdfs in groups hadoop, hdfs
 +  // User mapred in groups hadoop, mapred
 +  // User hcat in group hcat
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testPmHdfsM1UseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testPmHdfdM1UseCase";
 +    String userA = "hdfs";
 +    String passA = "hdfs-password";
 +    String userB = "mapred";
 +    String passB = "mapred-password";
 +    String userC = "hcat";
 +    String passC = "hcat-password";
 +    String groupA = "hdfs";
 +    String groupB = "mapred";
 +    String groupAB = "hadoop";
 +    String groupC = "hcat";
 +
 +    deleteFile( userA, passA, root, "true", 200 );
 +
 +    createDir( userA, passA, groupA, root + "/dirA700", "700", 200, 200 );
 +    createDir( userA, passA, groupA, root + "/dirA770", "770", 200, 200 );
 +    createDir( userA, passA, groupA, root + "/dirA707", "707", 200, 200 );
 +    createDir( userA, passA, groupA, root + "/dirA777", "777", 200, 200 );
 +    createDir( userA, passA, groupAB, root + "/dirAB700", "700", 200, 200 );
 +    createDir( userA, passA, groupAB, root + "/dirAB770", "770", 200, 200 );
 +    createDir( userA, passA, groupAB, root + "/dirAB707", "707", 200, 200 );
 +    createDir( userA, passA, groupAB, root + "/dirAB777", "777", 200, 200 );
 +
 +    // CREATE: Files
 +    // userA:groupA
 +    createFile( userA, passA, groupA, root + "/dirA700/fileA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupA, root + "/dirA770/fileA770", "770", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupA, root + "/dirA707/fileA707", "707", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupA, root + "/dirA777/fileA777", "777", "text/plain", "small1.txt", 307, 201, 200 );
 +    // userA:groupAB
 +    createFile( userA, passA, groupAB, root + "/dirAB700/fileAB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupAB, root + "/dirAB770/fileAB770", "770", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupAB, root + "/dirAB707/fileAB707", "707", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupAB, root + "/dirAB777/fileAB777", "777", "text/plain", "small1.txt", 307, 201, 200 );
 +    // userB:groupB
 +    createFile( userB, passB, groupB, root + "/dirA700/fileB700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +    createFile( userB, passB, groupB, root + "/dirA770/fileB700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +//kam:20130219[ chmod seems to be broken at least in Sandbox 1.2
 +//    createFile( userB, passB, groupB, root + "/dirA707/fileB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +//    createFile( userB, passB, groupB, root + "/dirA777/fileB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +//kam]
 +    // userB:groupAB
 +    createFile( userB, passB, groupAB, root + "/dirA700/fileBA700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +    createFile( userB, passB, groupAB, root + "/dirA770/fileBA700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +    createFile( userB, passB, groupAB, root + "/dirA707/fileBA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userB, passB, groupAB, root + "/dirA777/fileBA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +    // userC:groupC
 +    createFile( userC, passC, groupC, root + "/dirA700/fileC700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +    createFile( userC, passC, groupC, root + "/dirA770/fileC700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +//kam:20130219[ chmod seems to be broken at least in Sandbox 1.2
 +//    createFile( userC, passC, groupC, root + "/dirA707/fileC700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +//    createFile( userC, passC, groupC, root + "/dirA777/fileC700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +//kam]
 +
 +    // READ
 +    // userA
 +    readFile( userA, passA, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    readFile( userA, passA, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    readFile( userA, passA, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    readFile( userA, passA, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    // userB:groupB
 +    readFile( userB, passB, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userB, passB, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userB, passB, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    readFile( userB, passB, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    // userB:groupAB
 +    readFile( userB, passB, root + "/dirAB700/fileAB700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userB, passB, root + "/dirAB770/fileAB770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userB, passB, root + "/dirAB707/fileAB707", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userB, passB, root + "/dirAB777/fileAB777", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    // userC:groupC
 +    readFile( userC, passC, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userC, passC, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userC, passC, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    readFile( userC, passC, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +
 +    //NEGATIVE: Test a bad password.
 +    if( driver.isUseGateway() ) {
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( userA, "invalid-password" )
 +          .header("X-XSRF-Header", "jksdhfkhdsf")
 +          .queryParam( "op", "OPEN" )
 +          .then()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +          .when().get( driver.getUrl("WEBHDFS") + "/v1" + root + "/dirA700/fileA700" );
 +    }
 +    driver.assertComplete();
 +
 +    // UPDATE (Negative First)
 +    updateFile( userC, passC, root + "/dirA700/fileA700", "text/plain", "small2.txt", 307, 403 );
 +    updateFile( userB, passB, root + "/dirAB700/fileAB700", "text/plain", "small2.txt", 307, 403 );
 +    updateFile( userB, passB, root + "/dirAB770/fileAB700", "text/plain", "small2.txt", 307, 403 );
 +    updateFile( userB, passB, root + "/dirAB770/fileAB770", "text/plain", "small2.txt", 307, 403 );
 +    updateFile( userA, passA, root + "/dirA700/fileA700", "text/plain", "small2.txt", 307, 201 );
 +
 +    // DELETE (Negative First)
 +    deleteFile( userC, passC, root + "/dirA700/fileA700", "false", HttpStatus.SC_FORBIDDEN );
 +    deleteFile( userB, passB, root + "/dirAB700/fileAB700", "false", HttpStatus.SC_FORBIDDEN );
 +    deleteFile( userB, passB, root + "/dirAB770/fileAB770", "false", HttpStatus.SC_FORBIDDEN );
 +    deleteFile( userA, passA, root + "/dirA700/fileA700", "false", HttpStatus.SC_OK );
 +
 +    // Cleanup anything that might have been leftover because the test failed previously.
 +    deleteFile( userA, passA, root, "true", HttpStatus.SC_OK );
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testJavaMapReduceViaWebHCat() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testJavaMapReduceViaWebHCat";
 +    String user = "mapred";
 +    String pass = "mapred-password";
 +//    String user = "hcat";
 +//    String pass = "hcat-password";
 +//    String group = "hcat";
 +
 +    // Cleanup anything that might have been leftover because the test failed previously.
 +    deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
 +
 +    /* Put the mapreduce code into HDFS. (hadoop-examples.jar)
 +    curl -X PUT --data-binary @hadoop-examples.jar 'http://192.168.1.163:8888/org.apache.org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/hadoop-examples.jar?user.name=hdfs&op=CREATE'
 +     */
 +    createFile( user, pass, null, root+"/hadoop-examples.jar", "777", "application/octet-stream", findHadoopExamplesJar(), 307, 201, 200 );
 +
 +    /* Put the data file into HDFS (changes.txt)
 +    curl -X PUT --data-binary @changes.txt 'http://192.168.1.163:8888/org.apache.org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/input/changes.txt?user.name=hdfs&op=CREATE'
 +     */
 +    createFile( user, pass, null, root+"/input/changes.txt", "777", "text/plain", "changes.txt", 307, 201, 200 );
 +
 +    /* Create the output directory
 +    curl -X PUT 'http://192.168.1.163:8888/org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/output?op=MKDIRS&user.name=hdfs'
 +    */
 +    createDir( user, pass, null, root+"/output", "777", 200, 200 );
 +
 +    /* Submit the job
 +    curl -d user.name=hdfs -d jar=wordcount/hadoop-examples.jar -d class=org.apache.hadoop.examples.WordCount -d arg=wordcount/input -d arg=wordcount/output 'http://localhost:8888/org.apache.knox.gateway/cluster/templeton/v1/mapreduce/jar'
 +    {"id":"job_201210301335_0059"}
 +    */
 +    String job = submitJava(
 +        user, pass,
 +        root+"/hadoop-examples.jar", "org.apache.hadoop.examples.WordCount",
 +        root+"/input", root+"/output",
 +        200 );
 +
 +    /* Get the job status
 +    curl 'http://vm:50111/templeton/v1/queue/:jobid?user.name=hdfs'
 +    */
 +    queryQueue( user, pass, job );
 +
 +    // Can't really check for the output here because the job won't be done.
 +    /* Retrieve results
 +    curl 'http://192.168.1.163:8888/org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/input?op=LISTSTATUS'
 +    */
 +
 +    if( CLEANUP_TEST ) {
 +      // Cleanup anything that might have been leftover because the test failed previously.
 +      deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
 +    }
 +    LOG_EXIT();
 +  }
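  // Editor's note: illustrative sketch only, not part of this commit. The curl commands
  // quoted in the comments above correspond to a WebHCat job submission roughly like the
  // one below: POST the jar, main class and arguments as form parameters to
  // /v1/mapreduce/jar and read the job id from the JSON response. The method name, the
  // "WEBHCAT" role name and the id extraction are assumptions for illustration.
  private String submitJavaSketch( String user, String password, String jar,
                                   String mainClass, String input, String output ) {
    Response jobResponse = given()
        .auth().preemptive().basic( user, password )
        .header( "X-XSRF-Header", "jksdhfkhdsf" )
        .formParam( "jar", jar )
        .formParam( "class", mainClass )
        .formParam( "arg", input, output )
        .then()
        .statusCode( HttpStatus.SC_OK )
        .when().post( driver.getUrl( "WEBHCAT" ) + "/v1/mapreduce/jar" );
    // WebHCat answers with something like {"id":"job_201210301335_0059"}.
    return jobResponse.getBody().path( "id" );
  }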
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testPigViaWebHCat() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayWebHCatFuncTest/testPigViaWebHCat";
 +    String user = "mapred";
 +    String pass = "mapred-password";
 +    String group = "mapred";
 +
 +    // Cleanup if previous run failed.
 +    deleteFile( user, pass, root, "true", 200, 404 );
 +
 +    // Post the data to HDFS
 +    createFile( user, pass, null, root + "/passwd.txt", "777", "text/plain", "passwd.txt", 307, 201, 200 );
 +
 +    // Post the script to HDFS
 +    createFile( user, pass, null, root+"/script.pig", "777", "text/plain", "script.pig", 307, 201, 200 );
 +
 +    // Create the output directory
 +    createDir( user, pass, null, root + "/output", "777", 200, 200 );
 +
 +    // Submit the job
 +    submitPig( user, pass, group, root + "/script.pig", "-v", root + "/output", 200 );
 +
 +    // Check job status (if possible)
 +    // Check output (if possible)
 +
 +    // Cleanup
 +    deleteFile( user, pass, root, "true", 200 );
 +    LOG_EXIT();
 +  }
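  // Editor's note: illustrative sketch only, not part of this commit. submitPig() above
  // presumably posts the script location to WebHCat's Pig endpoint; the parameter names
  // below follow the public WebHCat API (file, arg, statusdir), while the helper name and
  // the "WEBHCAT" role name are assumptions.
  private void submitPigSketch( String user, String password, String script, String statusDir ) {
    given()
        .auth().preemptive().basic( user, password )
        .header( "X-XSRF-Header", "jksdhfkhdsf" )
        .formParam( "file", script )
        .formParam( "arg", "-v" )
        .formParam( "statusdir", statusDir )
        .then()
        .statusCode( HttpStatus.SC_OK )
        .when().post( driver.getUrl( "WEBHCAT" ) + "/v1/pig" );
  }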
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHiveViaWebHCat() throws IOException {
 +    LOG_ENTER();
 +    String user = "hive";
 +    String pass = "hive-password";
 +    String group = "hive";
 +    String root = "/tmp/GatewayWebHCatFuncTest/testHiveViaWebHCat";
 +
 +    // Cleanup if previous run failed.
 +    deleteFile( user, pass, root, "true", 200, 404 );
 +
 +    // Post the data to HDFS
 +
 +    // Post the script to HDFS
 +    createFile(user, pass, null, root + "/script.hive", "777", "text/plain", "script.hive", 307, 201, 200);
 +
 +    // Submit the job
 +    submitHive(user, pass, group, root + "/script.hive", root + "/output", 200);
 +
 +    // Check job status (if possible)
 +    // Check output (if possible)
 +
 +    // Cleanup
 +    deleteFile( user, pass, root, "true", 200 );
 +    LOG_EXIT();
 +  }
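  // Editor's note: illustrative sketch only, not part of this commit. submitHive() above
  // is assumed to follow the same shape as the Pig submission, targeting WebHCat's Hive
  // endpoint instead; parameter names follow the public WebHCat API, the rest is a guess.
  private void submitHiveSketch( String user, String password, String script, String statusDir ) {
    given()
        .auth().preemptive().basic( user, password )
        .header( "X-XSRF-Header", "jksdhfkhdsf" )
        .formParam( "file", script )
        .formParam( "statusdir", statusDir )
        .then()
        .statusCode( HttpStatus.SC_OK )
        .when().post( driver.getUrl( "WEBHCAT" ) + "/v1/hive" );
  }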
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testOozieJobSubmission() throws Exception {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testOozieJobSubmission";
 +    String user = "hdfs";
 +    String pass = "hdfs-password";
 +    String group = "hdfs";
 +
 +    // Cleanup anything that might have been leftover because the test failed previously.
 +    deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
 +
 +    /* Put the workflow definition into HDFS */
 +    createFile( user, pass, group, root+"/workflow.xml", "666", "application/octet-stream", "oozie-workflow.xml", 307, 201, 200 );
 +
 +    /* Put the mapreduce code into HDFS. (hadoop-examples.jar)
 +    curl -X PUT --data-binary @hadoop-examples.jar 'http://192.168.1.163:8888/org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/hadoop-examples.jar?user.name=hdfs&op=CREATE'
 +     */
 +    createFile( user, pass, group, root+"/lib/hadoop-examples.jar", "777", "application/octet-stream", findHadoopExamplesJar(), 307, 201, 200 );
 +
 +    /* Put the data file into HDFS (changes.txt)
 +    curl -X PUT --data-binary @changes.txt 'http://192.168.1.163:8888/org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/input/changes.txt?user.name=hdfs&op=CREATE'
 +     */
 +    createFile( user, pass, group, root+"/input/changes.txt", "666", "text/plain", "changes.txt", 307, 201, 200 );
 +
 +    VelocityEngine velocity = new VelocityEngine();
 +    velocity.setProperty( RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.NullLogSystem" );
 +    velocity.setProperty( RuntimeConstants.RESOURCE_LOADER, "classpath" );
 +    velocity.setProperty( "classpath.resource.loader.class", ClasspathResourceLoader.class.getName() );
 +    velocity.init();
 +
 +    VelocityContext context = new VelocityContext();
 +    context.put( "userName", user );
 +    context.put( "nameNode", "hdfs://sandbox:8020" );
 +    context.put( "jobTracker", "sandbox:50300" );
 +    //context.put( "appPath", "hdfs://sandbox:8020" + root );
 +    context.put( "appPath", root );
 +    context.put( "inputDir", root + "/input" );
 +    context.put( "outputDir", root + "/output" );
 +
 +    //URL url = TestUtils.getResourceUrl( GatewayBasicFuncTest.class, "oozie-jobs-submit-request.xml" );
 +    //String name = url.toExternalForm();
 +    String name = TestUtils.getResourceName( this.getClass(), "oozie-jobs-submit-request.xml" );
 +    Template template = velocity.getTemplate( name );
 +    StringWriter sw = new StringWriter();
 +    template.merge( context, sw );
 +    String request = sw.toString();
 +    //System.out.println( "REQUEST=" + request );
 +
 +    /* Submit the job via Oozie. */
 +    String id = oozieSubmitJob( user, pass, request, 201 );
 +    //System.out.println( "ID=" + id );
 +
 +    String success = "SUCCEEDED";
 +    String status = "UNKNOWN";
 +    long delay = 1000 * 1; // 1 second.
 +    long limit = 1000 * 60; // 60 seconds.
 +    long start = System.currentTimeMillis();
 +    while( System.currentTimeMillis() <= start+limit ) {
 +      status = oozieQueryJobStatus( user, pass, id, 200 );
 +      //System.out.println( "Status=" + status );
 +      if( success.equalsIgnoreCase( status ) ) {
 +        break;
 +      } else {
 +        //System.out.println( "Status=" + status );
 +        Thread.sleep( delay );
 +      }
 +    }
 +    //System.out.println( "Status is " + status + " after " + ((System.currentTimeMillis()-start)/1000) + " seconds." );
 +    MatcherAssert.assertThat( status, is( success ) );
 +
 +    if( CLEANUP_TEST ) {
 +      // Cleanup anything that might have been leftover because the test failed previously.
 +      deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
 +    }
 +    LOG_EXIT();
 +  }
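  // Editor's note: illustrative sketch only, not part of this commit. The wait loop above
  // is a plain poll-until-status pattern; a reusable form of it, assuming the same
  // oozieQueryJobStatus() helper, could look like this. The method name is hypothetical.
  private boolean waitForOozieJobStatus( String user, String password, String jobId,
                                         String expectedStatus, long timeoutMs, long pollMs )
      throws Exception {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while( System.currentTimeMillis() <= deadline ) {
      String status = oozieQueryJobStatus( user, password, jobId, 200 );
      if( expectedStatus.equalsIgnoreCase( status ) ) {
        return true;
      }
      Thread.sleep( pollMs );
    }
    return false;
  }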
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicHiveJDBCUseCase() throws IOException {
 +    LOG_ENTER();
 +    String username = "hive";
 +    String password = "hive-password";
 +
 +    // This use case emulates a simple JDBC scenario which consists of the following steps:
 +    // - open connection;
 +    // - configure Hive using 'execute' statements (this also includes execution of 'close operation' requests internally);
 +    // - execution of a create table command;
 +    // - execution of a select from table command;
 +    // Data insertion is omitted because it would cause a lot of additional commands during insertion/querying.
 +    // All binary data was intercepted during a real scenario and stored into files as arrays of bytes.
 +
 +    // open session
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/open-session-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/open-session-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    Response response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/open-session-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/open-session-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/open-session-result.bin" ) ) );
 +
 +    driver.assertComplete();
 +
 +    // execute 'set hive.fetch.output.serde=...' (called internally by the JDBC driver)
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-set-fetch-output-serde-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-set-fetch-output-serde-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/execute-set-fetch-output-serde-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-set-fetch-output-serde-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-set-fetch-output-serde-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'set hive.fetch.output.serde=...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-1-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-1-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/close-operation-1-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-1-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-1-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'set hive.server2.http.path=...' (called internally by the JDBC driver)
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-set-server2-http-path-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-set-server2-http-path-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/execute-set-server2-http-path-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-set-server2-http-path-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-set-server2-http-path-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'set hive.server2.http.path=...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-2-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-2-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/close-operation-2-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-2-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-2-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'set hive.server2.servermode=...' (called internally by the JDBC driver)
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-set-server2-servermode-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-set-server2-servermode-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/execute-set-server2-servermode-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-set-server2-servermode-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-set-server2-servermode-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'set hive.server2.servermode=...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-3-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-3-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/close-operation-3-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-3-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-3-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'set hive.security.authorization.enabled=...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-set-security-authorization-enabled-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-set-security-authorization-enabled-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/execute-set-security-authorization-enabled-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-set-security-authorization-enabled-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-set-security-authorization-enabled-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'set hive.security.authorization.enabled=...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-4-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-4-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/close-operation-4-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-4-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-4-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'create table...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-create-table-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-create-table-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/execute-create-table-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-create-table-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-create-table-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'create table...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-5-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-5-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/close-operation-5-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-5-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-5-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'select * from...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-select-from-table-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-select-from-table-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/execute-select-from-table-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-select-from-table-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-select-from-table-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'GetResultSetMetadata' (called internally by the JDBC driver)
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/get-result-set-metadata-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/get-result-set-metadata-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/get-result-set-metadata-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/get-result-set-metadata-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/get-result-set-metadata-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'FetchResults' (called internally by the JDBC driver)
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/fetch-results-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/fetch-results-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/fetch-results-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/fetch-results-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/fetch-results-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'select * from...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-6-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-6-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/close-operation-6-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-6-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-6-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close session
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-session-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-session-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .body( driver.getResourceBytes( "hive/close-session-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .then()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-session-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-session-result.bin" ) ) );
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
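  // Editor's note: illustrative sketch only, not part of this commit. Every step in the
  // Hive JDBC use case above repeats the same round trip: register the recorded Thrift
  // request/response pair on the mock HiveServer2, replay the request through the gateway,
  // and compare the returned bytes with the recording. Under the same assumptions about
  // driver and the hive/*.bin resources, the repetition could be factored like this.
  private void hiveRoundTripSketch( String username, String password,
                                    String requestResource, String resultResource ) throws IOException {
    driver.getMock( "HIVE" )
        .expect()
        .method( "POST" )
        .content( driver.getResourceBytes( requestResource ) )
        .contentType( "application/x-thrift" )
        .respond()
        .characterEncoding( "UTF-8" )
        .status( HttpStatus.SC_OK )
        .content( driver.getResourceBytes( resultResource ) )
        .contentType( "application/x-thrift" );
    Response response = given()
        .auth().preemptive().basic( username, password )
        .header( "X-XSRF-Header", "jksdhfkhdsf" )
        .body( driver.getResourceBytes( requestResource ) )
        .contentType( "application/x-thrift" )
        .then()
        .statusCode( HttpStatus.SC_OK )
        .contentType( "application/x-thrift" )
        .when().post( driver.getUrl( "HIVE" ) );
    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( resultResource ) ) );
    driver.assertComplete();
  }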
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseGetTableList() throws IOException {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +    String resourceName = "hbase/table-list";
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( "/" )
 +    .header( "Accept", ContentType.XML.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType( ContentType.XML.toString() );
 +
 +    Response response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.XML.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.XML )
 +    .when().get( driver.getUrl( "WEBHBASE" ) );
 +
 +    MatcherAssert
 +        .assertThat(
 +            the( response.getBody().asString() ),
 +            isEquivalentTo( the( driver.getResourceString( resourceName + ".xml", UTF8 ) ) ) );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( "/" )
 +    .header( "Accept", ContentType.JSON.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceBytes( resourceName + ".json" ) )
 +    .contentType( ContentType.JSON.toString() );
 +
 +    response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.JSON.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.JSON )
 +    .when().get( driver.getUrl( "WEBHBASE" ) );
 +
 +    MatcherAssert
 +    .assertThat( response.getBody().asString(), sameJSONAs( driver.getResourceString( resourceName + ".json", UTF8 ) ) );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( "/" )
 +    .header( "Accept", "application/x-protobuf" )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceString( resourceName + ".protobuf", UTF8 ), UTF8 )
 +    .contentType( "application/x-protobuf" );
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", "application/x-protobuf" )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( "application/x-protobuf" )
 +    .body( is( driver.getResourceString( resourceName + ".protobuf", UTF8 ) ) )
 +    .when().get( driver.getUrl( "WEBHBASE" ) );
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
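  // Editor's note: illustrative sketch only, not part of this commit. The XML, JSON and
  // protobuf variants above differ only in the Accept header, the mocked body and the way
  // the body is compared; the shared mock-and-request part could be factored roughly like
  // this (same driver assumptions, helper name is hypothetical).
  private Response getHBaseRootSketch( String username, String password,
                                       String acceptType, byte[] mockBody ) {
    driver.getMock( "WEBHBASE" )
        .expect()
        .method( "GET" )
        .pathInfo( "/" )
        .header( "Accept", acceptType )
        .respond()
        .status( HttpStatus.SC_OK )
        .content( mockBody )
        .contentType( acceptType );
    return given()
        .auth().preemptive().basic( username, password )
        .header( "X-XSRF-Header", "jksdhfkhdsf" )
        .header( "Accept", acceptType )
        .then()
        .statusCode( HttpStatus.SC_OK )
        .contentType( acceptType )
        .when().get( driver.getUrl( "WEBHBASE" ) );
  }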
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseCreateTableAndVerifySchema() throws IOException {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +    String resourceName = "hbase/table-schema";
 +    String path = "/table/schema";
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "PUT" )
 +    .pathInfo( path )
 +    .respond()
 +    .status( HttpStatus.SC_CREATED )
 +    .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType( ContentType.XML.toString() )
 +    .header( "Location", driver.getRealUrl( "WEBHBASE" ) + path  );
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .then()
 +    .statusCode( HttpStatus.SC_CREATED )
 +    .contentType( ContentType.XML )
 +    .header( "Location", startsWith( driver.getUrl( "WEBHBASE" ) + path ) )
 +    .when().put(driver.getUrl("WEBHBASE") + path);
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "PUT" )
 +    .pathInfo( path )
 +    .respond()
 +    .status(HttpStatus.SC_CREATED)
 +    .content(driver.getResourceBytes(resourceName + ".json"))
 +    .contentType(ContentType.JSON.toString())
 +    .header("Location", driver.getRealUrl("WEBHBASE") + path);
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .then()
 +    .statusCode( HttpStatus.SC_CREATED )
 +    .contentType( ContentType.JSON )
 +    .header( "Location", startsWith( driver.getUrl( "WEBHBASE" ) + path ) )
 +    .when().put( driver.getUrl( "WEBHBASE" ) + path );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "PUT" )
 +    .pathInfo( path )
 +    .respond()
 +    .status( HttpStatus.SC_CREATED )
 +    .content( driver.getResourceBytes( resourceName + ".protobuf" ) )
 +    .contentType( "application/x-protobuf" )
 +    .header("Location", driver.getRealUrl("WEBHBASE") + path);
 +
 +    given()
 +    .auth().preemptive().basic(username, password)
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .then()
 +    .statusCode(HttpStatus.SC_CREATED)
 +    .contentType("application/x-protobuf")
 +    .header("Location", startsWith(driver.getUrl("WEBHBASE") + path))
 +    .when().put(driver.getUrl("WEBHBASE") + path);
 +    driver.assertComplete();
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseGetTableSchema() throws IOException {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +    String resourceName = "hbase/table-metadata";
 +    String path = "/table/schema";
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( path )
 +    .header("Accept", ContentType.XML.toString())
 +    .respond()
 +    .status(HttpStatus.SC_OK)
 +    .content(driver.getResourceBytes(resourceName + ".xml"))
 +    .contentType(ContentType.XML.toString());
 +
 +    Response response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.XML.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.XML )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + path );
 +
 +    MatcherAssert
 +        .assertThat(
 +            the(response.getBody().asString()),
 +            isEquivalentTo(the(driver.getResourceString(resourceName + ".xml", UTF8))));
 +    driver.assertComplete();
 +
 +    driver.getMock("WEBHBASE")
 +    .expect()
 +    .method("GET")
 +    .pathInfo(path)
 +    .header("Accept", ContentType.JSON.toString())
 +    .respond()
 +    .status(HttpStatus.SC_OK)
 +    .content(driver.getResourceBytes(resourceName + ".json"))
 +    .contentType(ContentType.JSON.toString());
 +
 +    response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.JSON.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.JSON )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + path );
 +
 +    MatcherAssert
 +    .assertThat(response.getBody().asString(), sameJSONAs(driver.getResourceString(resourceName + ".json", UTF8)));
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( path )
 +    .header( "Accept", "application/x-protobuf" )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceBytes( resourceName + ".protobuf" ) )
 +    .contentType("application/x-protobuf");
 +
 +    response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", "application/x-protobuf" )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    //.content( is( driver.getResourceBytes( resourceName + ".protobuf" ) ) )
 +    .contentType( "application/x-protobuf" )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + path );
 +    // RestAssured seems to be screwing up the binary comparison so do it explicitly.
 +    assertThat( driver.getResourceBytes( resourceName + ".protobuf" ), is( response.body().asByteArray() ) );
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseInsertDataIntoTable() throws IOException {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +
 +    String resourceName = "hbase/table-data";
 +    String singleRowPath = "/table/testrow";
 +    String multipleRowPath = "/table/false-row-key";
 +
 +    //PUT request
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "PUT" )
 +    .pathInfo( multipleRowPath )
 +    //.header( "Content-Type", ContentType.XML.toString() )
 +    .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType( ContentType.XML.toString() )
 +    .respond()
 +    .status(HttpStatus.SC_OK);
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    //.header( "Content-Type", ContentType.XML.toString() )
 +    .body( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType( ContentType.XML.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().put(driver.getUrl("WEBHBASE") + multipleRowPath);
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "PUT" )
 +    .pathInfo( singleRowPath )
 +    //.header( "Content-Type", ContentType.JSON.toString() )
 +    .contentType( ContentType.JSON.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK );
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    //.header( "Content-Type", ContentType.JSON.toString() )
 +    .body( driver.getResourceBytes( resourceName + ".json" ) )
 +    .contentType( ContentType.JSON.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().put(driver.getUrl("WEBHBASE") + singleRowPath);
 +    driver.assertComplete();
 +
 +    driver.getMock("WEBHBASE")
 +    .expect()
 +    .method("PUT")
 +    .pathInfo(multipleRowPath)
 +    //.header( "Content-Type", "application/x-protobuf" )
 +    .contentType("application/x-protobuf")
 +    .content(driver.getResourceBytes(resourceName + ".protobuf"))
 +    .respond()
 +    .status(HttpStatus.SC_OK);
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    //.header( "Content-Type", "application/x-protobuf" )
 +    .body( driver.getResourceBytes( resourceName + ".protobuf" ) )
 +    .contentType( "application/x-protobuf" )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().put( driver.getUrl( "WEBHBASE" ) + multipleRowPath );
 +    driver.assertComplete();
 +
 +    //POST request
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "POST" )
 +    .pathInfo( multipleRowPath )
 +    //.header( "Content-Type", ContentType.XML.toString() )
 +    .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType( ContentType.XML.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK );
 +
 +    given()
 +      .auth().preemptive().basic( username, password )
 +      .header("X-XSRF-Header", "jksdhfkhdsf")
 +      //.header( "Content-Type", ContentType.XML.toString() )
 +      .body( driver.getResourceBytes( resourceName + ".xml" ) )
 +      .contentType( ContentType.XML.toString() )
 +      .then()
 +      .statusCode( HttpStatus.SC_OK )
 +      .when().post( driver.getUrl( "WEBHBASE" ) + multipleRowPath );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "POST" )
 +    .pathInfo( singleRowPath )
 +    //.header( "Content-Type", ContentType.JSON.toString() )
 +    .contentType( ContentType.JSON.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK );
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    //.header( "Content-Type", ContentType.JSON.toString() )
 +    .body( driver.getResourceBytes( resourceName + ".json" ) )
 +    .contentType( ContentType.JSON.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().post( driver.getUrl( "WEBHBASE" ) + singleRowPath );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "POST" )
 +    .pathInfo( multipleRowPath )
 +    //.header( "Content-Type", "application/x-protobuf" )
 +    .content( driver.getResourceBytes( resourceName + ".protobuf" ) )
 +    .contentType( "application/x-protobuf" )
 +    .respond()
 +    .status( HttpStatus.SC_OK );
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    //.header( "Content-Type", "application/x-protobuf" )
 +    .body( driver.getResourceBytes( resourceName + ".protobuf" ) )
 +    .contentType( "application/x-protobuf" )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().post(driver.getUrl("WEBHBASE") + multipleRowPath);
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseDeleteDataFromTable() {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +
 +    String tableId = "table";
 +    String rowId = "row";
 +    String familyId = "family";
 +    String columnId = "column";
 +
 +    driver.getMock("WEBHBASE")
 +    .expect()
 +    .from("testHBaseDeleteDataFromTable-1")
 +    .method("DELETE")
 +    .pathInfo("/" + tableId + "/" + rowId)
 +    .respond()
 +    .status(HttpStatus.SC_OK);
 +
 +    given()
 +    .auth().preemptive().basic(username, password)
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().delete(driver.getUrl("WEBHBASE") + "/" + tableId + "/" + rowId);
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .from("testHBaseDeleteDataFromTable-2")
 +    .method("DELETE")
 +    .pathInfo("/" + tableId + "/" + rowId + "/" + familyId)
 +    .respond()
 +    .status( HttpStatus.SC_OK );
 +
 +    given()
 +    .auth().preemptive().basic(username, password)
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().delete(driver.getUrl("WEBHBASE") + "/" + tableId + "/" + rowId + "/" + familyId);
 +    driver.assertComplete();
 +
 +    driver.getMock("WEBHBASE")
 +    .expect()
 +    .from("testHBaseDeleteDataFromTable-3")
 +    .method("DELETE")
 +    .pathInfo("/" + tableId + "/" + rowId + "/" + familyId + ":" + columnId)
 +    .respond()
 +    .status(HttpStatus.SC_OK);
 +
 +    given()
 +    .auth().preemptive().basic(username, password)
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().delete(driver.getUrl("WEBHBASE") + "/" + tableId + "/" + rowId + "/" + familyId + ":" + columnId);
 +    driver.assertComplete();
 +
 +    LOG_EXIT();
 +  }
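  // Editor's note: illustrative sketch only, not part of this commit. The three deletes
  // above walk the HBase REST path hierarchy (/table/row, /table/row/family,
  // /table/row/family:column); apart from the path they are identical, so the shared
  // request could be expressed like this (helper name is hypothetical).
  private void deleteHBasePathSketch( String username, String password, String path ) {
    driver.getMock( "WEBHBASE" )
        .expect()
        .method( "DELETE" )
        .pathInfo( path )
        .respond()
        .status( HttpStatus.SC_OK );
    given()
        .auth().preemptive().basic( username, password )
        .header( "X-XSRF-Header", "jksdhfkhdsf" )
        .then()
        .statusCode( HttpStatus.SC_OK )
        .when().delete( driver.getUrl( "WEBHBASE" ) + path );
    driver.assertComplete();
  }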
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseQueryTableData() throws IOException {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +
 +    String resourceName = "hbase/table-data";
 +
 +    String allRowsPath = "/table/*";
 +    String rowsStartsWithPath = "/table/row*";
 +    String rowsWithKeyPath = "/table/row";
 +    String rowsWithKeyAndColumnPath = "/table/row/family:col";
 +
 +    driver.getMock("WEBHBASE")
 +    .expect()
 +    .method("GET")
 +    .pathInfo(allRowsPath)
 +    .header("Accept", ContentType.XML.toString())
 +    .respond()
 +    .status(HttpStatus.SC_OK)
 +    .content(driver.getResourceBytes(resourceName + ".xml"))
 +    .contentType(ContentType.XML.toString());
 +
 +    Response response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.XML.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.XML )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + allRowsPath );
 +
 +    MatcherAssert
 +    .assertThat(
 +        the(response.getBody().asString()),
 +        isEquivalentTo(the(driver.getResourceString(resourceName + ".xml", UTF8))));
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( rowsStartsWithPath )
 +    .header( "Accept", ContentType.XML.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType(ContentType.XML.toString());
 +
 +    response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.XML.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.XML )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + rowsStartsWithPath );
 +
 +    MatcherAssert
 +    .assertThat(
 +        the(response.getBody().asString()),
 +        isEquivalentTo(the(driver.getResourceString(resourceName + ".xml", UTF8))));
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( rowsWithKeyPath )
 +    .header( "Accept", ContentType.JSON.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceBytes( resourceName + ".json" ) )
 +    .contentType( ContentType.JSON.toString() );
 +
 +    response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.JSON.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.JSON )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + rowsWithKeyPath );
 +
 +    MatcherAssert
 +    .assertThat( response.getBody().asString(), sameJSONAs( driver.getResourceString( resourceName + ".json", UTF8 ) ) );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( rowsWithKeyAndColumnPath )
 +    .header( "Accept", ContentType.JSON.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceBytes( resourceName + ".json" ) )
 +    .contentType( ContentType.JSON.toString() );
 +
 +    response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.JSON.toString() )
 +    .then()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.JSON )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + rowsWithKeyAndColumnPath );
 +
 +    MatcherAssert
 +    .assertThat( response.getBody().asString(), sameJSONAs( driver.getResourceString( resourceName + ".json", UTF8 ) ) );
 +    driver.assertComplete();
 +    LOG_EXIT();
 

<TRUNCATED>
http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/pom.xml
----------------------------------------------------------------------


[16/49] knox git commit: KNOX-1128 - Readonly protection for generated topologies in Knox Admin UI

Posted by mo...@apache.org.
KNOX-1128 - Readonly protection for generated topologies in Knox Admin UI


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/d835af99
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/d835af99
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/d835af99

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: d835af99ba4c0c5fdfeddb6d8ccd03f2c6616333
Parents: 2ff3e75
Author: Sandeep More <mo...@apache.org>
Authored: Thu Nov 30 15:29:57 2017 -0500
Committer: Sandeep More <mo...@apache.org>
Committed: Thu Nov 30 15:29:57 2017 -0500

----------------------------------------------------------------------
 .gitignore                                      |    1 +
 gateway-admin-ui/README.md                      |   34 +-
 gateway-admin-ui/angular-cli.json               |   22 +-
 gateway-admin-ui/package.json                   |   60 +-
 gateway-admin-ui/src/app/app.module.ts          |    6 +-
 .../src/app/topology-detail.component.ts        |   55 +-
 gateway-admin-ui/src/app/topology.component.ts  |    2 +-
 .../src/assets/knox-logo-transparent.gif        |  Bin 0 -> 19703 bytes
 gateway-admin-ui/src/index.html                 |    4 +-
 gateway-admin-ui/src/tsconfig.json              |    4 +-
 .../app/assets/knox-logo-transparent.gif        |  Bin 0 -> 19703 bytes
 .../applications/admin-ui/app/index.html        |   62 +-
 .../app/inline.b47d11937c275f76ce02.bundle.js   |    1 +
 .../app/inline.d41d8cd98f00b204e980.bundle.js   |    2 -
 .../app/inline.d41d8cd98f00b204e980.bundle.map  |    1 -
 .../app/main.806d67070af66e18c2fc.bundle.js     |    2 -
 .../app/main.806d67070af66e18c2fc.bundle.js.gz  |  Bin 3657 -> 0 bytes
 .../app/main.806d67070af66e18c2fc.bundle.map    |    1 -
 .../app/main.a69408978854e3a77fb2.bundle.js     |    1 +
 .../app/scripts.2c89ed78f648df44c10f.bundle.js  |   12 +
 .../app/styles.b2328beb0372c051d06d.bundle.js   |    2 -
 .../app/styles.b2328beb0372c051d06d.bundle.map  |    1 -
 .../app/styles.d41d8cd98f00b204e980.bundle.css  |    0
 ....d41d8cd98f00b204e9800998ecf8427e.bundle.css |    2 -
 .../app/vendor.48771018d3da89d3269f.bundle.js   | 2035 ------------------
 .../vendor.48771018d3da89d3269f.bundle.js.gz    |  Bin 459997 -> 0 bytes
 .../app/vendor.48771018d3da89d3269f.bundle.map  |    1 -
 27 files changed, 127 insertions(+), 2184 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 61d27c8..01310ca 100644
--- a/.gitignore
+++ b/.gitignore
@@ -64,6 +64,7 @@ e2e/*.js
 e2e/*.map
 gateway-admin-ui/src/app/**/*.js
 gateway-admin-ui/src/app/**/*.map
+gateway-admin-ui/package-lock.json
 
 #System Files
 Thumbs.db

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-admin-ui/README.md
----------------------------------------------------------------------
diff --git a/gateway-admin-ui/README.md b/gateway-admin-ui/README.md
index 40a5d3d..c5e8a4e 100644
--- a/gateway-admin-ui/README.md
+++ b/gateway-admin-ui/README.md
@@ -1,23 +1,7 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
+
 # NgKnoxUi
 
-This project was generated with [angular-cli](https://github.com/angular/angular-cli) version 1.0.0-beta.22-1.
+This project was generated with [Angular CLI](https://cli.angular.io/) version 1.5.3.
 
 ## Development server
 Run `ng serve` for a dev server. Navigate to `http://localhost:4200/`. The app will automatically reload if you change any of the source files.
@@ -28,8 +12,22 @@ Run `ng generate component component-name` to generate a new component. You can
 
 ## Build
 
+### Using ant
+Run `ant build-admin-ui`
+
+### For direct Angular builds:
 Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory. Use the `-prod` flag for a production build.
 
+## Install (Debug)
+Install the Admin UI build into the test gateway home installation
+
+Run `ant install-test-admin-ui`
+
+## Checkin / Release
+Copy the build artifacts of the admin UI to the applications module for check-in
+
+Run `ant promote-admin-ui`
+
 ## Running unit tests
 
 Run `ng test` to execute the unit tests via [Karma](https://karma-runner.github.io).

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-admin-ui/angular-cli.json
----------------------------------------------------------------------
diff --git a/gateway-admin-ui/angular-cli.json b/gateway-admin-ui/angular-cli.json
index 832c4f0..b5caba2 100644
--- a/gateway-admin-ui/angular-cli.json
+++ b/gateway-admin-ui/angular-cli.json
@@ -20,9 +20,12 @@
       "styles": [
         "styles.css"
       ],
-      "scripts": [],
+      "scripts": [
+		  "../node_modules/ace-builds/src-min/ace.js",
+		  "../node_modules/ace-builds/src-min/theme-monokai.js"
+      ],
+	    "environmentSource": "environments/environment.ts",
       "environments": {
-        "source": "environments/environment.ts",
         "dev": "environments/environment.ts",
         "prod": "environments/environment.prod.ts"
       }
@@ -42,18 +45,9 @@
   },
   "defaults": {
     "styleExt": "css",
-    "prefixInterfaces": false,
-    "inline": {
-      "style": false,
-      "template": false
-    },
-    "spec": {
-      "class": false,
-      "component": true,
-      "directive": true,
-      "module": false,
-      "pipe": true,
-      "service": true
+    "component": {
+      "inlineTemplate": false,
+      "spec": true
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-admin-ui/package.json
----------------------------------------------------------------------
diff --git a/gateway-admin-ui/package.json b/gateway-admin-ui/package.json
index 0d7fdd7..4218fd5 100644
--- a/gateway-admin-ui/package.json
+++ b/gateway-admin-ui/package.json
@@ -2,9 +2,9 @@
   "name": "ng-knox-ui",
   "version": "0.0.0",
   "license": "MIT",
-  "angular-cli": {},
   "scripts": {
     "start": "ng serve",
+    "build": "ng build",
     "lint": "tslint \"src/**/*.ts\"",
     "test": "ng test",
     "pree2e": "webdriver-manager update",
@@ -12,38 +12,42 @@
   },
   "private": true,
   "dependencies": {
-    "@angular/common": "2.2.3",
-    "@angular/compiler": "2.2.3",
-    "@angular/core": "2.2.3",
-    "@angular/forms": "2.2.3",
-    "@angular/http": "2.2.3",
-    "@angular/platform-browser": "2.2.3",
-    "@angular/platform-browser-dynamic": "2.2.3",
-    "@angular/router": "3.2.3",
+    "@angular/common": "^5.0.0",
+    "@angular/compiler": "^5.0.0",
+    "@angular/core": "^5.0.0",
+    "@angular/forms": "^5.0.0",
+    "@angular/http": "^5.0.0",
+    "@angular/platform-browser": "^5.0.0",
+    "@angular/platform-browser-dynamic": "^5.0.0",
+    "@angular/router": "^5.0.0",
     "core-js": "^2.4.1",
+    "rxjs": "^5.5.2",
+    "zone.js": "^0.8.14",
+    "ng2-ace-editor": "0.3.3",
     "ng2-bs3-modal": "^0.10.4",
-    "ng2-ace-editor": "0.1.3",
-    "rxjs": "5.0.0-beta.12",
-    "ts-helpers": "^1.1.1",
-    "zone.js": "^0.6.23"
+    "ts-helpers": "^1.1.1"
   },
   "devDependencies": {
-    "@angular/compiler-cli": "2.2.3",
-    "@types/jasmine": "2.5.38",
-    "@types/node": "^6.0.42",
-    "angular-cli": "1.0.0-beta.22-1",
-    "codelyzer": "~2.0.0-beta.1",
-    "jasmine-core": "2.5.2",
-    "jasmine-spec-reporter": "2.5.0",
-    "karma": "1.2.0",
-    "karma-chrome-launcher": "^2.0.0",
-    "karma-cli": "^1.0.1",
-    "karma-jasmine": "^1.0.2",
+    "@angular/cli": "1.5.3",
+    "@angular/compiler-cli": "^5.0.0",
+    "@angular/language-service": "^5.0.0",
+    "@types/jasmine": "~2.5.53",
+    "@types/jasminewd2": "~2.0.2",
+    "@types/node": "~6.0.60",
+    "codelyzer": "~3.2.0",
+    "jasmine-core": "~2.6.2",
+    "jasmine-spec-reporter": "~4.1.0",
+    "karma": "~1.7.0",
+    "karma-chrome-launcher": "~2.1.1",
+    "karma-cli": "~1.0.1",
+    "karma-coverage-istanbul-reporter": "^1.2.1",
+    "karma-jasmine": "~1.1.0",
+    "karma-jasmine-html-reporter": "^0.2.2",
     "karma-remap-istanbul": "^0.2.1",
-    "protractor": "4.0.9",
-    "ts-node": "1.2.1",
-    "tslint": "^4.0.2",
-    "typescript": "~2.0.3",
+    "protractor": "~5.1.2",
+    "ts-node": "~3.2.0",
+    "tslint": "~5.7.0",
+    "typescript": "~2.4.2",
     "webdriver-manager": "10.2.5"
   }
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-admin-ui/src/app/app.module.ts
----------------------------------------------------------------------
diff --git a/gateway-admin-ui/src/app/app.module.ts b/gateway-admin-ui/src/app/app.module.ts
index afa8f4b..a081ead 100644
--- a/gateway-admin-ui/src/app/app.module.ts
+++ b/gateway-admin-ui/src/app/app.module.ts
@@ -30,20 +30,20 @@ import {JsonPrettyPipe} from "./utils/json-pretty.pipe";
 import { TabComponent } from './utils/tab.component';
 import { TabsComponent } from './utils/tabs.component';
 
-import { AceEditorDirective } from 'ng2-ace-editor'; 
+import { AceEditorModule } from 'ng2-ace-editor';
 import { Ng2Bs3ModalModule } from 'ng2-bs3-modal/ng2-bs3-modal'
 
 @NgModule({
   imports: [ BrowserModule,
     HttpModule,
     FormsModule,
-    Ng2Bs3ModalModule
+    Ng2Bs3ModalModule,
+    AceEditorModule
     ],
   declarations: [ AppComponent,
     TopologyComponent,
       TopologyDetailComponent,
     GatewayVersionComponent,
-    AceEditorDirective,
     XmlPipe,
     JsonPrettyPipe,
     TabsComponent,

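For context only (not part of the commit): with ng2-ace-editor 0.3.x the old attribute directive gives way to an AceEditorModule plus an <ace-editor> component, as the hunks above and below show. A minimal, self-contained sketch of that wiring follows; the module and component names here are purely illustrative.

// Sketch under the assumption of ng2-ace-editor 0.3.x on Angular 5, mirroring the
// bindings the commit uses ([(text)], [mode], [theme], (textChanged)).
import { NgModule, Component } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { AceEditorModule } from 'ng2-ace-editor';

// Themes and modes are imported explicitly, as the commit does in
// topology-detail.component.ts.
import 'brace/theme/monokai';
import 'brace/mode/xml';

@Component({
  selector: 'editor-demo',
  template: `
    <ace-editor [(text)]="content" [mode]="'xml'" [theme]="'monokai'"
                style="min-height: 300px; width: 100%;"
                (textChanged)="onChange($event)">
    </ace-editor>`
})
export class EditorDemoComponent {
  content = '<topology></topology>';
  onChange(text: string) { console.log('editor content changed, length:', text.length); }
}

@NgModule({
  // Import the editor module instead of declaring the old AceEditorDirective.
  imports: [BrowserModule, AceEditorModule],
  declarations: [EditorDemoComponent],
  bootstrap: [EditorDemoComponent]
})
export class EditorDemoModule {}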
http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-admin-ui/src/app/topology-detail.component.ts
----------------------------------------------------------------------
diff --git a/gateway-admin-ui/src/app/topology-detail.component.ts b/gateway-admin-ui/src/app/topology-detail.component.ts
index 630788b..e93aaaa 100644
--- a/gateway-admin-ui/src/app/topology-detail.component.ts
+++ b/gateway-admin-ui/src/app/topology-detail.component.ts
@@ -18,28 +18,35 @@ import { Component, OnInit, ViewChild, ViewEncapsulation} from '@angular/core';
 import { Topology } from './topology';
 import {TopologyService} from "./topology.service";
 import { ModalComponent } from 'ng2-bs3-modal/ng2-bs3-modal';
+import { ViewChildren } from '@angular/core/src/metadata/di';
+
+import 'brace/theme/monokai';
+import 'brace/mode/xml';
 
 @Component({
     selector: 'topology-detail',
     template: `
      <div class="panel panel-default">
         <div class="panel-heading">
-            <h4 class="panel-title">{{title}} <span class="label label-default pull-right">{{titleSuffix}}</span></h4>
+            <h4 class="panel-title">{{title}} <span *ngIf="showEditOptions == false" style="padding-left: 15%;" class="text-danger text-center" > Read Only (generated file) </span> <span class="label label-default pull-right">{{titleSuffix}}</span></h4>
          </div>
      <div *ngIf="topologyContent" class="panel-body">
-      <div ace-editor
-       [readOnly]="false" [text]="topologyContent | xml" [mode]="'xml'" [options]="options" 
-        [theme]="'monokai'"
-         style="min-height: 300px; width:100%; overflow: auto;" (textChanged)="onChange($event)">
-      </div>
-       <div class="panel-footer">
-        <button (click)="duplicateModal.open('sm')" class="btn btn-default btn-sm" type="submit">
+      <ace-editor
+        [(text)]="topologyContent" 
+        [mode]="'xml'" 
+        [options]="options" 
+        [theme]="theme"
+        style="min-height: 430px; width:100%; overflow: auto;" 
+        (textChanged)="onChange($event)">
+      </ace-editor>
+       <div *ngIf="showEditOptions" class="panel-footer">
+        <button id="duplicateTopology" (click)="duplicateModal.open('sm')" class="btn btn-default btn-sm" type="submit" >
             <span class="glyphicon glyphicon-duplicate" aria-hidden="true"></span>
         </button>
-        <button (click)="deleteConfirmModal.open('sm')" class="btn btn-default btn-sm" type="submit">
+        <button id="deleteTopology" (click)="deleteConfirmModal.open('sm')" class="btn btn-default btn-sm" type="submit" >
             <span class="glyphicon glyphicon-trash" aria-hidden="true"></span>
         </button>
-       <button (click)="saveTopology()" class="btn btn-default btn-sm pull-right" [disabled]="!changedTopology" type="submit">
+       <button id="saveTopology" (click)="saveTopology()" class="btn btn-default btn-sm pull-right" [disabled]="!changedTopology" type="submit" >
             <span class="glyphicon glyphicon-floppy-disk" aria-hidden="true"></span>
         </button>
        </div>
@@ -83,6 +90,9 @@ export class TopologyDetailComponent implements OnInit {
     topologyContent: string;
     changedTopology: string;
     newTopologyName: string;
+    readOnly: boolean;
+    showEditOptions:boolean = true;
+    theme: String = "monokai";
     options:any = {useWorker: false, printMargin: false};
 
     @ViewChild('duplicateModal')
@@ -91,6 +101,8 @@ export class TopologyDetailComponent implements OnInit {
     @ViewChild('deleteConfirmModal')
     deleteConfirmModal: ModalComponent;
 
+    @ViewChild('editor') editor;
+
     constructor(private topologyService : TopologyService) {
     }
 
@@ -130,10 +142,31 @@ export class TopologyDetailComponent implements OnInit {
         this.setTitle(topology.name);
         if (this.topology) {
             if (this.topology.href) {
-                this.topologyService.getTopology(this.topology.href).then( content => this.topologyContent = content);
+              this.topologyService.getTopology(this.topology.href).then( content => this.topologyContent = content).then(() => this.makeReadOnly(this.topologyContent, 'generated') );
             }
         }
     }
 
+    /*
+    * Parse the topology XML and, depending on the value of the
+    * provided tag, make the editor read-only.
+    */
+    makeReadOnly(text, tag) {
+        var parser = new DOMParser();
+        var parsed = parser.parseFromString(text,"text/xml");
+
+        var tagValue = parsed.getElementsByTagName(tag);
+        var result = tagValue[0].childNodes[0].nodeValue;
+        
+        if(result === 'true') {
+            this.showEditOptions = false;
+            this.options = {readOnly: true, useWorker: false, printMargin: false, highlightActiveLine: false, highlightGutterLine: false}; 
+        } else {
+            this.showEditOptions = true;
+            this.options = {readOnly: false, useWorker: false, printMargin: false}; 
+        }
+
+    }
+
 
 }
\ No newline at end of file

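A brief aside on the makeReadOnly() addition above: it dereferences tagValue[0].childNodes[0].nodeValue directly, so a topology without a <generated> element would make the promise chain reject with a TypeError. A null-safe variant is sketched below purely for illustration; the function name and default tag are assumptions, not part of the commit.

// Hedged sketch: the same check as makeReadOnly(), tolerating a missing or empty tag.
function isGeneratedTopology(xml: string, tag: string = 'generated'): boolean {
  const doc = new DOMParser().parseFromString(xml, 'text/xml');
  const node = doc.getElementsByTagName(tag)[0];
  // A missing tag, or any value other than 'true', leaves the topology editable.
  return !!node && node.textContent !== null && node.textContent.trim() === 'true';
}

// Illustrative use, matching the commit's flag handling:
//   this.showEditOptions = !isGeneratedTopology(this.topologyContent, 'generated');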
http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-admin-ui/src/app/topology.component.ts
----------------------------------------------------------------------
diff --git a/gateway-admin-ui/src/app/topology.component.ts b/gateway-admin-ui/src/app/topology.component.ts
index 3878425..d0d17a6 100644
--- a/gateway-admin-ui/src/app/topology.component.ts
+++ b/gateway-admin-ui/src/app/topology.component.ts
@@ -36,7 +36,7 @@ import { Subscription }   from 'rxjs/Subscription';
           [class.selected]="topology === selectedTopology"
         (click)="onSelect(topology)">
          <td>{{topology.name}}</td> 
-         <td>{{topology.timestamp | date:'yMMMdjms'}}</td> 
+         <td>{{topology.timestamp | date:'medium'}}</td> 
          </tr>
         </tbody>
         </table>

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-admin-ui/src/assets/knox-logo-transparent.gif
----------------------------------------------------------------------
diff --git a/gateway-admin-ui/src/assets/knox-logo-transparent.gif b/gateway-admin-ui/src/assets/knox-logo-transparent.gif
new file mode 100644
index 0000000..7099659
Binary files /dev/null and b/gateway-admin-ui/src/assets/knox-logo-transparent.gif differ

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-admin-ui/src/index.html
----------------------------------------------------------------------
diff --git a/gateway-admin-ui/src/index.html b/gateway-admin-ui/src/index.html
index ebef9b0..02bc3bd 100644
--- a/gateway-admin-ui/src/index.html
+++ b/gateway-admin-ui/src/index.html
@@ -47,7 +47,7 @@
               <span class="icon-bar"></span>
               <span class="icon-bar"></span>
             </button>
-            <a class="navbar-brand" href="#">Apache Knox Manager</a>
+            <a class="navbar-brand" href="#"> <img style="max-width:100px; margin-top: -9px;" src="assets/knox-logo-transparent.gif" alt="Apache Knox Manager"> </a>
           </div>
           <!--<div id="navbar" class="navbar-collapse collapse">
             <ul class="nav navbar-nav  navbar-right">
@@ -61,10 +61,12 @@
   <div class="container">
 
     <!-- Main component for a primary marketing message or call to action -->
+    <!--
     <div class="jumbotron">
       <h3>Topology Management</h3>
       <p>Resource Access Management</p>
     </div>
+    -->
   </div>
 
 

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-admin-ui/src/tsconfig.json
----------------------------------------------------------------------
diff --git a/gateway-admin-ui/src/tsconfig.json b/gateway-admin-ui/src/tsconfig.json
index 1cf713a..c22aff4 100644
--- a/gateway-admin-ui/src/tsconfig.json
+++ b/gateway-admin-ui/src/tsconfig.json
@@ -1,10 +1,10 @@
 {
   "compilerOptions": {
-    "baseUrl": "",
+    "baseUrl": "src",
     "declaration": false,
     "emitDecoratorMetadata": true,
     "experimentalDecorators": true,
-    "lib": ["es6", "dom"],
+    "lib": ["es2017", "dom"],
     "mapRoot": "./",
     "module": "es6",
     "moduleResolution": "node",

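One note on the tsconfig change (an aside, not part of the commit): setting "baseUrl" to "src" lets non-relative imports resolve from the source root, for example

// Illustrative only; the files shown in this commit keep their relative imports.
import { TopologyService } from 'app/topology.service';   // resolves to src/app/topology.service

while "lib": ["es2017", "dom"] simply exposes the newer standard-library typings to the compiler.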
http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/assets/knox-logo-transparent.gif
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/assets/knox-logo-transparent.gif b/gateway-applications/src/main/resources/applications/admin-ui/app/assets/knox-logo-transparent.gif
new file mode 100644
index 0000000..7099659
Binary files /dev/null and b/gateway-applications/src/main/resources/applications/admin-ui/app/assets/knox-logo-transparent.gif differ

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/index.html
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/index.html b/gateway-applications/src/main/resources/applications/admin-ui/app/index.html
index 1274693..befe668 100644
--- a/gateway-applications/src/main/resources/applications/admin-ui/app/index.html
+++ b/gateway-applications/src/main/resources/applications/admin-ui/app/index.html
@@ -11,69 +11,13 @@
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
--->
-<!doctype html>
-<html>
-<head>
-  <meta charset="utf-8">
-  <title>Apache Knox Manager</title>
-  <meta name="viewport" content="width=device-width, initial-scale=1">
-  <link rel="icon" type="image/x-icon" href="favicon.ico">
-      <meta name="viewport" content="width=device-width, initial-scale=1">
-    <!-- Latest compiled and minified CSS -->
-    <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u" crossorigin="anonymous">
-
-    <!-- Optional theme -->
-    <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap-theme.min.css" integrity="sha384-rHyoN1iRsVXV4nD0JutlnGaslCJuC7uwjduW9SVrLvRYooPp2bWYgmgJQIXwl/Sp" crossorigin="anonymous">
-    <!-- Custom styles for this template -->
-    <link href="assets/sticky-footer.css" rel="stylesheet">
-
-    <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"></script>
-    <!-- Latest compiled and minified JavaScript -->
-    <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js" integrity="sha384-Tc5IQib027qvyjSMfHjOMaLkfuWVxZxUPnCJA7l2mCWNIpG9mGCD8wGNIcPD7Txa" crossorigin="anonymous"></script>
-    <script src="assets/vkbeautify.js"></script>
-
-<link href="styles.d41d8cd98f00b204e9800998ecf8427e.bundle.css" rel="stylesheet"></head>
-<body>
-  <div class="navbar-wrapper">
-    <div class="container-fluid">
-
-      <nav class="navbar navbar-inverse navbar-static-top">
-        <div class="container-fluid">
-          <div class="navbar-header">
-            <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false" aria-controls="navbar">
-              <span class="sr-only">Toggle navigation</span>
-              <span class="icon-bar"></span>
-              <span class="icon-bar"></span>
-              <span class="icon-bar"></span>
-            </button>
-            <a class="navbar-brand" href="#">Apache Knox Manager</a>
-          </div>
-          <!--<div id="navbar" class="navbar-collapse collapse">
+--><!doctype html><html><head><meta charset="utf-8"><title>Apache Knox Manager</title><meta name="viewport" content="width=device-width,initial-scale=1"><link rel="icon" type="image/x-icon" href="favicon.ico"><meta name="viewport" content="width=device-width,initial-scale=1"><!-- Latest compiled and minified CSS --><link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u" crossorigin="anonymous"><!-- Optional theme --><link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap-theme.min.css" integrity="sha384-rHyoN1iRsVXV4nD0JutlnGaslCJuC7uwjduW9SVrLvRYooPp2bWYgmgJQIXwl/Sp" crossorigin="anonymous"><!-- Custom styles for this template --><link href="assets/sticky-footer.css" rel="stylesheet"><script src="https://ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"></script><!-- Latest compiled and minified JavaScript --><sc
 ript src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js" integrity="sha384-Tc5IQib027qvyjSMfHjOMaLkfuWVxZxUPnCJA7l2mCWNIpG9mGCD8wGNIcPD7Txa" crossorigin="anonymous"></script><script src="assets/vkbeautify.js"></script><link href="styles.d41d8cd98f00b204e980.bundle.css" rel="stylesheet"/></head><body><div class="navbar-wrapper"><div class="container-fluid"><nav class="navbar navbar-inverse navbar-static-top"><div class="container-fluid"><div class="navbar-header"><button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false" aria-controls="navbar"><span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span></button> <a class="navbar-brand" href="#"><img style="max-width:100px; margin-top: -9px;" src="assets/knox-logo-transparent.gif" alt="Apache Knox Manager"></a></div><!--<div id="navbar" class="navbar-collapse collapse">
             <ul class="nav navbar-nav  navbar-right">
               <li><a href="./">Logout</a></li>
             </ul>
-          </div>-->
-        </div>
-      </nav>
-
-  </div>
-  <div class="container">
-
-    <!-- Main component for a primary marketing message or call to action -->
+          </div>--></div></nav></div><div class="container"><!-- Main component for a primary marketing message or call to action --><!--
     <div class="jumbotron">
       <h3>Topology Management</h3>
       <p>Resource Access Management</p>
     </div>
-  </div>
-
-
-  <!-- Content -->
-  <topology-management></topology-management>
-
-  <footer class="footer">
-    <div>Knox Manager Version 0.0.1</div>
-    <gateway-version></gateway-version>
-  </footer>
-<script type="text/javascript" src="inline.d41d8cd98f00b204e980.bundle.js"></script><script type="text/javascript" src="styles.b2328beb0372c051d06d.bundle.js"></script><script type="text/javascript" src="vendor.48771018d3da89d3269f.bundle.js"></script><script type="text/javascript" src="main.806d67070af66e18c2fc.bundle.js"></script></body>
-</html>
+    --></div><!-- Content --><topology-management></topology-management><footer class="footer"><div>Knox Manager Version 0.0.1</div><gateway-version></gateway-version></footer><script type="text/javascript" src="inline.b47d11937c275f76ce02.bundle.js"></script><script type="text/javascript" src="scripts.2c89ed78f648df44c10f.bundle.js"></script><script type="text/javascript" src="main.a69408978854e3a77fb2.bundle.js"></script></div></body></html>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/inline.b47d11937c275f76ce02.bundle.js
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/inline.b47d11937c275f76ce02.bundle.js b/gateway-applications/src/main/resources/applications/admin-ui/app/inline.b47d11937c275f76ce02.bundle.js
new file mode 100644
index 0000000..830c105
--- /dev/null
+++ b/gateway-applications/src/main/resources/applications/admin-ui/app/inline.b47d11937c275f76ce02.bundle.js
@@ -0,0 +1 @@
+!function(e){function n(r){if(t[r])return t[r].exports;var o=t[r]={i:r,l:!1,exports:{}};return e[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}var r=window.webpackJsonp;window.webpackJsonp=function(t,a,c){for(var u,i,f,l=0,s=[];l<t.length;l++)o[i=t[l]]&&s.push(o[i][0]),o[i]=0;for(u in a)Object.prototype.hasOwnProperty.call(a,u)&&(e[u]=a[u]);for(r&&r(t,a,c);s.length;)s.shift()();if(c)for(l=0;l<c.length;l++)f=n(n.s=c[l]);return f};var t={},o={2:0};n.e=function(e){function r(){u.onerror=u.onload=null,clearTimeout(i);var n=o[e];0!==n&&(n&&n[1](new Error("Loading chunk "+e+" failed.")),o[e]=void 0)}var t=o[e];if(0===t)return new Promise(function(e){e()});if(t)return t[2];var a=new Promise(function(n,r){t=o[e]=[n,r]});t[2]=a;var c=document.getElementsByTagName("head")[0],u=document.createElement("script");u.type="text/javascript",u.charset="utf-8",u.async=!0,u.timeout=12e4,n.nc&&u.setAttribute("nonce",n.nc),u.src=n.p+""+e+"."+{0:"a69408978854e3a77fb2",1:"d3594ed74ba57b178a74"}[e]+".ch
 unk.js";var i=setTimeout(r,12e4);return u.onerror=u.onload=r,c.appendChild(u),a},n.m=e,n.c=t,n.d=function(e,r,t){n.o(e,r)||Object.defineProperty(e,r,{configurable:!1,enumerable:!0,get:t})},n.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(r,"a",r),r},n.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},n.p="",n.oe=function(e){throw console.error(e),e}}([]);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/inline.d41d8cd98f00b204e980.bundle.js
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/inline.d41d8cd98f00b204e980.bundle.js b/gateway-applications/src/main/resources/applications/admin-ui/app/inline.d41d8cd98f00b204e980.bundle.js
deleted file mode 100644
index 98ff1a4..0000000
--- a/gateway-applications/src/main/resources/applications/admin-ui/app/inline.d41d8cd98f00b204e980.bundle.js
+++ /dev/null
@@ -1,2 +0,0 @@
-!function(e){function r(n){if(t[n])return t[n].exports;var o=t[n]={i:n,l:!1,exports:{}};return e[n].call(o.exports,o,o.exports,r),o.l=!0,o.exports}var n=window.webpackJsonp;window.webpackJsonp=function(t,c,u){for(var i,a,f,l=0,s=[];l<t.length;l++)a=t[l],o[a]&&s.push(o[a][0]),o[a]=0;for(i in c)Object.prototype.hasOwnProperty.call(c,i)&&(e[i]=c[i]);for(n&&n(t,c,u);s.length;)s.shift()();if(u)for(l=0;l<u.length;l++)f=r(r.s=u[l]);return f};var t={},o={3:0};r.e=function(e){function n(){c.onerror=c.onload=null,clearTimeout(u);var r=o[e];0!==r&&(r&&r[1](new Error("Loading chunk "+e+" failed.")),o[e]=void 0)}if(0===o[e])return Promise.resolve();if(o[e])return o[e][2];var t=document.getElementsByTagName("head")[0],c=document.createElement("script");c.type="text/javascript",c.charset="utf-8",c.async=!0,c.timeout=12e4,c.src=r.p+""+e+"."+{0:"806d67070af66e18c2fc",1:"b2328beb0372c051d06d"}[e]+".chunk.js";var u=setTimeout(n,12e4);c.onerror=c.onload=n,t.appendChild(c);var i=new Promise(function(r,n
 ){o[e]=[r,n]});return o[e][2]=i},r.m=e,r.c=t,r.i=function(e){return e},r.d=function(e,r,n){Object.defineProperty(e,r,{configurable:!1,enumerable:!0,get:n})},r.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(n,"a",n),n},r.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},r.p="",r.oe=function(e){throw console.error(e),e}}([]);
-//# sourceMappingURL=inline.d41d8cd98f00b204e980.bundle.map
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/inline.d41d8cd98f00b204e980.bundle.map
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/inline.d41d8cd98f00b204e980.bundle.map b/gateway-applications/src/main/resources/applications/admin-ui/app/inline.d41d8cd98f00b204e980.bundle.map
deleted file mode 100644
index a69df12..0000000
--- a/gateway-applications/src/main/resources/applications/admin-ui/app/inline.d41d8cd98f00b204e980.bundle.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"sources":["webpack:///inline.d41d8cd98f00b204e980.bundle.js","webpack:///webpack/bootstrap 1715ff82e9b7d7596e76"],"names":["modules","__webpack_require__","moduleId","installedModules","exports","module","i","l","call","parentJsonpFunction","window","chunkIds","moreModules","executeModules","chunkId","result","resolves","length","installedChunks","push","Object","prototype","hasOwnProperty","shift","s","3","e","onScriptComplete","script","onerror","onload","clearTimeout","timeout","chunk","Error","undefined","Promise","resolve","head","document","getElementsByTagName","createElement","type","charset","async","src","p","0","1","setTimeout","appendChild","promise","reject","m","c","value","d","name","getter","defineProperty","configurable","enumerable","get","n","__esModule","o","object","property","oe","err","console","error"],"mappings":"CAAS,SAAUA,GCqCnB,QAAAC,GAAAC,GAGA,GAAAC,EAAAD,GACA,MAAAC,GAAAD,GAAAE,OAGA,IAAAC,GAAAF,EAAAD,IACAI,EAAAJ,EACAK,GAAA,EACAH,WAUA,OANAJ,
 GAAAE,GAAAM,KAAAH,EAAAD,QAAAC,IAAAD,QAAAH,GAGAI,EAAAE,GAAA,EAGAF,EAAAD,QAxDA,GAAAK,GAAAC,OAAA,YACAA,QAAA,sBAAAC,EAAAC,EAAAC,GAIA,IADA,GAAAX,GAAAY,EAAAC,EAAAT,EAAA,EAAAU,KACQV,EAAAK,EAAAM,OAAoBX,IAC5BQ,EAAAH,EAAAL,GACAY,EAAAJ,IACAE,EAAAG,KAAAD,EAAAJ,GAAA,IACAI,EAAAJ,GAAA,CAEA,KAAAZ,IAAAU,GACAQ,OAAAC,UAAAC,eAAAd,KAAAI,EAAAV,KACAF,EAAAE,GAAAU,EAAAV,GAIA,KADAO,KAAAE,EAAAC,EAAAC,GACAG,EAAAC,QACAD,EAAAO,SACA,IAAAV,EACA,IAAAP,EAAA,EAAYA,EAAAO,EAAAI,OAA2BX,IACvCS,EAAAd,IAAAuB,EAAAX,EAAAP,GAGA,OAAAS,GAIA,IAAAZ,MAGAe,GACAO,EAAA,EA6BAxB,GAAAyB,EAAA,SAAAZ,GAmBA,QAAAa,KAEAC,EAAAC,QAAAD,EAAAE,OAAA,KACAC,aAAAC,EACA,IAAAC,GAAAf,EAAAJ,EACA,KAAAmB,IACAA,KAAA,MAAAC,OAAA,iBAAApB,EAAA,aACAI,EAAAJ,GAAAqB,QAzBA,OAAAjB,EAAAJ,GACA,MAAAsB,SAAAC,SAGA,IAAAnB,EAAAJ,GACA,MAAAI,GAAAJ,GAAA,EAGA,IAAAwB,GAAAC,SAAAC,qBAAA,WACAZ,EAAAW,SAAAE,cAAA,SACAb,GAAAc,KAAA,kBACAd,EAAAe,QAAA,QACAf,EAAAgB,OAAA,EACAhB,EAAAI,QAAA,KAEAJ,EAAAiB,IAAA5C,EAAA6C,EAAA,GAAAhC,EAAA,KAA8DiC,EAAA,uBAAAC,EAAA,wBAAsDlC,GAAA,WACpH,IAAAkB,GAAAiB,W
 AAAtB,EAAA,KACAC,GAAAC,QAAAD,EAAAE,OAAAH,EAWAW,EAAAY,YAAAtB,EAEA,IAAAuB,GAAA,GAAAf,SAAA,SAAAC,EAAAe,GACAlC,EAAAJ,IAAAuB,EAAAe,IAEA,OAAAlC,GAAAJ,GAAA,GAAAqC,GAIAlD,EAAAoD,EAAArD,EAGAC,EAAAqD,EAAAnD,EAGAF,EAAAK,EAAA,SAAAiD,GAA2C,MAAAA,IAG3CtD,EAAAuD,EAAA,SAAApD,EAAAqD,EAAAC,GACAtC,OAAAuC,eAAAvD,EAAAqD,GACAG,cAAA,EACAC,YAAA,EACAC,IAAAJ,KAKAzD,EAAA8D,EAAA,SAAA1D,GACA,GAAAqD,GAAArD,KAAA2D,WACA,WAA2B,MAAA3D,GAAA,SAC3B,WAAiC,MAAAA,GAEjC,OADAJ,GAAAuD,EAAAE,EAAA,IAAAA,GACAA,GAIAzD,EAAAgE,EAAA,SAAAC,EAAAC,GAAsD,MAAA/C,QAAAC,UAAAC,eAAAd,KAAA0D,EAAAC,IAGtDlE,EAAA6C,EAAA,GAGA7C,EAAAmE,GAAA,SAAAC,GAA8D,KAApBC,SAAAC,MAAAF,GAAoBA","file":"inline.d41d8cd98f00b204e980.bundle.js","sourcesContent":["/******/ (function(modules) { // webpackBootstrap\n/******/ \t// install a JSONP callback for chunk loading\n/******/ \tvar parentJsonpFunction = window[\"webpackJsonp\"];\n/******/ \twindow[\"webpackJsonp\"] = function webpackJsonpCallback(chunkIds, moreModules, executeModules) {\n/******/ \t\t// add \"mor
 eModules\" to the modules object,\n/******/ \t\t// then flag all \"chunkIds\" as loaded and fire callback\n/******/ \t\tvar moduleId, chunkId, i = 0, resolves = [], result;\n/******/ \t\tfor(;i < chunkIds.length; i++) {\n/******/ \t\t\tchunkId = chunkIds[i];\n/******/ \t\t\tif(installedChunks[chunkId])\n/******/ \t\t\t\tresolves.push(installedChunks[chunkId][0]);\n/******/ \t\t\tinstalledChunks[chunkId] = 0;\n/******/ \t\t}\n/******/ \t\tfor(moduleId in moreModules) {\n/******/ \t\t\tif(Object.prototype.hasOwnProperty.call(moreModules, moduleId)) {\n/******/ \t\t\t\tmodules[moduleId] = moreModules[moduleId];\n/******/ \t\t\t}\n/******/ \t\t}\n/******/ \t\tif(parentJsonpFunction) parentJsonpFunction(chunkIds, moreModules, executeModules);\n/******/ \t\twhile(resolves.length)\n/******/ \t\t\tresolves.shift()();\n/******/ \t\tif(executeModules) {\n/******/ \t\t\tfor(i=0; i < executeModules.length; i++) {\n/******/ \t\t\t\tresult = __webpack_require__(__webpack_require__.s = executeModu
 les[i]);\n/******/ \t\t\t}\n/******/ \t\t}\n/******/ \t\treturn result;\n/******/ \t};\n/******/\n/******/ \t// The module cache\n/******/ \tvar installedModules = {};\n/******/\n/******/ \t// objects to store loaded and loading chunks\n/******/ \tvar installedChunks = {\n/******/ \t\t3: 0\n/******/ \t};\n/******/\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(installedModules[moduleId])\n/******/ \t\t\treturn installedModules[moduleId].exports;\n/******/\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = installedModules[moduleId] = {\n/******/ \t\t\ti: moduleId,\n/******/ \t\t\tl: false,\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/\n/******/ \t\t// Execute the module function\n/******/ \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n/******/\n/******/ \t\t// Flag the module as 
 loaded\n/******/ \t\tmodule.l = true;\n/******/\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/\n/******/ \t// This file contains only the entry chunk.\n/******/ \t// The chunk loading function for additional chunks\n/******/ \t__webpack_require__.e = function requireEnsure(chunkId) {\n/******/ \t\tif(installedChunks[chunkId] === 0)\n/******/ \t\t\treturn Promise.resolve();\n/******/\n/******/ \t\t// an Promise means \"currently loading\".\n/******/ \t\tif(installedChunks[chunkId]) {\n/******/ \t\t\treturn installedChunks[chunkId][2];\n/******/ \t\t}\n/******/ \t\t// start chunk loading\n/******/ \t\tvar head = document.getElementsByTagName('head')[0];\n/******/ \t\tvar script = document.createElement('script');\n/******/ \t\tscript.type = 'text/javascript';\n/******/ \t\tscript.charset = 'utf-8';\n/******/ \t\tscript.async = true;\n/******/ \t\tscript.timeout = 120000;\n/******/\n/******/ \t\tscript.src = __webpack_requ
 ire__.p + \"\" + chunkId + \".\" + {\"0\":\"806d67070af66e18c2fc\",\"1\":\"b2328beb0372c051d06d\"}[chunkId] + \".chunk.js\";\n/******/ \t\tvar timeout = setTimeout(onScriptComplete, 120000);\n/******/ \t\tscript.onerror = script.onload = onScriptComplete;\n/******/ \t\tfunction onScriptComplete() {\n/******/ \t\t\t// avoid mem leaks in IE.\n/******/ \t\t\tscript.onerror = script.onload = null;\n/******/ \t\t\tclearTimeout(timeout);\n/******/ \t\t\tvar chunk = installedChunks[chunkId];\n/******/ \t\t\tif(chunk !== 0) {\n/******/ \t\t\t\tif(chunk) chunk[1](new Error('Loading chunk ' + chunkId + ' failed.'));\n/******/ \t\t\t\tinstalledChunks[chunkId] = undefined;\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t\thead.appendChild(script);\n/******/\n/******/ \t\tvar promise = new Promise(function(resolve, reject) {\n/******/ \t\t\tinstalledChunks[chunkId] = [resolve, reject];\n/******/ \t\t});\n/******/ \t\treturn installedChunks[chunkId][2] = promise;\n/******/ \t};\n/******/\n/******/
  \t// expose the modules object (__webpack_modules__)\n/******/ \t__webpack_require__.m = modules;\n/******/\n/******/ \t// expose the module cache\n/******/ \t__webpack_require__.c = installedModules;\n/******/\n/******/ \t// identity function for calling harmory imports with the correct context\n/******/ \t__webpack_require__.i = function(value) { return value; };\n/******/\n/******/ \t// define getter function for harmory exports\n/******/ \t__webpack_require__.d = function(exports, name, getter) {\n/******/ \t\tObject.defineProperty(exports, name, {\n/******/ \t\t\tconfigurable: false,\n/******/ \t\t\tenumerable: true,\n/******/ \t\t\tget: getter\n/******/ \t\t});\n/******/ \t};\n/******/\n/******/ \t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t__webpack_require__.n = function(module) {\n/******/ \t\tvar getter = module && module.__esModule ?\n/******/ \t\t\tfunction getDefault() { return module['default']; } :\n/******/ \t\t\tfunction getM
 oduleExports() { return module; };\n/******/ \t\t__webpack_require__.d(getter, 'a', getter);\n/******/ \t\treturn getter;\n/******/ \t};\n/******/\n/******/ \t// Object.prototype.hasOwnProperty.call\n/******/ \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n/******/\n/******/ \t// __webpack_public_path__\n/******/ \t__webpack_require__.p = \"\";\n/******/\n/******/ \t// on error function for async loading\n/******/ \t__webpack_require__.oe = function(err) { console.error(err); throw err; };\n/******/ })\n/************************************************************************/\n/******/ ([]);\n\n\n// WEBPACK FOOTER //\n// inline.d41d8cd98f00b204e980.bundle.js"," \t// install a JSONP callback for chunk loading\n \tvar parentJsonpFunction = window[\"webpackJsonp\"];\n \twindow[\"webpackJsonp\"] = function webpackJsonpCallback(chunkIds, moreModules, executeModules) {\n \t\t// add \"moreModules\" to the modules ob
 ject,\n \t\t// then flag all \"chunkIds\" as loaded and fire callback\n \t\tvar moduleId, chunkId, i = 0, resolves = [], result;\n \t\tfor(;i < chunkIds.length; i++) {\n \t\t\tchunkId = chunkIds[i];\n \t\t\tif(installedChunks[chunkId])\n \t\t\t\tresolves.push(installedChunks[chunkId][0]);\n \t\t\tinstalledChunks[chunkId] = 0;\n \t\t}\n \t\tfor(moduleId in moreModules) {\n \t\t\tif(Object.prototype.hasOwnProperty.call(moreModules, moduleId)) {\n \t\t\t\tmodules[moduleId] = moreModules[moduleId];\n \t\t\t}\n \t\t}\n \t\tif(parentJsonpFunction) parentJsonpFunction(chunkIds, moreModules, executeModules);\n \t\twhile(resolves.length)\n \t\t\tresolves.shift()();\n \t\tif(executeModules) {\n \t\t\tfor(i=0; i < executeModules.length; i++) {\n \t\t\t\tresult = __webpack_require__(__webpack_require__.s = executeModules[i]);\n \t\t\t}\n \t\t}\n \t\treturn result;\n \t};\n\n \t// The module cache\n \tvar installedModules = {};\n\n \t// objects to store loaded and loading chunks\n \tvar installe
 dChunks = {\n \t\t3: 0\n \t};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId])\n \t\t\treturn installedModules[moduleId].exports;\n\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n \t// This file contains only the entry chunk.\n \t// The chunk loading function for additional chunks\n \t__webpack_require__.e = function requireEnsure(chunkId) {\n \t\tif(installedChunks[chunkId] === 0)\n \t\t\treturn Promise.resolve();\n\n \t\t// an Promise means \"currently loading\".\n \t\tif(installedChunks[
 chunkId]) {\n \t\t\treturn installedChunks[chunkId][2];\n \t\t}\n \t\t// start chunk loading\n \t\tvar head = document.getElementsByTagName('head')[0];\n \t\tvar script = document.createElement('script');\n \t\tscript.type = 'text/javascript';\n \t\tscript.charset = 'utf-8';\n \t\tscript.async = true;\n \t\tscript.timeout = 120000;\n\n \t\tscript.src = __webpack_require__.p + \"\" + chunkId + \".\" + {\"0\":\"806d67070af66e18c2fc\",\"1\":\"b2328beb0372c051d06d\"}[chunkId] + \".chunk.js\";\n \t\tvar timeout = setTimeout(onScriptComplete, 120000);\n \t\tscript.onerror = script.onload = onScriptComplete;\n \t\tfunction onScriptComplete() {\n \t\t\t// avoid mem leaks in IE.\n \t\t\tscript.onerror = script.onload = null;\n \t\t\tclearTimeout(timeout);\n \t\t\tvar chunk = installedChunks[chunkId];\n \t\t\tif(chunk !== 0) {\n \t\t\t\tif(chunk) chunk[1](new Error('Loading chunk ' + chunkId + ' failed.'));\n \t\t\t\tinstalledChunks[chunkId] = undefined;\n \t\t\t}\n \t\t};\n \t\thead.appendCh
 ild(script);\n\n \t\tvar promise = new Promise(function(resolve, reject) {\n \t\t\tinstalledChunks[chunkId] = [resolve, reject];\n \t\t});\n \t\treturn installedChunks[chunkId][2] = promise;\n \t};\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// identity function for calling harmory imports with the correct context\n \t__webpack_require__.i = function(value) { return value; };\n\n \t// define getter function for harmory exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tObject.defineProperty(exports, name, {\n \t\t\tconfigurable: false,\n \t\t\tenumerable: true,\n \t\t\tget: getter\n \t\t});\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; 
 } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n \t// on error function for async loading\n \t__webpack_require__.oe = function(err) { console.error(err); throw err; };\n\n\n\n// WEBPACK FOOTER //\n// webpack/bootstrap 1715ff82e9b7d7596e76"],"sourceRoot":""}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.js
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.js b/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.js
deleted file mode 100644
index b63f615..0000000
--- a/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.js
+++ /dev/null
@@ -1,2 +0,0 @@
-webpackJsonp([0,3],{122:function(t,e,o){"use strict";var n=o(0),i=o(180),a=o(341),r=(o.n(a),o(90));o.n(r);o.d(e,"a",function(){return s});var c=this&&this.__decorate||function(t,e,o,n){var i,a=arguments.length,r=a<3?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)r=Reflect.decorate(t,e,o,n);else for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},l=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(t,e)},s=function(){function t(t){this.http=t,this.apiUrl="/gateway/manager/api/v1/",this.topologiesUrl=this.apiUrl+"topologies",this.selectedTopologySource=new r.Subject,this.selectedTopology$=this.selectedTopologySource.asObservable(),this.changedTopologySource=new r.Subject,this.changedTopology$=this.changedTopologySource.asObservable()}return t.prototype.getTopologies=func
 tion(){var t=new i.a;return this.addJsonHeaders(t),this.http.get(this.topologiesUrl,{headers:t}).toPromise().then(function(t){return t.json().topologies.topology}).catch(this.handleError)},t.prototype.getTopology=function(t){var e=new i.a;return this.addXmlHeaders(e),this.http.get(t,{headers:e}).toPromise().then(function(t){return t.text()}).catch(this.handleError)},t.prototype.saveTopology=function(t,e){var o=new i.a;return this.addXmlHeaders(o),this.addCsrfHeaders(o),this.http.put(t,e,{headers:o}).toPromise().then(function(){return e}).catch(this.handleError)},t.prototype.createTopology=function(t,e){var o=new i.a;this.addXmlHeaders(o),this.addCsrfHeaders(o);var n=this.topologiesUrl+"/"+t;return this.http.put(n,e,{headers:o}).toPromise().then(function(){return e}).catch(this.handleError)},t.prototype.deleteTopology=function(t){var e=new i.a;return this.addJsonHeaders(e),this.addCsrfHeaders(e),this.http.delete(t,{headers:e}).toPromise().then(function(t){return t.text()}).catch(this
 .handleError)},t.prototype.addJsonHeaders=function(t){t.append("Accept","application/json"),t.append("Content-Type","application/json")},t.prototype.addXmlHeaders=function(t){t.append("Accept","application/xml"),t.append("Content-Type","application/xml")},t.prototype.addCsrfHeaders=function(t){t.append("X-XSRF-Header","admin-ui")},t.prototype.selectedTopology=function(t){this.selectedTopologySource.next(t)},t.prototype.changedTopology=function(t){this.changedTopologySource.next(t)},t.prototype.handleError=function(t){return console.error("An error occurred",t),Promise.reject(t.message||t)},t=c([o.i(n.Injectable)(),l("design:paramtypes",["function"==typeof(e="undefined"!=typeof i.b&&i.b)&&e||Object])],t);var e}()},293:function(t,e,o){"use strict";var n=o(0),i=o(122);o.d(e,"a",function(){return c});var a=this&&this.__decorate||function(t,e,o,n){var i,a=arguments.length,r=a<3?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.dec
 orate)r=Reflect.decorate(t,e,o,n);else for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},r=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(t,e)},c=function(){function t(t){this.topologyService=t}return t=a([o.i(n.Component)({selector:"topology-management",template:'\n      <div class="container">\n        <div class="row">\n          <div class="col-md-5">\n            <topology></topology>\n         </div>\n          <div class="col-md-7">\n            <topology-detail></topology-detail>\n          </div>\n        </div>\n      </div>\n  ',providers:[i.a]}),r("design:paramtypes",["function"==typeof(e="undefined"!=typeof i.a&&i.a)&&e||Object])],t);var e}()},294:function(t,e,o){"use strict";var n=o(0),i=o(180),a=o(341);o.n(a);o.d(e,"a",function(){return l});var r=this&&this.__decorate||function(t,e,o,n){var i,a=arguments.length,r=a<3
 ?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)r=Reflect.decorate(t,e,o,n);else for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},c=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(t,e)},l=function(){function t(t){this.http=t,this.apiUrl="/gateway/manager/api/v1/version"}return t.prototype.getVersion=function(){var t=new i.a;return this.addHeaders(t),this.http.get(this.apiUrl,{headers:t}).toPromise().then(function(t){return t.json().ServerVersion}).catch(this.handleError)},t.prototype.addHeaders=function(t){t.append("Accept","application/json"),t.append("Content-Type","application/json")},t.prototype.handleError=function(t){return console.error("An error occurred",t),Promise.reject(t.message||t)},t=r([o.i(n.Injectable)(),c("design:paramtypes",["function"==typeof(e
 ="undefined"!=typeof i.b&&i.b)&&e||Object])],t);var e}()},295:function(t,e,o){"use strict";var n=o(0);o.d(e,"a",function(){return r});var i=this&&this.__decorate||function(t,e,o,n){var i,a=arguments.length,r=a<3?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)r=Reflect.decorate(t,e,o,n);else for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},a=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(t,e)},r=function(){function t(){this.active=!1}return i([o.i(n.Input)("tabTitle"),a("design:type",String)],t.prototype,"title",void 0),i([o.i(n.Input)(),a("design:type",Object)],t.prototype,"active",void 0),t=i([o.i(n.Component)({selector:"tab",styles:["\n    .pane{\n      padding: 1em;\n    }\n  "],template:'\n    <div [hidden]="!active" class="pane">\n    </div>\n  '}),a("desig
 n:paramtypes",[])],t)}()},344:function(t,e){function o(t){throw new Error("Cannot find module '"+t+"'.")}o.keys=function(){return[]},o.resolve=o,t.exports=o,o.id=344},345:function(t,e,o){"use strict";var n=o(454),i=(o.n(n),o(425)),a=o(0),r=o(453),c=o(447);r.a.production&&o.i(a.enableProdMode)(),o.i(i.a)().bootstrapModule(c.a)},445:function(t,e,o){"use strict";var n=o(0),i=o(184),a=o(180),r=o(418),c=o(293),l=o(122),s=o(294),p=o(446),f=o(449),d=o(448),u=o(452),y=o(450),h=o(295),g=o(451),m=o(608),b=(o.n(m),o(340));o.n(b);o.d(e,"a",function(){return R});var v=this&&this.__decorate||function(t,e,o,n){var i,a=arguments.length,r=a<3?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)r=Reflect.decorate(t,e,o,n);else for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},j=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof R
 eflect.metadata)return Reflect.metadata(t,e)},R=function(){function t(){}return t=v([o.i(n.NgModule)({imports:[i.b,a.c,r.a,b.Ng2Bs3ModalModule],declarations:[c.a,f.a,d.a,p.a,m.AceEditorDirective,u.a,y.a,g.a,h.a],providers:[l.a,s.a],bootstrap:[c.a,p.a]}),j("design:paramtypes",[])],t)}()},446:function(t,e,o){"use strict";var n=o(0),i=o(294);o.d(e,"a",function(){return c});var a=this&&this.__decorate||function(t,e,o,n){var i,a=arguments.length,r=a<3?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)r=Reflect.decorate(t,e,o,n);else for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},r=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(t,e)},c=function(){function t(t){this.gatewayVersionService=t}return t.prototype.getVersion=function(){var t=this;this.gatewayVersionService.ge
 tVersion().then(function(e){return t.gatewayVersion=e})},t.prototype.ngOnInit=function(){this.getVersion()},t=a([o.i(n.Component)({selector:"gateway-version",template:'\n        <div *ngIf="gatewayVersion">\n            <span class="small"><cite>Knox Gateway Version</cite> {{this.gatewayVersion.version}}</span>\n            <span class="small"><cite>Hash</cite> {{this.gatewayVersion.hash}}</span>\n</div>',providers:[i.a]}),r("design:paramtypes",["function"==typeof(e="undefined"!=typeof i.a&&i.a)&&e||Object])],t);var e}()},447:function(t,e,o){"use strict";var n=(o(293),o(445));o.d(e,"a",function(){return n.a})},448:function(t,e,o){"use strict";var n=o(0),i=o(122),a=o(340);o.n(a);o.d(e,"a",function(){return l});var r=this&&this.__decorate||function(t,e,o,n){var i,a=arguments.length,r=a<3?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)r=Reflect.decorate(t,e,o,n);else for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(
 r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},c=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(t,e)},l=function(){function t(t){this.topologyService=t,this.title="Topology Detail",this.options={useWorker:!1,printMargin:!1}}return t.prototype.ngOnInit=function(){var t=this;this.topologyService.selectedTopology$.subscribe(function(e){return t.populateContent(e)})},t.prototype.setTitle=function(t){this.titleSuffix=t},t.prototype.onChange=function(t){this.changedTopology=t},t.prototype.saveTopology=function(){var t=this;this.topologyService.saveTopology(this.topology.href,this.changedTopology).then(function(e){return t.topologyService.changedTopology(t.topology.name)})},t.prototype.createTopology=function(){var t=this;this.changedTopology?this.topologyService.createTopology(this.newTopologyName,this.changedTopology).then(function(e){return t.topologyService.changedTopology(t.newTo
 pologyName)}):this.topologyService.createTopology(this.newTopologyName,this.topologyContent).then(function(e){return t.topologyService.changedTopology(t.newTopologyName)})},t.prototype.deleteTopology=function(){var t=this;this.topologyService.deleteTopology(this.topology.href).then(function(e){return t.topologyService.changedTopology(t.topology.name)})},t.prototype.populateContent=function(t){var e=this;this.topology=t,this.setTitle(t.name),this.topology&&this.topology.href&&this.topologyService.getTopology(this.topology.href).then(function(t){return e.topologyContent=t})},r([o.i(n.ViewChild)("duplicateModal"),c("design:type","function"==typeof(e="undefined"!=typeof a.ModalComponent&&a.ModalComponent)&&e||Object)],t.prototype,"duplicateModal",void 0),r([o.i(n.ViewChild)("deleteConfirmModal"),c("design:type","function"==typeof(l="undefined"!=typeof a.ModalComponent&&a.ModalComponent)&&l||Object)],t.prototype,"deleteConfirmModal",void 0),t=r([o.i(n.Component)({selector:"topology-detai
 l",template:'\n     <div class="panel panel-default">\n        <div class="panel-heading">\n            <h4 class="panel-title">{{title}} <span class="label label-default pull-right">{{titleSuffix}}</span></h4>\n         </div>\n     <div *ngIf="topologyContent" class="panel-body">\n      <div ace-editor\n       [readOnly]="false" [text]="topologyContent | xml" [mode]="\'xml\'" [options]="options" \n        [theme]="\'monokai\'"\n         style="min-height: 300px; width:100%; overflow: auto;" (textChanged)="onChange($event)">\n      </div>\n       <div class="panel-footer">\n        <button (click)="duplicateModal.open(\'sm\')" class="btn btn-default btn-sm" type="submit">\n            <span class="glyphicon glyphicon-duplicate" aria-hidden="true"></span>\n        </button>\n        <button (click)="deleteConfirmModal.open(\'sm\')" class="btn btn-default btn-sm" type="submit">\n            <span class="glyphicon glyphicon-trash" aria-hidden="true"></span>\n        </button>\n       
 <button (click)="saveTopology()" class="btn btn-default btn-sm pull-right" [disabled]="!changedTopology" type="submit">\n            <span class="glyphicon glyphicon-floppy-disk" aria-hidden="true"></span>\n        </button>\n       </div>\n         \n    </div>\n    <modal (onClose)="createTopology()" #duplicateModal>\n\n        <modal-header [show-close]="true">\n            <h4 class="modal-title">Create a copy</h4>\n        </modal-header>\n        <modal-body>\n            <div class="form-group">\n                <label for="textbox">Name the new topology</label>\n                <input autofocus type="text" class="form-control" required [(ngModel)]="newTopologyName" id="textbox">\n            </div> \n        </modal-body>\n        <modal-footer>\n            <button type="button" class="btn btn-default btn-sm" data-dismiss="duplicateModal" (click)="duplicateModal.dismiss()">Cancel</button>\n            <button type="button" class="btn btn-primary btn-sm" [disabled]="!newTopo
 logyName" (click)="duplicateModal.close()">Ok</button>\n        </modal-footer>\n    </modal>\n    <modal (onClose)="deleteTopology()" #deleteConfirmModal>\n        <modal-header [show-close]="true">\n            <h4 class="modal-title">Deleting Topology {{titleSuffix}}</h4>\n        </modal-header>\n        <modal-body>\n            Are you sure you want to delete the topology?\n        </modal-body>\n        <modal-footer>\n            <button type="button" class="btn btn-default btn-sm" data-dismiss="deleteConfirmModal" (click)="deleteConfirmModal.dismiss()">Cancel</button>\n            <button type="button" class="btn btn-primary btn-sm" (click)="deleteConfirmModal.close()">Ok</button>\n        </modal-footer>\n    </modal>\n   '}),c("design:paramtypes",["function"==typeof(s="undefined"!=typeof i.a&&i.a)&&s||Object])],t);var e,l,s}()},449:function(t,e,o){"use strict";var n=o(0),i=o(122);o.d(e,"a",function(){return c});var a=this&&this.__decorate||function(t,e,o,n){var i,a=argume
 nts.length,r=a<3?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)r=Reflect.decorate(t,e,o,n);else for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},r=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(t,e)},c=function(){function t(t){this.topologyService=t}return t.prototype.getTopologies=function(){var t=this;this.topologyService.getTopologies().then(function(e){return t.topologies=e})},t.prototype.ngOnInit=function(){var t=this;this.getTopologies(),this.topologyService.changedTopology$.subscribe(function(e){return t.getTopologies()})},t.prototype.onSelect=function(t){this.selectedTopology=t,this.topologyService.selectedTopology(t)},t=a([o.i(n.Component)({selector:"topology",template:'\n        <div class="table-responsive" style="max-height: 400px; width:100%; overf
 low: auto;">\n            <table class="table table-striped table-hover">\n              <thead>\n                <tr>\n                  <th>Topology Name</th>\n                  <th>Timestamp</th>\n                </tr>\n              </thead>\n         <tbody>\n         <tr *ngFor="let topology of topologies"\n          [class.selected]="topology === selectedTopology"\n        (click)="onSelect(topology)">\n         <td>{{topology.name}}</td> \n         <td>{{topology.timestamp | date:\'yMMMdjms\'}}</td> \n         </tr>\n        </tbody>\n        </table>\n        </div>\n       '}),r("design:paramtypes",["function"==typeof(e="undefined"!=typeof i.a&&i.a)&&e||Object])],t);var e}()},450:function(t,e,o){"use strict";var n=o(0);o.d(e,"a",function(){return r});var i=this&&this.__decorate||function(t,e,o,n){var i,a=arguments.length,r=a<3?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)r=Reflect.decorate(t,e,o,n);els
 e for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},a=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(t,e)},r=function(){function t(){}return t.prototype.transform=function(t){return vkbeautify.json(t)},t=i([o.i(n.Pipe)({name:"jsonpretty"}),a("design:paramtypes",[])],t)}()},451:function(t,e,o){"use strict";var n=o(0),i=o(295);o.d(e,"a",function(){return c});var a=this&&this.__decorate||function(t,e,o,n){var i,a=arguments.length,r=a<3?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)r=Reflect.decorate(t,e,o,n);else for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},r=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(t,e
 )},c=function(){function t(){}return t.prototype.ngAfterContentInit=function(){var t=this.tabs.filter(function(t){return t.active});0===t.length&&this.selectTab(this.tabs.first)},t.prototype.selectTab=function(t){this.tabs.toArray().forEach(function(t){return t.active=!1}),t.active=!0},a([o.i(n.ContentChildren)(i.a),r("design:type","function"==typeof(e="undefined"!=typeof n.QueryList&&n.QueryList)&&e||Object)],t.prototype,"tabs",void 0),t=a([o.i(n.Component)({selector:"tabs",template:'\n    <ul class="nav nav-tabs">\n      <li *ngFor="let tab of tabs" (click)="selectTab(tab)" [class.active]="tab.active">\n        <a>{{tab.title}}</a>\n      </li>\n    </ul>\n    \n  '}),r("design:paramtypes",[])],t);var e}()},452:function(t,e,o){"use strict";var n=o(0);o.d(e,"a",function(){return r});var i=this&&this.__decorate||function(t,e,o,n){var i,a=arguments.length,r=a<3?e:null===n?n=Object.getOwnPropertyDescriptor(e,o):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)r=Refle
 ct.decorate(t,e,o,n);else for(var c=t.length-1;c>=0;c--)(i=t[c])&&(r=(a<3?i(r):a>3?i(e,o,r):i(e,o))||r);return a>3&&r&&Object.defineProperty(e,o,r),r},a=this&&this.__metadata||function(t,e){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(t,e)},r=function(){function t(){}return t.prototype.transform=function(t){return vkbeautify.xml(t)},t=i([o.i(n.Pipe)({name:"xml"}),a("design:paramtypes",[])],t)}()},453:function(t,e,o){"use strict";o.d(e,"a",function(){return n});var n={production:!0}},454:function(t,e,o){"use strict";var n=o(471),i=(o.n(n),o(464)),a=(o.n(i),o(460)),r=(o.n(a),o(466)),c=(o.n(r),o(465)),l=(o.n(c),o(463)),s=(o.n(l),o(462)),p=(o.n(s),o(470)),f=(o.n(p),o(459)),d=(o.n(f),o(458)),u=(o.n(d),o(468)),y=(o.n(u),o(461)),h=(o.n(y),o(469)),g=(o.n(h),o(467)),m=(o.n(g),o(472)),b=(o.n(m),o(628));o.n(b)},629:function(t,e,o){t.exports=o(345)}},[629]);
-//# sourceMappingURL=main.806d67070af66e18c2fc.bundle.map
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.js.gz
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.js.gz b/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.js.gz
deleted file mode 100644
index 51d892f..0000000
Binary files a/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.js.gz and /dev/null differ


[42/49] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
Merge branch 'master' into KNOX-998-Package_Restructuring

# Conflicts:
#	gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
#	gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
#	gateway-provider-security-pac4j/src/main/java/org/apache/knox/gateway/pac4j/filter/Pac4jIdentityAdapter.java
#	gateway-server/src/main/java/org/apache/knox/gateway/services/CLIGatewayServices.java
#	gateway-server/src/main/java/org/apache/knox/gateway/services/DefaultGatewayServices.java
#	gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
#	gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
#	gateway-server/src/main/java/org/apache/knox/gateway/util/KnoxCLI.java
#	gateway-server/src/test/java/org/apache/knox/gateway/util/KnoxCLITest.java
#	gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/22a7304a
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/22a7304a
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/22a7304a

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 22a7304a602105ed01ed37e19cba086f3296e4af
Parents: e70904b 370c861
Author: Sandeep More <mo...@apache.org>
Authored: Thu Dec 14 10:38:39 2017 -0500
Committer: Sandeep More <mo...@apache.org>
Committed: Thu Dec 14 10:38:39 2017 -0500

----------------------------------------------------------------------
 .gitignore                                      |    1 +
 CHANGES                                         |   22 +-
 gateway-admin-ui/README.md                      |   34 +-
 gateway-admin-ui/angular-cli.json               |   22 +-
 gateway-admin-ui/package.json                   |   60 +-
 gateway-admin-ui/src/app/app.module.ts          |    6 +-
 .../src/app/topology-detail.component.ts        |   55 +-
 gateway-admin-ui/src/app/topology.component.ts  |    2 +-
 .../src/assets/knox-logo-transparent.gif        |  Bin 0 -> 19703 bytes
 gateway-admin-ui/src/index.html                 |    4 +-
 gateway-admin-ui/src/tsconfig.json              |    4 +-
 .../app/assets/knox-logo-transparent.gif        |  Bin 0 -> 19703 bytes
 .../applications/admin-ui/app/index.html        |   62 +-
 .../app/inline.b47d11937c275f76ce02.bundle.js   |    1 +
 .../app/inline.d41d8cd98f00b204e980.bundle.js   |    2 -
 .../app/inline.d41d8cd98f00b204e980.bundle.map  |    1 -
 .../app/main.806d67070af66e18c2fc.bundle.js     |    2 -
 .../app/main.806d67070af66e18c2fc.bundle.js.gz  |  Bin 3657 -> 0 bytes
 .../app/main.806d67070af66e18c2fc.bundle.map    |    1 -
 .../app/main.a69408978854e3a77fb2.bundle.js     |    1 +
 .../app/scripts.2c89ed78f648df44c10f.bundle.js  |   12 +
 .../app/styles.b2328beb0372c051d06d.bundle.js   |    2 -
 .../app/styles.b2328beb0372c051d06d.bundle.map  |    1 -
 .../app/styles.d41d8cd98f00b204e980.bundle.css  |    0
 ....d41d8cd98f00b204e9800998ecf8427e.bundle.css |    2 -
 .../app/vendor.48771018d3da89d3269f.bundle.js   | 2035 ------------------
 .../vendor.48771018d3da89d3269f.bundle.js.gz    |  Bin 459997 -> 0 bytes
 .../app/vendor.48771018d3da89d3269f.bundle.map  |    1 -
 .../discovery/ambari/AmbariClientCommon.java    |  102 +
 ...bariClusterConfigurationMonitorProvider.java |   35 +
 .../ambari/AmbariConfigurationMonitor.java      |  525 +++++
 .../topology/discovery/ambari/RESTInvoker.java  |  136 ++
 .../discovery/ambari/AmbariCluster.java         |    5 +
 .../ambari/AmbariServiceDiscovery.java          |  228 +-
 .../ambari/AmbariServiceDiscoveryMessages.java  |   51 +-
 .../ambari/ServiceURLPropertyConfig.java        |    2 +-
 ...iscovery.ClusterConfigurationMonitorProvider |   19 +
 .../ambari/AmbariConfigurationMonitorTest.java  |  319 +++
 .../ambari/AmbariServiceDiscoveryTest.java      |   34 +-
 .../pac4j/filter/Pac4jDispatcherFilter.java     |   11 +-
 .../pac4j/filter/Pac4jIdentityAdapter.java      |   33 +-
 .../gateway/pac4j/MockHttpServletRequest.java   |    8 +-
 .../knox/gateway/pac4j/Pac4jProviderTest.java   |  187 +-
 gateway-release/home/conf/gateway-site.xml      |   12 +
 gateway-release/home/conf/topologies/admin.xml  |   21 +-
 .../home/conf/topologies/knoxsso.xml            |    5 +-
 .../home/conf/topologies/manager.xml            |   21 +-
 .../home/conf/topologies/sandbox.xml            |   21 +-
 gateway-server/pom.xml                          |    9 +
 ...faultClusterConfigurationMonitorService.java |   81 +
 .../DefaultConfigurationMonitorProvider.java    |   31 +
 .../DefaultRemoteConfigurationMonitor.java      |  228 ++
 .../RemoteConfigurationMonitorFactory.java      |   74 +
 .../apache/knox/gateway/GatewayMessages.java    |   64 +-
 .../gateway/config/impl/GatewayConfigImpl.java  |   67 +-
 .../gateway/services/CLIGatewayServices.java    |   10 +
 .../services/DefaultGatewayServices.java        |   24 +-
 .../topology/impl/DefaultTopologyService.java   |   99 +-
 .../simple/SimpleDescriptorFactory.java         |    2 +-
 .../simple/SimpleDescriptorHandler.java         |   78 +-
 .../simple/SimpleDescriptorMessages.java        |    9 +
 .../org/apache/knox/gateway/util/KnoxCLI.java   |  411 +++-
 ...y.monitor.RemoteConfigurationMonitorProvider |   19 +
 ...emoteConfigurationRegistryClientService.java |  263 +++
 ...figurationRegistryClientServiceProvider.java |   32 +
 .../ZooKeeperConfigurationMonitorTest.java      |  355 +++
 .../config/impl/GatewayConfigImplTest.java      |   43 +
 .../topology/DefaultTopologyServiceTest.java    |   10 +-
 .../simple/SimpleDescriptorFactoryTest.java     |   13 +-
 .../apache/knox/gateway/util/KnoxCLITest.java   |  385 +++-
 .../knox/gateway/websockets/BadUrlTest.java     |   11 +
 .../gateway/websockets/WebsocketEchoTest.java   |   11 +
 .../WebsocketMultipleConnectionTest.java        |   11 +
 ...teConfigurationRegistryClientServiceProvider |   19 +
 .../services/ambariui/2.2.1/rewrite.xml         |  104 -
 .../services/ambariui/2.2.1/service.xml         |   92 -
 gateway-service-remoteconfig/pom.xml            |   89 +
 .../remote/RemoteConfigurationMessages.java     |   49 +
 ...nfigurationRegistryClientServiceFactory.java |   41 +
 ...figurationRegistryClientServiceProvider.java |   27 +
 .../RemoteConfigurationRegistryConfig.java      |   43 +
 .../DefaultRemoteConfigurationRegistries.java   |  104 +
 .../config/RemoteConfigurationRegistries.java   |   33 +
 .../RemoteConfigurationRegistriesAccessor.java  |   60 +
 .../RemoteConfigurationRegistriesParser.java    |   48 +
 .../config/RemoteConfigurationRegistry.java     |  139 ++
 .../config/remote/zk/CuratorClientService.java  |  464 ++++
 .../RemoteConfigurationRegistryJAASConfig.java  |  179 ++
 .../remote/zk/ZooKeeperClientService.java       |   25 +
 .../zk/ZooKeeperClientServiceProvider.java      |   34 +
 ...teConfigurationRegistryClientServiceProvider |   19 +
 ...efaultRemoteConfigurationRegistriesTest.java |  184 ++
 ...teConfigurationRegistryConfigParserTest.java |  108 +
 .../util/RemoteRegistryConfigTestUtils.java     |  117 +
 ...eConfigurationRegistryClientServiceTest.java |  424 ++++
 ...moteConfigurationRegistryJAASConfigTest.java |  255 +++
 .../RemoteConfigurationRegistryClient.java      |   80 +
 ...emoteConfigurationRegistryClientService.java |   28 +
 .../ClusterConfigurationMonitorService.java     |   43 +
 .../discovery/ClusterConfigurationMonitor.java  |   48 +
 .../ClusterConfigurationMonitorProvider.java    |   27 +
 .../monitor/RemoteConfigurationMonitor.java     |   24 +
 .../RemoteConfigurationMonitorProvider.java     |   34 +
 .../knox/gateway/config/GatewayConfig.java      |   50 +
 .../knox/gateway/services/GatewayServices.java  |    4 +
 .../apache/knox/gateway/GatewayTestConfig.java  |   38 +-
 .../java/org/apache/knox/test/TestUtils.java    |    2 +-
 gateway-test/pom.xml                            |    6 +
 .../SimpleDescriptorHandlerFuncTest.java        |  275 +++
 .../monitor/RemoteConfigurationMonitorTest.java |  603 ++++++
 .../knox/gateway/GatewayBasicFuncTest.java      |    2 +-
 ...eway.topology.discovery.ServiceDiscoveryType |   19 +
 pom.xml                                         |   18 +-
 113 files changed, 7743 insertions(+), 2663 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/.gitignore
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
index d71e079,0000000..bcf3adc
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
@@@ -1,115 -1,0 +1,120 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.discovery.ambari;
 +
 +import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
 +
 +import java.util.ArrayList;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
 +class AmbariCluster implements ServiceDiscovery.Cluster {
 +
 +    private String name = null;
 +
 +    private AmbariDynamicServiceURLCreator urlCreator;
 +
 +    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
 +
 +    private Map<String, AmbariComponent> components = null;
 +
 +
 +    AmbariCluster(String name) {
 +        this.name = name;
 +        components = new HashMap<>();
 +        urlCreator = new AmbariDynamicServiceURLCreator(this);
 +    }
 +
 +    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
 +        if (!serviceConfigurations.keySet().contains(serviceName)) {
 +            serviceConfigurations.put(serviceName, new HashMap<>());
 +        }
 +        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
 +    }
 +
 +
 +    void addComponent(AmbariComponent component) {
 +        components.put(component.getName(), component);
 +    }
 +
 +
 +    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
 +        ServiceConfiguration sc = null;
 +        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
 +        if (configs != null) {
 +            sc = configs.get(configurationType);
 +        }
 +        return sc;
 +    }
 +
 +
++    Map<String, Map<String, ServiceConfiguration>> getServiceConfigurations() {
++        return serviceConfigurations;
++    }
++
++
 +    Map<String, AmbariComponent> getComponents() {
 +        return components;
 +    }
 +
 +
 +    AmbariComponent getComponent(String name) {
 +        return components.get(name);
 +    }
 +
 +
 +    @Override
 +    public String getName() {
 +        return name;
 +    }
 +
 +
 +    @Override
 +    public List<String> getServiceURLs(String serviceName) {
 +        List<String> urls = new ArrayList<>();
 +        urls.addAll(urlCreator.create(serviceName));
 +        return urls;
 +    }
 +
 +
 +    static class ServiceConfiguration {
 +
 +        private String type;
 +        private String version;
 +        private Map<String, String> props;
 +
 +        ServiceConfiguration(String type, String version, Map<String, String> properties) {
 +            this.type = type;
 +            this.version = version;
 +            this.props = properties;
 +        }
 +
 +        public String getVersion() {
 +            return version;
 +        }
 +
 +        public String getType() {
 +            return type;
 +        }
 +
 +        public Map<String, String> getProperties() {
 +            return props;
 +        }
 +    }
 +
 +}
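
The new getServiceConfigurations() accessor above exposes the per-service configuration map so that the Ambari configuration monitor introduced in this merge can record the active configuration versions. A minimal sketch of such a consumer, assuming same-package access (the helper method below is illustrative only and not part of the commit):

    // Illustrative only: collect the active version of every configuration type,
    // keyed by "SERVICE_NAME/config-type", as a baseline for change detection.
    // Assumes java.util.Map and java.util.HashMap are imported.
    static Map<String, String> collectConfigVersions(AmbariCluster cluster) {
        Map<String, String> versions = new HashMap<>();
        cluster.getServiceConfigurations().forEach((service, configs) ->
            configs.forEach((configType, serviceConfig) ->
                versions.put(service + "/" + configType, serviceConfig.getVersion())));
        return versions;
    }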

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
index dbc783d,0000000..6a6a888
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@@ -1,306 -1,0 +1,262 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.discovery.ambari;
 +
- import java.io.IOException;
++import java.lang.reflect.Method;
 +import java.util.ArrayList;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Properties;
 +
 +import net.minidev.json.JSONArray;
 +import net.minidev.json.JSONObject;
- import net.minidev.json.JSONValue;
- import org.apache.knox.gateway.config.ConfigurationException;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
++import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.security.AliasService;
- import org.apache.knox.gateway.services.security.AliasServiceException;
++import org.apache.knox.gateway.topology.ClusterConfigurationMonitorService;
++import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitor;
 +import org.apache.knox.gateway.topology.discovery.GatewayService;
 +import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
 +import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
- import org.apache.http.HttpEntity;
- import org.apache.http.HttpStatus;
- import org.apache.http.client.methods.CloseableHttpResponse;
- import org.apache.http.client.methods.HttpGet;
- import org.apache.http.impl.client.CloseableHttpClient;
- import org.apache.http.message.BasicHeader;
- import org.apache.http.util.EntityUtils;
 +
 +
 +class AmbariServiceDiscovery implements ServiceDiscovery {
 +
 +    static final String TYPE = "AMBARI";
 +
-     static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
++    static final String AMBARI_CLUSTERS_URI = AmbariClientCommon.AMBARI_CLUSTERS_URI;
 +
-     static final String AMBARI_HOSTROLES_URI =
-                                        AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
++    static final String AMBARI_HOSTROLES_URI = AmbariClientCommon.AMBARI_HOSTROLES_URI;
 +
-     static final String AMBARI_SERVICECONFIGS_URI =
-             AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
++    static final String AMBARI_SERVICECONFIGS_URI = AmbariClientCommon.AMBARI_SERVICECONFIGS_URI;
 +
 +    private static final String COMPONENT_CONFIG_MAPPING_FILE =
 +                                                        "ambari-service-discovery-component-config-mapping.properties";
 +
++    private static final String GATEWAY_SERVICES_ACCESSOR_CLASS  = "org.apache.knox.gateway.GatewayServer";
++    private static final String GATEWAY_SERVICES_ACCESSOR_METHOD = "getGatewayServices";
++
 +    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
 +
 +    // Map of component names to service configuration types
 +    private static Map<String, String> componentServiceConfigs = new HashMap<>();
 +    static {
 +        try {
 +            Properties configMapping = new Properties();
 +            configMapping.load(AmbariServiceDiscovery.class.getClassLoader().getResourceAsStream(COMPONENT_CONFIG_MAPPING_FILE));
 +            for (String componentName : configMapping.stringPropertyNames()) {
 +                componentServiceConfigs.put(componentName, configMapping.getProperty(componentName));
 +            }
 +        } catch (Exception e) {
-             log.failedToLoadServiceDiscoveryConfiguration(COMPONENT_CONFIG_MAPPING_FILE, e);
++            log.failedToLoadServiceDiscoveryURLDefConfiguration(COMPONENT_CONFIG_MAPPING_FILE, e);
 +        }
 +    }
 +
-     private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
-     private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
- 
 +    @GatewayService
 +    private AliasService aliasService;
 +
-     private CloseableHttpClient httpClient = null;
++    private RESTInvoker restClient;
++    private AmbariClientCommon ambariClient;
 +
++    // This is used to update the monitor when new cluster configuration details are discovered.
++    private AmbariConfigurationMonitor configChangeMonitor;
++
++    private boolean isInitialized = false;
 +
 +    AmbariServiceDiscovery() {
-         httpClient = org.apache.http.impl.client.HttpClients.createDefault();
++    }
++
++
++    AmbariServiceDiscovery(RESTInvoker restClient) {
++        this.restClient = restClient;
++    }
++
++
++    /**
++     * Initialization must be subsequent to construction because the AliasService member isn't assigned until after
++     * construction time. This is called internally prior to discovery invocations to make sure the clients have been
++     * initialized.
++     */
++    private void init() {
++        if (!isInitialized) {
++            if (this.restClient == null) {
++                this.restClient = new RESTInvoker(aliasService);
++            }
++            this.ambariClient = new AmbariClientCommon(restClient);
++            this.configChangeMonitor = getConfigurationChangeMonitor();
++
++            isInitialized = true;
++        }
++    }
++
++
++    /**
++     * Get the Ambari configuration change monitor from the associated gateway service.
++     */
++    private AmbariConfigurationMonitor getConfigurationChangeMonitor() {
++        AmbariConfigurationMonitor ambariMonitor = null;
++        try {
++            Class clazz = Class.forName(GATEWAY_SERVICES_ACCESSOR_CLASS);
++            if (clazz != null) {
++                Method m = clazz.getDeclaredMethod(GATEWAY_SERVICES_ACCESSOR_METHOD);
++                if (m != null) {
++                    Object obj = m.invoke(null);
++                    if (GatewayServices.class.isAssignableFrom(obj.getClass())) {
++                        ClusterConfigurationMonitorService clusterMonitorService =
++                              ((GatewayServices) obj).getService(GatewayServices.CLUSTER_CONFIGURATION_MONITOR_SERVICE);
++                        ClusterConfigurationMonitor monitor =
++                                                 clusterMonitorService.getMonitor(AmbariConfigurationMonitor.getType());
++                        if (monitor != null) {
++                            if (AmbariConfigurationMonitor.class.isAssignableFrom(monitor.getClass())) {
++                                ambariMonitor = (AmbariConfigurationMonitor) monitor;
++                            }
++                        }
++                    }
++                }
++            }
++        } catch (Exception e) {
++            log.errorAccessingConfigurationChangeMonitor(e);
++        }
++        return ambariMonitor;
 +    }
 +
 +
 +    @Override
 +    public String getType() {
 +        return TYPE;
 +    }
 +
 +
 +    @Override
 +    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
-         Map<String, Cluster> clusters = new HashMap<String, Cluster>();
++        Map<String, Cluster> clusters = new HashMap<>();
++
++        init();
 +
 +        String discoveryAddress = config.getAddress();
 +
 +        // Invoke Ambari REST API to discover the available clusters
 +        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
 +
-         JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
++        JSONObject json = restClient.invoke(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
 +
 +        // Parse the cluster names from the response, and perform the cluster discovery
 +        JSONArray clusterItems = (JSONArray) json.get("items");
 +        for (Object clusterItem : clusterItems) {
 +            String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
 +            try {
 +                Cluster c = discover(config, clusterName);
 +                clusters.put(clusterName, c);
 +            } catch (Exception e) {
 +                log.clusterDiscoveryError(clusterName, e);
 +            }
 +        }
 +
 +        return clusters;
 +    }
 +
 +
 +    @Override
 +    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
 +        AmbariCluster cluster = new AmbariCluster(clusterName);
 +
 +        Map<String, String> serviceComponents = new HashMap<>();
 +
++        init();
++
 +        String discoveryAddress = config.getAddress();
 +        String discoveryUser = config.getUser();
 +        String discoveryPwdAlias = config.getPasswordAlias();
 +
 +        Map<String, List<String>> componentHostNames = new HashMap<>();
 +        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
-         JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
++        JSONObject hostRolesJSON = restClient.invoke(hostRolesURL, discoveryUser, discoveryPwdAlias);
 +        if (hostRolesJSON != null) {
 +            // Process the host roles JSON
 +            JSONArray items = (JSONArray) hostRolesJSON.get("items");
 +            for (Object obj : items) {
 +                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
 +                for (Object component : components) {
 +                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
 +                    for (Object hostComponent : hostComponents) {
 +                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
 +                        String serviceName = (String) hostRoles.get("service_name");
 +                        String componentName = (String) hostRoles.get("component_name");
 +
 +                        serviceComponents.put(componentName, serviceName);
 +
 +                        // Assuming public host name is more applicable than host_name
 +                        String hostName = (String) hostRoles.get("public_host_name");
 +                        if (hostName == null) {
 +                            // Some (even slightly) older versions of Ambari/HDP do not return public_host_name,
 +                            // so fall back to host_name in those cases.
 +                            hostName = (String) hostRoles.get("host_name");
 +                        }
 +
 +                        if (hostName != null) {
 +                            log.discoveredServiceHost(serviceName, hostName);
 +                            if (!componentHostNames.containsKey(componentName)) {
-                                 componentHostNames.put(componentName, new ArrayList<String>());
++                                componentHostNames.put(componentName, new ArrayList<>());
 +                            }
 +                            componentHostNames.get(componentName).add(hostName);
 +                        }
 +                    }
 +                }
 +            }
 +        }
 +
++        // Service configurations
 +        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
-                                                  new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
-         String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
-         JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
-         if (serviceConfigsJSON != null) {
-             // Process the service configurations
-             JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
-             for (Object serviceConfig : serviceConfigs) {
-                 String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
-                 JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
-                 for (Object configuration : configurations) {
-                     String configType = (String) ((JSONObject) configuration).get("type");
-                     String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
- 
-                     Map<String, String> configProps = new HashMap<String, String>();
-                     JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
-                     for (String propertyName : configProperties.keySet()) {
-                         configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
-                     }
-                     if (!serviceConfigurations.containsKey(serviceName)) {
-                         serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
-                     }
-                     serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
-                     cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
-                 }
++                                                        ambariClient.getActiveServiceConfigurations(discoveryAddress,
++                                                                                                    clusterName,
++                                                                                                    discoveryUser,
++                                                                                                    discoveryPwdAlias);
++        for (String serviceName : serviceConfigurations.keySet()) {
++            for (Map.Entry<String, AmbariCluster.ServiceConfiguration> serviceConfig : serviceConfigurations.get(serviceName).entrySet()) {
++                cluster.addServiceConfiguration(serviceName, serviceConfig.getKey(), serviceConfig.getValue());
 +            }
 +        }
 +
 +        // Construct the AmbariCluster model
 +        for (String componentName : serviceComponents.keySet()) {
 +            String serviceName = serviceComponents.get(componentName);
 +            List<String> hostNames = componentHostNames.get(componentName);
 +
 +            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
 +            String configType = componentServiceConfigs.get(componentName);
 +            if (configType != null) {
 +                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
 +                AmbariComponent c = new AmbariComponent(componentName,
 +                                                        svcConfig.getVersion(),
 +                                                        clusterName,
 +                                                        serviceName,
 +                                                        hostNames,
 +                                                        svcConfig.getProperties());
 +                cluster.addComponent(c);
 +            }
 +        }
 +
-         return cluster;
-     }
- 
- 
-     protected JSONObject invokeREST(String url, String username, String passwordAlias) {
-         JSONObject result = null;
- 
-         CloseableHttpResponse response = null;
-         try {
-             HttpGet request = new HttpGet(url);
- 
-             // If no configured username, then use default username alias
-             String password = null;
-             if (username == null) {
-                 if (aliasService != null) {
-                     try {
-                         char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
-                         if (defaultUser != null) {
-                             username = new String(defaultUser);
-                         }
-                     } catch (AliasServiceException e) {
-                         log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
-                     }
-                 }
- 
-                 // If username is still null
-                 if (username == null) {
-                     log.aliasServiceUserNotFound();
-                     throw new ConfigurationException("No username is configured for Ambari service discovery.");
-                 }
-             }
- 
-             if (aliasService != null) {
-                 // If no password alias is configured, then try the default alias
-                 if (passwordAlias == null) {
-                     passwordAlias = DEFAULT_PWD_ALIAS;
-                 }
- 
-                 try {
-                     char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
-                     if (pwd != null) {
-                         password = new String(pwd);
-                     }
- 
-                 } catch (AliasServiceException e) {
-                     log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
-                 }
-             }
- 
-             // If the password could not be determined
-             if (password == null) {
-                 log.aliasServicePasswordNotFound();
-                 throw new ConfigurationException("No password is configured for Ambari service discovery.");
-             }
- 
-             // Add an auth header if credentials are available
-             String encodedCreds =
-                     org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
-             request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
- 
-             response = httpClient.execute(request);
- 
-             if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
-                 HttpEntity entity = response.getEntity();
-                 if (entity != null) {
-                     result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
-                     log.debugJSON(result.toJSONString());
-                 } else {
-                     log.noJSON(url);
-                 }
-             } else {
-                 log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
-             }
- 
-         } catch (IOException e) {
-             log.restInvocationError(url, e);
-         } finally {
-             if(response != null) {
-                 try {
-                     response.close();
-                 } catch (IOException e) {
-                     // Ignore
-                 }
-             }
++        if (configChangeMonitor != null) {
++            // Notify the cluster config monitor about these cluster configuration details
++            configChangeMonitor.addClusterConfigVersions(cluster, config);
 +        }
-         return result;
-     }
 +
++        return cluster;
++    }
 +
 +}
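
Two structural points in the rewrite above are worth noting: the HTTP plumbing has moved into the shared RESTInvoker/AmbariClientCommon classes, and because the @GatewayService-injected AliasService is only assigned after construction, those clients are built lazily in init() before each discovery call. Below is a hedged usage sketch of the resulting discovery API, using only methods defined in this commit; the caller, cluster name and service roles are example values, not part of the change:

    // Hypothetical caller, for illustration only. The ServiceDiscoveryConfig is
    // assumed to carry the Ambari address, user and password alias; "Sandbox",
    // "WEBHDFS" and "RESOURCEMANAGER" are example values.
    void printDiscoveredUrls(AmbariServiceDiscovery discovery, ServiceDiscoveryConfig config) {
        ServiceDiscovery.Cluster cluster = discovery.discover(config, "Sandbox");
        System.out.println("WEBHDFS -> " + cluster.getServiceURLs("WEBHDFS"));
        System.out.println("RESOURCEMANAGER -> " + cluster.getServiceURLs("RESOURCEMANAGER"));
    }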

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
index 2bdc94b,0000000..12e6078
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@@ -1,121 -1,0 +1,148 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.discovery.ambari;
 +
 +import org.apache.knox.gateway.i18n.messages.Message;
 +import org.apache.knox.gateway.i18n.messages.MessageLevel;
 +import org.apache.knox.gateway.i18n.messages.Messages;
 +import org.apache.knox.gateway.i18n.messages.StackTrace;
 +
 +@Messages(logger="org.apache.knox.gateway.topology.discovery.ambari")
 +public interface AmbariServiceDiscoveryMessages {
 +
 +    @Message(level = MessageLevel.ERROR,
-             text = "Failed to load service discovery configuration: {1}")
-     void failedToLoadServiceDiscoveryConfiguration(@StackTrace(level = MessageLevel.ERROR) Exception e);
++             text = "Failed to persist data for cluster configuration monitor {0} {1}: {2}")
++    void failedToPersistClusterMonitorData(final String monitor,
++                                           final String filename,
++                                           @StackTrace(level = MessageLevel.DEBUG) Exception e);
 +
 +    @Message(level = MessageLevel.ERROR,
-              text = "Failed to load service discovery configuration {0}: {1}")
-     void failedToLoadServiceDiscoveryConfiguration(final String configuration,
-                                @StackTrace(level = MessageLevel.ERROR) Exception e);
++             text = "Failed to load persisted service discovery configuration for cluster monitor {0} : {1}")
++    void failedToLoadClusterMonitorServiceDiscoveryConfig(final String monitor,
++                                                          @StackTrace(level = MessageLevel.DEBUG) Exception e);
++
++    @Message(level = MessageLevel.ERROR,
++            text = "Failed to load persisted cluster configuration version data for cluster monitor {0} : {1}")
++    void failedToLoadClusterMonitorConfigVersions(final String monitor,
++                                                  @StackTrace(level = MessageLevel.DEBUG) Exception e);
++
++    @Message(level = MessageLevel.ERROR,
++             text = "Unable to access the Ambari Configuration Change Monitor: {0}")
++    void errorAccessingConfigurationChangeMonitor(@StackTrace(level = MessageLevel.DEBUG) Exception e);
++
++    @Message(level = MessageLevel.ERROR,
++             text = "Failed to load service discovery URL definition configuration: {1}")
++    void failedToLoadServiceDiscoveryURLDefConfiguration(@StackTrace(level = MessageLevel.DEBUG) Exception e);
++
++    @Message(level = MessageLevel.ERROR,
++             text = "Failed to load service discovery URL definition configuration {0}: {1}")
++    void failedToLoadServiceDiscoveryURLDefConfiguration(final String configuration,
++                                                         @StackTrace(level = MessageLevel.ERROR) Exception e);
 +
 +    @Message(level = MessageLevel.ERROR,
 +             text = "Encountered an error during cluster {0} discovery: {1}")
 +    void clusterDiscoveryError(final String clusterName,
-                                @StackTrace(level = MessageLevel.ERROR) Exception e);
++                               @StackTrace(level = MessageLevel.DEBUG) Exception e);
 +
 +
 +    @Message(level = MessageLevel.DEBUG,
 +             text = "REST invocation {0} failed: {1}")
 +    void restInvocationError(final String url,
-                              @StackTrace(level = MessageLevel.ERROR) Exception e);
++                             @StackTrace(level = MessageLevel.DEBUG) Exception e);
 +
 +
 +    @Message(level = MessageLevel.ERROR,
 +             text = "Encountered an error attempting to determine the user for alias {0} : {1}")
 +    void aliasServiceUserError(final String alias, final String error);
 +
 +
 +    @Message(level = MessageLevel.ERROR,
 +             text = "Encountered an error attempting to determine the password for alias {0} : {1}")
 +    void aliasServicePasswordError(final String alias, final String error);
 +
 +
 +    @Message(level = MessageLevel.ERROR,
 +             text = "No user configured for Ambari service discovery.")
 +    void aliasServiceUserNotFound();
 +
 +
 +    @Message(level = MessageLevel.ERROR,
 +             text = "No password configured for Ambari service discovery.")
 +    void aliasServicePasswordNotFound();
 +
 +
 +    @Message(level = MessageLevel.ERROR,
 +             text = "Unexpected REST invocation response code for {0} : {1}")
 +    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
 +
 +
 +    @Message(level = MessageLevel.ERROR,
 +             text = "REST invocation {0} yielded a response without any JSON.")
 +    void noJSON(final String url);
 +
 +
-     @Message(level = MessageLevel.DEBUG,
++    @Message(level = MessageLevel.TRACE,
 +             text = "REST invocation result: {0}")
 +    void debugJSON(final String json);
 +
++
 +    @Message(level = MessageLevel.DEBUG,
-             text = "Loaded component configuration mappings: {0}")
++             text = "Loaded component configuration mappings: {0}")
 +    void loadedComponentConfigMappings(final String mappings);
 +
++
 +    @Message(level = MessageLevel.ERROR,
 +             text = "Failed to load component configuration property mappings {0}: {1}")
 +    void failedToLoadComponentConfigMappings(final String mappings,
-                                              @StackTrace(level = MessageLevel.ERROR) Exception e);
++                                             @StackTrace(level = MessageLevel.DEBUG) Exception e);
 +
-     @Message(level = MessageLevel.DEBUG,
++
++    @Message(level = MessageLevel.TRACE,
 +             text = "Discovered: Service: {0}, Host: {1}")
 +    void discoveredServiceHost(final String serviceName, final String hostName);
 +
 +
 +    @Message(level = MessageLevel.DEBUG,
 +             text = "Querying the cluster for the {0} configuration ({1}) property: {2}")
 +    void lookingUpServiceConfigProperty(final String serviceName, final String configType, final String propertyName);
 +
 +
 +    @Message(level = MessageLevel.DEBUG,
 +             text = "Querying the cluster for the {0} component configuration property: {1}")
 +    void lookingUpComponentConfigProperty(final String componentName, final String propertyName);
 +
 +
 +    @Message(level = MessageLevel.DEBUG,
 +             text = "Querying the cluster for the {0} component's hosts")
 +    void lookingUpComponentHosts(final String componentName);
 +
 +
 +    @Message(level = MessageLevel.DEBUG,
 +            text = "Handling a derived service URL mapping property for the {0} service: type = {1}, name = {2}")
 +    void handlingDerivedProperty(final String serviceName, final String propertyType, final String propertyName);
 +
 +
 +    @Message(level = MessageLevel.DEBUG,
-             text = "Determined the service URL mapping property {0} value: {1}")
++             text = "Determined the service URL mapping property {0} value: {1}")
 +    void determinedPropertyValue(final String propertyName, final String propertyValue);
 +
 +
++    @Message(level = MessageLevel.INFO,
++             text = "Started Ambari cluster configuration monitor (checking every {0} seconds)")
++    void startedAmbariConfigMonitor(final long pollingInterval);
++
 +}
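
The interface above follows Knox's annotation-driven logging: a proxy obtained from MessagesFactory fills the {n} placeholders from the method arguments and logs the exception's stack trace at the level named by @StackTrace. A short sketch of how such a message is emitted, mirroring the usage already visible in AmbariServiceDiscovery in this same merge (the wrapper method is illustrative):

    // Obtain the message proxy once, then call the declared methods to log.
    private static final AmbariServiceDiscoveryMessages log =
            MessagesFactory.get(AmbariServiceDiscoveryMessages.class);

    void reportDiscoveryFailure(String clusterName, Exception e) {  // illustrative wrapper
        // {0} is filled with clusterName; the stack trace is appended at the
        // MessageLevel declared on the @StackTrace parameter annotation.
        log.clusterDiscoveryError(clusterName, e);
    }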

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
index ed07873,0000000..47b20e9
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
@@@ -1,324 -1,0 +1,324 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + * <p>
 + * http://www.apache.org/licenses/LICENSE-2.0
 + * <p>
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.discovery.ambari;
 +
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.apache.knox.gateway.util.XmlUtils;
 +import org.w3c.dom.Document;
 +import org.w3c.dom.NamedNodeMap;
 +import org.w3c.dom.Node;
 +import org.w3c.dom.NodeList;
 +
 +import javax.xml.xpath.XPath;
 +import javax.xml.xpath.XPathConstants;
 +import javax.xml.xpath.XPathExpression;
 +import javax.xml.xpath.XPathExpressionException;
 +import javax.xml.xpath.XPathFactory;
 +import java.io.File;
 +import java.io.FileInputStream;
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.util.ArrayList;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.regex.Matcher;
 +import java.util.regex.Pattern;
 +
 +/**
 + * Service URL pattern mapping configuration model.
 + */
 +class ServiceURLPropertyConfig {
 +
 +    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
 +
 +    private static final String ATTR_NAME = "name";
 +
 +    private static XPathExpression SERVICE_URL_PATTERN_MAPPINGS;
 +    private static XPathExpression URL_PATTERN;
 +    private static XPathExpression PROPERTIES;
 +    static {
 +        XPath xpath = XPathFactory.newInstance().newXPath();
 +        try {
 +            SERVICE_URL_PATTERN_MAPPINGS = xpath.compile("/service-discovery-url-mappings/service");
 +            URL_PATTERN                  = xpath.compile("url-pattern/text()");
 +            PROPERTIES                   = xpath.compile("properties/property");
 +        } catch (XPathExpressionException e) {
 +            e.printStackTrace();
 +        }
 +    }
 +
 +    private static final String DEFAULT_SERVICE_URL_MAPPINGS = "ambari-service-discovery-url-mappings.xml";
 +
 +    private Map<String, URLPattern> urlPatterns = new HashMap<>();
 +
 +    private Map<String, Map<String, Property>> properties = new HashMap<>();
 +
 +
 +    /**
 +     * The default service URL pattern to property mapping configuration will be used.
 +     */
 +    ServiceURLPropertyConfig() {
 +        this(ServiceURLPropertyConfig.class.getClassLoader().getResourceAsStream(DEFAULT_SERVICE_URL_MAPPINGS));
 +    }
 +
 +    /**
 +     * The specified file will be used as the service URL pattern to property mapping configuration.
 +     */
 +    ServiceURLPropertyConfig(File mappingConfigurationFile) throws Exception {
 +        this(new FileInputStream(mappingConfigurationFile));
 +    }
 +
 +    /**
 +     *
 +     * @param source An InputStream for the XML content
 +     */
 +    ServiceURLPropertyConfig(InputStream source) {
 +        // Parse the XML, and build the model
 +        try {
 +            Document doc = XmlUtils.readXml(source);
 +
 +            NodeList serviceNodes =
 +                    (NodeList) SERVICE_URL_PATTERN_MAPPINGS.evaluate(doc, XPathConstants.NODESET);
 +            for (int i=0; i < serviceNodes.getLength(); i++) {
 +                Node serviceNode = serviceNodes.item(i);
 +                String serviceName = serviceNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
 +                properties.put(serviceName, new HashMap<String, Property>());
 +
 +                Node urlPatternNode = (Node) URL_PATTERN.evaluate(serviceNode, XPathConstants.NODE);
 +                if (urlPatternNode != null) {
 +                    urlPatterns.put(serviceName, new URLPattern(urlPatternNode.getNodeValue()));
 +                }
 +
 +                NodeList propertiesNode = (NodeList) PROPERTIES.evaluate(serviceNode, XPathConstants.NODESET);
 +                if (propertiesNode != null) {
 +                    processProperties(serviceName, propertiesNode);
 +                }
 +            }
 +        } catch (Exception e) {
-             log.failedToLoadServiceDiscoveryConfiguration(e);
++            log.failedToLoadServiceDiscoveryURLDefConfiguration(e);
 +        } finally {
 +            try {
 +                source.close();
 +            } catch (IOException e) {
 +                // Ignore
 +            }
 +        }
 +    }
 +
 +    private void processProperties(String serviceName, NodeList propertyNodes) {
 +        for (int i = 0; i < propertyNodes.getLength(); i++) {
 +            Property p = Property.createProperty(serviceName, propertyNodes.item(i));
 +            properties.get(serviceName).put(p.getName(), p);
 +        }
 +    }
 +
 +    URLPattern getURLPattern(String service) {
 +        return urlPatterns.get(service);
 +    }
 +
 +    Property getConfigProperty(String service, String property) {
 +        return properties.get(service).get(property);
 +    }
 +
 +    static class URLPattern {
 +        String pattern;
 +        List<String> placeholders = new ArrayList<>();
 +
 +        URLPattern(String pattern) {
 +            this.pattern = pattern;
 +
 +            final Pattern regex = Pattern.compile("\\{(.*?)}", Pattern.DOTALL);
 +            final Matcher matcher = regex.matcher(pattern);
 +            while( matcher.find() ){
 +                placeholders.add(matcher.group(1));
 +            }
 +        }
 +
 +        String get() {return pattern; }
 +        List<String> getPlaceholders() {
 +            return placeholders;
 +        }
 +    }
 +
 +    static class Property {
 +        static final String TYPE_SERVICE   = "SERVICE";
 +        static final String TYPE_COMPONENT = "COMPONENT";
 +        static final String TYPE_DERIVED   = "DERIVED";
 +
 +        static final String PROP_COMP_HOSTNAME = "component.host.name";
 +
 +        static final String ATTR_NAME     = "name";
 +        static final String ATTR_PROPERTY = "property";
 +        static final String ATTR_VALUE    = "value";
 +
 +        static XPathExpression HOSTNAME;
 +        static XPathExpression SERVICE_CONFIG;
 +        static XPathExpression COMPONENT;
 +        static XPathExpression CONFIG_PROPERTY;
 +        static XPathExpression IF;
 +        static XPathExpression THEN;
 +        static XPathExpression ELSE;
 +        static XPathExpression TEXT;
 +        static {
 +            XPath xpath = XPathFactory.newInstance().newXPath();
 +            try {
 +                HOSTNAME        = xpath.compile("hostname");
 +                SERVICE_CONFIG  = xpath.compile("service-config");
 +                COMPONENT       = xpath.compile("component");
 +                CONFIG_PROPERTY = xpath.compile("config-property");
 +                IF              = xpath.compile("if");
 +                THEN            = xpath.compile("then");
 +                ELSE            = xpath.compile("else");
 +                TEXT            = xpath.compile("text()");
 +            } catch (XPathExpressionException e) {
 +                e.printStackTrace();
 +            }
 +        }
 +
 +
 +        String type;
 +        String name;
 +        String component;
 +        String service;
 +        String serviceConfig;
 +        String value;
 +        ConditionalValueHandler conditionHandler = null;
 +
 +        private Property(String type,
 +                         String propertyName,
 +                         String component,
 +                         String service,
 +                         String configType,
 +                         String value,
 +                         ConditionalValueHandler pch) {
 +            this.type = type;
 +            this.name = propertyName;
 +            this.service = service;
 +            this.component = component;
 +            this.serviceConfig = configType;
 +            this.value = value;
 +            conditionHandler = pch;
 +        }
 +
 +        static Property createProperty(String serviceName, Node propertyNode) {
 +            String propertyName = propertyNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
 +            String propertyType = null;
 +            String serviceType = null;
 +            String configType = null;
 +            String componentType = null;
 +            String value = null;
 +            ConditionalValueHandler pch = null;
 +
 +            try {
 +                Node hostNameNode = (Node) HOSTNAME.evaluate(propertyNode, XPathConstants.NODE);
 +                if (hostNameNode != null) {
 +                    value = PROP_COMP_HOSTNAME;
 +                }
 +
 +                // Check for a service-config node
 +                Node scNode = (Node) SERVICE_CONFIG.evaluate(propertyNode, XPathConstants.NODE);
 +                if (scNode != null) {
 +                    // Service config property
 +                    propertyType = Property.TYPE_SERVICE;
 +                    serviceType = scNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
 +                    Node scTextNode = (Node) TEXT.evaluate(scNode, XPathConstants.NODE);
 +                    configType = scTextNode.getNodeValue();
 +                } else { // If not service-config node, check for a component config node
 +                    Node cNode = (Node) COMPONENT.evaluate(propertyNode, XPathConstants.NODE);
 +                    if (cNode != null) {
 +                        // Component config property
 +                        propertyType = Property.TYPE_COMPONENT;
 +                        componentType = cNode.getFirstChild().getNodeValue();
 +                        Node cTextNode = (Node) TEXT.evaluate(cNode, XPathConstants.NODE);
 +                        configType = cTextNode.getNodeValue();
 +                        componentType = cTextNode.getNodeValue();
 +                    }
 +                }
 +
 +                // Check for a config property node
 +                Node cpNode = (Node) CONFIG_PROPERTY.evaluate(propertyNode, XPathConstants.NODE);
 +                if (cpNode != null) {
 +                    // Check for a condition element
 +                    Node ifNode = (Node) IF.evaluate(cpNode, XPathConstants.NODE);
 +                    if (ifNode != null) {
 +                        propertyType = TYPE_DERIVED;
 +                        pch = getConditionHandler(serviceName, ifNode);
 +                    } else {
 +                        Node cpTextNode = (Node) TEXT.evaluate(cpNode, XPathConstants.NODE);
 +                        value = cpTextNode.getNodeValue();
 +                    }
 +                }
 +            } catch (Exception e) {
 +                e.printStackTrace();
 +            }
 +
 +            // Create and return the property representation
 +            return new Property(propertyType, propertyName, componentType, serviceType, configType, value, pch);
 +        }
 +
 +        private static ConditionalValueHandler getConditionHandler(String serviceName, Node ifNode) throws Exception {
 +            ConditionalValueHandler result = null;
 +
 +            if (ifNode != null) {
 +                NamedNodeMap attrs = ifNode.getAttributes();
 +                String comparisonPropName = attrs.getNamedItem(ATTR_PROPERTY).getNodeValue();
 +                String comparisonValue = attrs.getNamedItem(ATTR_VALUE).getNodeValue();
 +
 +                ConditionalValueHandler affirmativeResult = null;
 +                Node thenNode = (Node) THEN.evaluate(ifNode, XPathConstants.NODE);
 +                if (thenNode != null) {
 +                    Node subIfNode = (Node) IF.evaluate(thenNode, XPathConstants.NODE);
 +                    if (subIfNode != null) {
 +                        affirmativeResult = getConditionHandler(serviceName, subIfNode);
 +                    } else {
 +                        affirmativeResult = new SimpleValueHandler(thenNode.getFirstChild().getNodeValue());
 +                    }
 +                }
 +
 +                ConditionalValueHandler negativeResult = null;
 +                Node elseNode = (Node) ELSE.evaluate(ifNode, XPathConstants.NODE);
 +                if (elseNode != null) {
 +                    Node subIfNode = (Node) IF.evaluate(elseNode, XPathConstants.NODE);
 +                    if (subIfNode != null) {
 +                        negativeResult = getConditionHandler(serviceName, subIfNode);
 +                    } else {
 +                        negativeResult = new SimpleValueHandler(elseNode.getFirstChild().getNodeValue());
 +                    }
 +                }
 +
 +                result = new PropertyEqualsHandler(serviceName,
 +                        comparisonPropName,
 +                        comparisonValue,
 +                        affirmativeResult,
 +                        negativeResult);
 +            }
 +
 +            return result;
 +        }
 +
 +        String getType() { return type; }
 +        String getName() { return name; }
 +        String getComponent() { return component; }
 +        String getService() { return service; }
 +        String getServiceConfig() { return serviceConfig; }
 +        String getValue() {
 +            return value;
 +        }
 +        ConditionalValueHandler getConditionHandler() { return conditionHandler; }
 +    }
 +}
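
ServiceURLPropertyConfig above parses either the bundled ambari-service-discovery-url-mappings.xml or a caller-supplied override file, and exposes lookups for a service's URL template and its mapped properties. A small usage sketch built only from the constructors and accessors defined above, assuming same-package access since these types are package-private; the file path, service name and output are examples, not values from the commit:

    // Load an override mapping file and inspect the RESOURCEMANAGER entries.
    // The File-based constructor declares "throws Exception", so the caller does too.
    // Assumes java.io.File is imported.
    void inspectMappings() throws Exception {
        ServiceURLPropertyConfig mappings =
                new ServiceURLPropertyConfig(new File("/etc/knox/custom-url-mappings.xml"));
        ServiceURLPropertyConfig.URLPattern pattern = mappings.getURLPattern("RESOURCEMANAGER");
        // pattern.get() returns the raw template; getPlaceholders() lists the {..}
        // names that must be resolved through getConfigProperty(service, name).
        for (String placeholder : pattern.getPlaceholders()) {
            ServiceURLPropertyConfig.Property p = mappings.getConfigProperty("RESOURCEMANAGER", placeholder);
            System.out.println(placeholder + " -> type=" + (p != null ? p.getType() : "unmapped"));
        }
    }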


[02/49] knox git commit: KNOX-1118 - Remove POC Service Definition for AmbariUI

Posted by mo...@apache.org.
KNOX-1118 - Remove POC Service Definition for AmbariUI


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/24d51ad9
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/24d51ad9
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/24d51ad9

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 24d51ad9e36de2ad84d45d8ed19af41441b16765
Parents: fa6acbe
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Nov 16 15:34:34 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Nov 16 16:33:14 2017 -0500

----------------------------------------------------------------------
 CHANGES | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/24d51ad9/CHANGES
----------------------------------------------------------------------
diff --git a/CHANGES b/CHANGES
index 2d266d6..3c84f3d 100644
--- a/CHANGES
+++ b/CHANGES
@@ -82,6 +82,7 @@ Release Notes - Apache Knox - Version 0.14.0
    * [KNOX-1068] - Add support for HTTP Head request
    * [KNOX-1079] - Regression: proxy for Atlas fails with j_spring_security_check during login (Madhan Neethiraj via lmccay)
    * [KNOX-1022] - Configuring knox token ttl to higher value generates an access token which is not valid
+   * [KNOX-1118] - Remove POC Service Definition for AmbariUI
 
 ------------------------------------------------------------------------------
 Release Notes - Apache Knox - Version 0.13.0


[34/49] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-spi/src/main/java/org/apache/knox/gateway/config/GatewayConfig.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/main/java/org/apache/knox/gateway/config/GatewayConfig.java
index 0ed7556,0000000..882bc71
mode 100644,000000..100644
--- a/gateway-spi/src/main/java/org/apache/knox/gateway/config/GatewayConfig.java
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/config/GatewayConfig.java
@@@ -1,302 -1,0 +1,352 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.config;
 +
 +import java.net.InetSocketAddress;
 +import java.net.UnknownHostException;
 +import java.util.List;
 +import java.util.Map;
 +
 +public interface GatewayConfig {
 +
 +  // Used as the basis for any home directory that is not specified.
 +  static final String GATEWAY_HOME_VAR = "GATEWAY_HOME";
 +
 +  // Variable name for the location of configuration files edited by users
 +  static final String GATEWAY_CONF_HOME_VAR = "GATEWAY_CONF_HOME";
 +
 +  // Variable name for the location of data files generated by the gateway at runtime.
 +  static final String GATEWAY_DATA_HOME_VAR = "GATEWAY_DATA_HOME";
 +
 +  public static final String GATEWAY_CONFIG_ATTRIBUTE = "org.apache.knox.gateway.config";
 +  public static final String HADOOP_KERBEROS_SECURED = "gateway.hadoop.kerberos.secured";
 +  public static final String KRB5_CONFIG = "java.security.krb5.conf";
 +  public static final String KRB5_DEBUG = "sun.security.krb5.debug";
 +  public static final String KRB5_LOGIN_CONFIG = "java.security.auth.login.config";
 +  public static final String KRB5_USE_SUBJECT_CREDS_ONLY = "javax.security.auth.useSubjectCredsOnly";
 +  public static final String SIGNING_KEYSTORE_NAME = "gateway.signing.keystore.name";
 +  public static final String SIGNING_KEY_ALIAS = "gateway.signing.key.alias";
 +
++  String REMOTE_CONFIG_REGISTRY_TYPE = "type";
++  String REMOTE_CONFIG_REGISTRY_ADDRESS = "address";
++  String REMOTE_CONFIG_REGISTRY_NAMESPACE = "namespace";
++  String REMOTE_CONFIG_REGISTRY_AUTH_TYPE = "authType";
++  String REMOTE_CONFIG_REGISTRY_PRINCIPAL = "principal";
++  String REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS = "credentialAlias";
++  String REMOTE_CONFIG_REGISTRY_KEYTAB = "keytab";
++  String REMOTE_CONFIG_REGISTRY_USE_KEYTAB = "useKeytab";
++  String REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE = "useTicketCache";
++
 +  /**
 +   * The location of the gateway configuration.
 +   * Subdirectories will be: topologies
 +   * @return The location of the gateway configuration.
 +   */
 +  String getGatewayConfDir();
 +
 +  /**
 +   * The location of the gateway runtime generated data.
 +   * Subdirectories will be: security, deployments
 +   * @return The location of the gateway runtime generated data.
 +   */
 +  String getGatewayDataDir();
 +
 +  /**
 +   * The location of the gateway service definitions' root directory.
 +   * @return The location of the gateway services top level directory.
 +   */
 +  String getGatewayServicesDir();
 +
 +  /**
 +   * The location of the gateway applications' root directory.
 +   * @return The location of the gateway applications top level directory.
 +   */
 +  String getGatewayApplicationsDir();
 +
 +  String getHadoopConfDir();
 +
 +  String getGatewayHost();
 +
 +  int getGatewayPort();
 +
 +  String getGatewayPath();
 +
++  String getGatewayProvidersConfigDir();
++
++  String getGatewayDescriptorsDir();
++
 +  String getGatewayTopologyDir();
 +
 +  String getGatewaySecurityDir();
 +
 +  String getGatewayDeploymentDir();
 +
 +  InetSocketAddress getGatewayAddress() throws UnknownHostException;
 +
 +  boolean isSSLEnabled();
 +  
 +  List<String> getExcludedSSLProtocols();
 +
 +  List<String> getIncludedSSLCiphers();
 +
 +  List<String> getExcludedSSLCiphers();
 +
 +  boolean isHadoopKerberosSecured();
 +
 +  String getKerberosConfig();
 +
 +  boolean isKerberosDebugEnabled();
 +
 +  String getKerberosLoginConfig();
 +
 +  String getDefaultTopologyName();
 +
 +  String getDefaultAppRedirectPath();
 +
 +  String getFrontendUrl();
 +
 +  boolean isClientAuthNeeded();
 +
 +  boolean isClientAuthWanted();
 +
 +  String getTruststorePath();
 +
 +  boolean getTrustAllCerts();
 +
 +  String getKeystoreType();
 +
 +  String getTruststoreType();
 +
 +  boolean isXForwardedEnabled();
 +
 +  String getEphemeralDHKeySize();
 +
 +  int getHttpClientMaxConnections();
 +
 +  int getHttpClientConnectionTimeout();
 +
 +  int getHttpClientSocketTimeout();
 +
 +  int getThreadPoolMax();
 +
 +  int getHttpServerRequestBuffer();
 +
 +  int getHttpServerRequestHeaderBuffer();
 +
 +  int getHttpServerResponseBuffer();
 +
 +  int getHttpServerResponseHeaderBuffer();
 +
 +  int getGatewayDeploymentsBackupVersionLimit();
 +
 +  long getGatewayDeploymentsBackupAgeLimit();
 +
 +  long getGatewayIdleTimeout();
 +
 +  String getSigningKeystoreName();
 +
 +  String getSigningKeyAlias();
 +
 +  List<String> getGlobalRulesServices();
 +
 +  /**
 +   * Returns true if the websocket feature is enabled, otherwise false.
 +   * Default is false.
 +   * @since 0.10
 +   * @return
 +   */
 +  boolean isWebsocketEnabled();
 +
 +  /**
 +   * Websocket connection max text message size.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketMaxTextMessageSize();
 +
 +  /**
 +   * Websocket connection max binary message size.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketMaxBinaryMessageSize();
 +
 +  /**
 +   * Websocket connection max text message buffer size.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketMaxTextMessageBufferSize();
 +
 +  /**
 +   * Websocket connection max binary message buffer size.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketMaxBinaryMessageBufferSize();
 +
 +  /**
 +   * Websocket connection input buffer size.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketInputBufferSize();
 +
 +  /**
 +   * Websocket connection async write timeout.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketAsyncWriteTimeout();
 +
 +  /**
 +   * Websocket connection idle timeout.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketIdleTimeout();
 +
 +  boolean isMetricsEnabled();
 +
 +  boolean isJmxMetricsReportingEnabled();
 +
 +  boolean isGraphiteMetricsReportingEnabled();
 +
 +  String getGraphiteHost();
 +
 +  int getGraphitePort();
 +
 +  int getGraphiteReportingFrequency();
 +
 +  /**
 +   * List of MIME Type to be compressed.
 +   * @since 0.12
 +   */
 +  List<String> getMimeTypesToCompress();
 +
 +  /**
 +   * Enable cookie scoping to gateway path
 +   *
 +   * @since 0.13
 +   */
 +  boolean isCookieScopingToPathEnabled();
 +
 +  /**
 +   * Configured name of the HTTP Header that is expected
 +   * to be set by a proxy in front of the gateway.
 +   * @return
 +   */
 +  String getHeaderNameForRemoteAddress();
 +
 +  /**
 +   * Configured Algorithm name to be used by the CryptoService
 +   * and MasterService implementations
 +   * @return
 +   */
 +  String getAlgorithm();
 +
 +  /**
 +   * Configured Algorithm name to be used by the CryptoService
 +   * for password based encryption
 +   * @return
 +   */
 +  String getPBEAlgorithm();
 +
 +  /**
 +   * Configured Transformation name to be used by the CryptoService
 +   * and MasterService implementations
 +   * @return
 +   */
 +  String getTransformation();
 +
 +  /**
 +   * Configured SaltSize to be used by the CryptoService
 +   * and MasterService implementations
 +   * @return
 +   */
 +  String getSaltSize();
 +
 +  /**
 +   * Configured IterationCount to be used by the CryptoService
 +   * and MasterService implementations
 +   * @return
 +   */
 +  String getIterationCount();
 +
 +  /**
 +   * Configured KeyLength to be used by the CryptoService
 +   * and MasterService implementations
 +   * @return
 +   */
 +  String getKeyLength();
 +
 +  /**
 +   * Map of Topology names and their ports.
 +   *
 +   * @return
 +   */
 +  Map<String, Integer> getGatewayPortMappings();
 +
 +  /**
 +   * Is the Port Mapping feature on?
 +   * @return
 +   */
 +  boolean isGatewayPortMappingEnabled();
 +
 +  /**
 +   * Is the Server header suppressed
 +   * @return
 +   */
 +  boolean isGatewayServerHeaderEnabled();
++  
++  /**
++   *
++   * @param type The type of cluster configuration monitor for which the interval should be returned.
++   *
++   * @return The polling interval configuration value, or -1 if it has not been configured.
++   */
++  int getClusterMonitorPollingInterval(String type);
++  
++  /**
++   *
 ++   * @param type The type of cluster configuration monitor for which the enabled status should be returned.
++   *
++   * @return The enabled status of the specified type of cluster configuration monitor.
++   */
++  boolean isClusterMonitorEnabled(String type);
++  
++  /**
++   * @return The list of the names of any remote registry configurations defined herein.
++   */
++  List<String> getRemoteRegistryConfigurationNames();
++
++  /**
++   *
++   * @param name The name of the remote registry configuration
++   *
++   * @return The configuration associated with the specified name.
++   */
++  String getRemoteRegistryConfiguration(String name);
++
++  /**
++   *
++   * @return The name of a remote configuration registry client
++   */
++  String getRemoteConfigurationMonitorClientName();
++
 +}
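
For orientation only, here is a hypothetical sketch (not part of the commit) of how a caller
might consume the remote configuration registry accessors added to GatewayConfig above. The
monitor type "Ambari" and the configuration value format are assumptions.

    import java.util.List;
    import org.apache.knox.gateway.config.GatewayConfig;

    // Hypothetical sketch: dump the remote registry configuration entries and the
    // cluster monitor settings exposed by the new GatewayConfig accessors.
    public class RemoteRegistryConfigDump {
      public static void dump(GatewayConfig config) {
        List<String> names = config.getRemoteRegistryConfigurationNames();
        for (String name : names) {
          // The value format (e.g. "type=ZooKeeper;address=host:2181") is an assumption.
          System.out.println(name + " -> " + config.getRemoteRegistryConfiguration(name));
        }
        if (config.isClusterMonitorEnabled("Ambari")) {   // monitor type is illustrative
          System.out.println("Polling interval: "
              + config.getClusterMonitorPollingInterval("Ambari") + "s");
        }
      }
    }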

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-spi/src/main/java/org/apache/knox/gateway/services/GatewayServices.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/main/java/org/apache/knox/gateway/services/GatewayServices.java
index 4a30800,0000000..8912c98
mode 100644,000000..100644
--- a/gateway-spi/src/main/java/org/apache/knox/gateway/services/GatewayServices.java
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/services/GatewayServices.java
@@@ -1,46 -1,0 +1,50 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services;
 +
 +import java.util.Collection;
 +
 +import org.apache.knox.gateway.deploy.ProviderDeploymentContributor;
 +
 +public interface GatewayServices extends Service,
 +    ProviderDeploymentContributor {
 +
 +  public static final String GATEWAY_CLUSTER_ATTRIBUTE = "org.apache.knox.gateway.gateway.cluster";
 +  public static final String GATEWAY_SERVICES_ATTRIBUTE = "org.apache.knox.gateway.gateway.services";
 +
 +  public static final String SSL_SERVICE = "SSLService";
 +  public static final String CRYPTO_SERVICE = "CryptoService";
 +  public static final String ALIAS_SERVICE = "AliasService";
 +  public static final String KEYSTORE_SERVICE = "KeystoreService";
 +  public static final String TOKEN_SERVICE = "TokenService";
 +  public static final String SERVICE_REGISTRY_SERVICE = "ServiceRegistryService";
 +  public static final String HOST_MAPPING_SERVICE = "HostMappingService";
 +  public static final String SERVER_INFO_SERVICE = "ServerInfoService";
 +  public static final String TOPOLOGY_SERVICE = "TopologyService";
 +  public static final String SERVICE_DEFINITION_REGISTRY = "ServiceDefinitionRegistry";
 +  public static final String METRICS_SERVICE = "MetricsService";
 +
++  String REMOTE_REGISTRY_CLIENT_SERVICE = "RemoteConfigRegistryClientService";
++
++  String CLUSTER_CONFIGURATION_MONITOR_SERVICE = "ClusterConfigurationMonitorService";
++
 +  public abstract Collection<String> getServiceNames();
 +
 +  public abstract <T> T getService( String serviceName );
 +
 +}
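
As a rough illustration (not part of the commit), the new service name constants would be
resolved through the existing generic lookup on GatewayServices; the concrete service types
returned are assumptions and are left untyped here.

    import org.apache.knox.gateway.services.GatewayServices;

    // Hypothetical sketch: check that the newly named services can be resolved.
    public class ServiceLookupExample {
      public static void printAvailability(GatewayServices services) {
        Object registryClients =
            services.getService(GatewayServices.REMOTE_REGISTRY_CLIENT_SERVICE);
        Object clusterMonitors =
            services.getService(GatewayServices.CLUSTER_CONFIGURATION_MONITOR_SERVICE);
        System.out.println("Remote registry client service present: " + (registryClients != null));
        System.out.println("Cluster config monitor service present: " + (clusterMonitors != null));
      }
    }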

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --cc gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestConfig.java
index 8abf5aa,0000000..79a9292
mode 100644,000000..100644
--- a/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestConfig.java
+++ b/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestConfig.java
@@@ -1,617 -1,0 +1,653 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import org.apache.commons.lang.StringUtils;
 +import org.apache.hadoop.conf.Configuration;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +
 +import java.io.File;
 +import java.net.InetSocketAddress;
 +import java.net.UnknownHostException;
 +import java.util.ArrayList;
++import java.util.Collections;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.concurrent.ConcurrentHashMap;
 +
 +public class GatewayTestConfig extends Configuration implements GatewayConfig {
 +
 +  /* Websocket defaults */
 +  public static final boolean DEFAULT_WEBSOCKET_FEATURE_ENABLED = false;
 +  public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE = Integer.MAX_VALUE;
 +  public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE = Integer.MAX_VALUE;
 +  public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE = 32768;
 +  public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE = 32768;
 +  public static final int DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE = 4096;
 +  public static final int DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT = 60000;
 +  public static final int DEFAULT_WEBSOCKET_IDLE_TIMEOUT = 300000;
 +
 +  private String gatewayHomeDir = "gateway-home";
 +  private String hadoopConfDir = "hadoop";
 +  private String gatewayHost = "localhost";
 +  private int gatewayPort = 0;
 +  private String gatewayPath = "gateway";
 +  private boolean hadoopKerberosSecured = false;
 +  private String kerberosConfig = "/etc/knox/conf/krb5.conf";
 +  private boolean kerberosDebugEnabled = false;
 +  private String kerberosLoginConfig = "/etc/knox/conf/krb5JAASLogin.conf";
 +  private String frontendUrl = null;
 +  private boolean xForwardedEnabled = true;
 +  private String gatewayApplicationsDir = null;
 +  private String gatewayServicesDir;
 +  private String defaultTopologyName = "default";
 +  private List<String> includedSSLCiphers = null;
 +  private List<String> excludedSSLCiphers = null;
 +  private boolean sslEnabled = false;
 +  private String truststoreType = "jks";
 +  private String keystoreType = "jks";
 +  private boolean isTopologyPortMappingEnabled = true;
 +  private ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<>();
 +  private int backupVersionLimit = -1;
 +  private long backupAgeLimit = -1;
 +
 +  public void setGatewayHomeDir( String gatewayHomeDir ) {
 +    this.gatewayHomeDir = gatewayHomeDir;
 +  }
 +
 +  public String getGatewayHomeDir() {
 +    return this.gatewayHomeDir;
 +  }
 +
 +  @Override
 +  public String getGatewayConfDir() {
 +    return gatewayHomeDir;
 +  }
 +
 +  @Override
 +  public String getGatewayDataDir() {
 +    return gatewayHomeDir;
 +  }
 +
 +  @Override
 +  public String getGatewaySecurityDir() {
 +    return gatewayHomeDir + "/security";
 +  }
 +
 +  @Override
 +  public String getGatewayTopologyDir() {
 +    return gatewayHomeDir + "/topologies";
 +  }
 +
 +  @Override
 +  public String getGatewayDeploymentDir() {
 +    return gatewayHomeDir + "/deployments";
 +  }
 +
 +//  public void setDeploymentDir( String clusterConfDir ) {
 +//    this.deployDir = clusterConfDir;
 +//  }
 +
 +  @Override
 +  public String getHadoopConfDir() {
 +    return hadoopConfDir;
 +  }
 +
 +//  public void setHadoopConfDir( String hadoopConfDir ) {
 +//    this.hadoopConfDir = hadoopConfDir;
 +//  }
 +
 +  @Override
 +  public String getGatewayHost() {
 +    return gatewayHost;
 +  }
 +
 +//  public void setGatewayHost( String gatewayHost ) {
 +//    this.gatewayHost = gatewayHost;
 +//  }
 +
 +  @Override
 +  public int getGatewayPort() {
 +    return gatewayPort;
 +  }
 +
 +//  public void setGatewayPort( int gatewayPort ) {
 +//    this.gatewayPort = gatewayPort;
 +//  }
 +
 +  @Override
 +  public String getGatewayPath() {
 +    return gatewayPath;
 +  }
 +
 +  public void setGatewayPath( String gatewayPath ) {
 +    this.gatewayPath = gatewayPath;
 +  }
 +
 +  @Override
 +  public InetSocketAddress getGatewayAddress() throws UnknownHostException {
 +    return new InetSocketAddress( getGatewayHost(), getGatewayPort() );
 +  }
 +
 +
 +  public long getGatewayIdleTimeout() {
 +    return 0L;
 +  }
 +
 +  @Override
 +  public boolean isSSLEnabled() {
 +    return sslEnabled;
 +  }
 +
 +  public void setSSLEnabled( boolean sslEnabled ) {
 +    this.sslEnabled = sslEnabled;
 +  }
 +
 +  @Override
 +  public boolean isHadoopKerberosSecured() {
 +    return hadoopKerberosSecured;
 +  }
 +
 +  public void setHadoopKerberosSecured(boolean hadoopKerberosSecured) {
 +    this.hadoopKerberosSecured = hadoopKerberosSecured;
 +  }
 +
 +  @Override
 +  public String getKerberosConfig() {
 +    return kerberosConfig;
 +  }
 +
 +  public void setKerberosConfig(String kerberosConfig) {
 +    this.kerberosConfig = kerberosConfig;
 +  }
 +
 +  @Override
 +  public boolean isKerberosDebugEnabled() {
 +    return kerberosDebugEnabled;
 +  }
 +
 +  public void setKerberosDebugEnabled(boolean kerberosDebugEnabled) {
 +    this.kerberosDebugEnabled = kerberosDebugEnabled;
 +  }
 +
 +  @Override
 +  public String getKerberosLoginConfig() {
 +    return kerberosLoginConfig;
 +  }
 +
 +  @Override
 +  public String getDefaultTopologyName() {
 +    return defaultTopologyName;
 +  }
 +
 +  public void setDefaultTopologyName( String defaultTopologyName ) {
 +    this.defaultTopologyName = defaultTopologyName;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getDefaultAppRedirectPath()
 +   */
 +  @Override
 +  public String getDefaultAppRedirectPath() {
 +
 +    if(StringUtils.isBlank(this.defaultTopologyName)) {
 +      return "/gateway/sandbox";
 +    } else {
 +      return "/gateway/"+this.defaultTopologyName;
 +    }
 +
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getFrontendUrl()
 +   */
 +  @Override
 +  public String getFrontendUrl() { return frontendUrl; }
 +
 +  public void setFrontendUrl( String frontendUrl ) {
 +    this.frontendUrl = frontendUrl;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getExcludedSSLProtocols()
 +   */
 +  @Override
 +  public List getExcludedSSLProtocols() {
 +    List<String> protocols = new ArrayList<String>();
 +    protocols.add("SSLv3");
 +    return protocols;
 +  }
 +
 +  @Override
 +  public List getIncludedSSLCiphers() {
 +    return includedSSLCiphers;
 +  }
 +
 +  public void setIncludedSSLCiphers( List<String> list ) {
 +    includedSSLCiphers = list;
 +  }
 +
 +  @Override
 +  public List getExcludedSSLCiphers() {
 +    return excludedSSLCiphers;
 +  }
 +
 +  public void setExcludedSSLCiphers( List<String> list ) {
 +    excludedSSLCiphers = list;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#isClientAuthNeeded()
 +   */
 +  @Override
 +  public boolean isClientAuthNeeded() {
 +    // TODO Auto-generated method stub
 +    return false;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTruststorePath()
 +   */
 +  @Override
 +  public String getTruststorePath() {
 +    // TODO Auto-generated method stub
 +    return null;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTrustAllCerts()
 +   */
 +  @Override
 +  public boolean getTrustAllCerts() {
 +    // TODO Auto-generated method stub
 +    return false;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTruststoreType()
 +   */
 +  @Override
 +  public String getTruststoreType() {
 +    return truststoreType;
 +  }
 +
 +  public void setTruststoreType( String truststoreType ) {
 +    this.truststoreType = truststoreType;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getKeystoreType()
 +   */
 +  @Override
 +  public String getKeystoreType() {
 +    return keystoreType;
 +  }
 +
 +  public void setKeystoreType( String keystoreType ) {
 +    this.keystoreType = keystoreType;
 +  }
 +
 +  public void setKerberosLoginConfig(String kerberosLoginConfig) {
 +   this.kerberosLoginConfig = kerberosLoginConfig;
 +  }
 +
 +   @Override
 +   public String getGatewayServicesDir() {
 +    if( gatewayServicesDir != null ) {
 +      return gatewayServicesDir;
 +    } else {
 +      File targetDir = new File( System.getProperty( "user.dir" ), "target/services" );
 +      return targetDir.getPath();
 +    }
 +  }
 +
 +  public void setGatewayServicesDir( String gatewayServicesDir ) {
 +    this.gatewayServicesDir = gatewayServicesDir;
 +  }
 +
 +  @Override
 +  public String getGatewayApplicationsDir() {
 +    if( gatewayApplicationsDir != null ) {
 +      return gatewayApplicationsDir;
 +    } else {
 +      return getGatewayConfDir() + "/applications";
 +    }
 +  }
 +
 +  public void setGatewayApplicationsDir( String gatewayApplicationsDir ) {
 +    this.gatewayApplicationsDir = gatewayApplicationsDir;
 +   }
 +
 +  @Override
 +  public boolean isXForwardedEnabled() {
 +    return xForwardedEnabled;
 +  }
 +
 +  public void setXForwardedEnabled(boolean enabled) {
 +    xForwardedEnabled = enabled;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getEphemeralDHKeySize()
 +   */
 +  @Override
 +  public String getEphemeralDHKeySize() {
 +    return "2048";
 +  }
 +
 +  @Override
 +  public int getHttpClientMaxConnections() {
 +    return 16;
 +  }
 +
 +  @Override
 +  public int getHttpClientConnectionTimeout() {
 +    return -1;
 +  }
 +
 +  @Override
 +  public int getHttpClientSocketTimeout() {
 +    return -1;
 +  }
 +
 +  @Override
 +  public int getThreadPoolMax() {
-     return 16;
++    return 254;
 +  }
 +
 +  @Override
 +  public int getHttpServerRequestBuffer() {
 +    return 16*1024;
 +  }
 +
 +  @Override
 +  public int getHttpServerRequestHeaderBuffer() {
 +    return 8*1024;
 +  }
 +
 +  @Override
 +  public int getHttpServerResponseBuffer() {
 +    return 32*1024;
 +  }
 +
 +  @Override
 +  public int getHttpServerResponseHeaderBuffer() {
 +    return 8*1024;
 +  }
 +
 +  public void setGatewayDeploymentsBackupVersionLimit( int newBackupVersionLimit ) {
 +    backupVersionLimit = newBackupVersionLimit;
 +  }
 +
 +  public int getGatewayDeploymentsBackupVersionLimit() {
 +    return backupVersionLimit;
 +  }
 +
 +  public void setTopologyPortMapping(ConcurrentHashMap<String, Integer> topologyPortMapping) {
 +    this.topologyPortMapping = topologyPortMapping;
 +  }
 +
 +  public void setGatewayPortMappingEnabled(
 +      boolean topologyPortMappingEnabled) {
 +    isTopologyPortMappingEnabled = topologyPortMappingEnabled;
 +  }
 +
 +  @Override
 +  public long getGatewayDeploymentsBackupAgeLimit() {
 +    return backupAgeLimit;
 +  }
 +
 +  public void setGatewayDeploymentsBackupAgeLimit( long newBackupAgeLimit ) {
 +    backupAgeLimit = newBackupAgeLimit;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getSigningKeystoreName()
 +   */
 +  @Override
 +  public String getSigningKeystoreName() {
 +    return null;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getSigningKeyAlias()
 +   */
 +  @Override
 +  public String getSigningKeyAlias() {
 +    return null;
 +  }
 +
 +  @Override
 +  public List<String> getGlobalRulesServices() {
 +    ArrayList<String> services = new ArrayList<>();
 +    services.add("WEBHDFS");
 +    services.add("HBASE");
 +    services.add("HIVE");
 +    services.add("OOZIE");
 +    services.add("RESOURCEMANAGER");
 +    services.add("STORM");
 +    return services;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#isWebsocketEnabled()
 +   */
 +  @Override
 +  public boolean isWebsocketEnabled() {
 +    return DEFAULT_WEBSOCKET_FEATURE_ENABLED;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketMaxTextMessageSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxTextMessageSize() {
 +    return DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketMaxBinaryMessageSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxBinaryMessageSize() {
 +    return DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketMaxTextMessageBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxTextMessageBufferSize() {
 +    return DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketMaxBinaryMessageBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxBinaryMessageBufferSize() {
 +    return DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketInputBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketInputBufferSize() {
 +    return DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketAsyncWriteTimeout()
 +   */
 +  @Override
 +  public int getWebsocketAsyncWriteTimeout() {
 +    return DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketIdleTimeout()
 +   */
 +  @Override
 +  public int getWebsocketIdleTimeout() {
 +    return DEFAULT_WEBSOCKET_IDLE_TIMEOUT;
 +  }
 +
 +  @Override
 +  public boolean isMetricsEnabled() {
 +    return false;
 +  }
 +
 +  @Override
 +  public boolean isJmxMetricsReportingEnabled() {
 +    return false;
 +  }
 +
 +  @Override
 +  public boolean isGraphiteMetricsReportingEnabled() {
 +    return false;
 +  }
 +
 +  @Override
 +  public String getGraphiteHost() {
 +    return null;
 +  }
 +
 +  @Override
 +  public int getGraphitePort() {
 +    return 0;
 +  }
 +
 +  @Override
 +  public int getGraphiteReportingFrequency() {
 +    return 0;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getMimeTypesToCompress()
 +   */
 +  @Override
 +  public List<String> getMimeTypesToCompress() {
 +    return new ArrayList<String>();
 +  }
 +
 +  @Override
 +  public boolean isCookieScopingToPathEnabled() {
 +    return false;
 +  }
 +
 +  @Override
 +  public String getHeaderNameForRemoteAddress() {
 +    return "X-Forwarded-For";
 +  }
 +
 +  @Override
 +  public String getAlgorithm() {
 +    return null;
 +  }
 +
 +  @Override
 +  public String getPBEAlgorithm() {
 +    return null;
 +  }
 +
 +  @Override
 +  public String getTransformation() {
 +    return null;
 +  }
 +
 +  @Override
 +  public String getSaltSize() {
 +    return null;
 +  }
 +
 +  @Override
 +  public String getIterationCount() {
 +    return null;
 +  }
 +
 +  @Override
 +  public String getKeyLength() {
 +    return null;
 +  }
 +
 +  /**
 +   * Map of Topology names and their ports.
 +   *
 +   * @return
 +   */
 +  @Override
 +  public Map<String, Integer> getGatewayPortMappings() {
 +    return topologyPortMapping;
 +  }
 +
 +  /**
 +   * Is the Port Mapping feature on?
 +   *
 +   * @return
 +   */
 +  @Override
 +  public boolean isGatewayPortMappingEnabled() {
 +    return isTopologyPortMappingEnabled;
 +  }
 +
 +  @Override
 +  public boolean isGatewayServerHeaderEnabled() {
 +	return false;
 +  }
 +
 +  @Override
 +  public boolean isClientAuthWanted() {
 +    return false;
 +  }
++
++  @Override
++  public String getGatewayProvidersConfigDir() {
++    return null;
++  }
++
++  @Override
++  public String getGatewayDescriptorsDir() {
++    return null;
++  }
++
++  @Override
++  public List<String> getRemoteRegistryConfigurationNames() {
++    return Collections.emptyList();
++  }
++
++  @Override
++  public String getRemoteRegistryConfiguration(String s) {
++    return null;
++  }
++
++  @Override
++  public String getRemoteConfigurationMonitorClientName() {
++    return null;
++  }
++
++  @Override
++  public int getClusterMonitorPollingInterval(String type) {
++    return 600;
++  }
++
++  @Override
++  public boolean isClusterMonitorEnabled(String type) {
++    return false;
++  }
 +}
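
For context, a minimal sketch (not part of the commit) of how GatewayTestConfig is typically
wired up in a unit test; the topology name "sandbox" and port 8445 are illustrative values only.

    import java.util.concurrent.ConcurrentHashMap;
    import org.apache.knox.gateway.GatewayTestConfig;

    // Hypothetical sketch: build a test configuration with port mapping enabled.
    public class GatewayTestConfigUsage {
      public static GatewayTestConfig newConfig() {
        GatewayTestConfig config = new GatewayTestConfig();
        config.setGatewayPath("gateway");
        config.setSSLEnabled(false);
        ConcurrentHashMap<String, Integer> portMappings = new ConcurrentHashMap<>();
        portMappings.put("sandbox", 8445);   // illustrative topology/port pair
        config.setTopologyPortMapping(portMappings);
        config.setGatewayPortMappingEnabled(true);
        return config;
      }
    }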

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-test-utils/src/main/java/org/apache/knox/test/TestUtils.java
----------------------------------------------------------------------
diff --cc gateway-test-utils/src/main/java/org/apache/knox/test/TestUtils.java
index 5437ce1,0000000..e5ed5c9
mode 100644,000000..100644
--- a/gateway-test-utils/src/main/java/org/apache/knox/test/TestUtils.java
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/TestUtils.java
@@@ -1,216 -1,0 +1,216 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.test;
 +
 +import java.io.File;
 +import java.io.FileNotFoundException;
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.io.InputStreamReader;
 +import java.io.Reader;
 +import java.io.StringWriter;
 +import java.net.HttpURLConnection;
 +import java.net.InetSocketAddress;
 +import java.net.ServerSocket;
 +import java.net.Socket;
 +import java.net.URL;
 +import java.nio.ByteBuffer;
 +import java.util.Properties;
 +import java.util.UUID;
 +import java.util.concurrent.TimeUnit;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.commons.io.IOUtils;
 +import org.apache.log4j.Logger;
 +import org.apache.velocity.Template;
 +import org.apache.velocity.VelocityContext;
 +import org.apache.velocity.app.VelocityEngine;
 +import org.apache.velocity.runtime.RuntimeConstants;
 +import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
 +import org.eclipse.jetty.http.HttpTester;
 +import org.eclipse.jetty.servlet.ServletTester;
 +
 +public class TestUtils {
 +
 +  private static Logger LOG = Logger.getLogger(TestUtils.class);
 +
 +  public static final long SHORT_TIMEOUT = 1000L;
-   public static final long MEDIUM_TIMEOUT = 20 * 1000L;
++  public static final long MEDIUM_TIMEOUT = 30 * 1000L;
 +  public static final long LONG_TIMEOUT = 60 * 1000L;
 +
 +  public static String getResourceName( Class clazz, String name ) {
 +    name = clazz.getName().replaceAll( "\\.", "/" ) + "/" + name;
 +    return name;
 +  }
 +
 +  public static URL getResourceUrl( Class clazz, String name ) throws FileNotFoundException {
 +    name = getResourceName( clazz, name );
 +    URL url = ClassLoader.getSystemResource( name );
 +    if( url == null ) {
 +      throw new FileNotFoundException( name );
 +    }
 +    return url;
 +  }
 +
 +  public static URL getResourceUrl( String name ) throws FileNotFoundException {
 +    URL url = ClassLoader.getSystemResource( name );
 +    if( url == null ) {
 +      throw new FileNotFoundException( name );
 +    }
 +    return url;
 +  }
 +
 +  public static InputStream getResourceStream( String name ) throws IOException {
 +    URL url = ClassLoader.getSystemResource( name );
 +    InputStream stream = url.openStream();
 +    return stream;
 +  }
 +
 +  public static InputStream getResourceStream( Class clazz, String name ) throws IOException {
 +    URL url = getResourceUrl( clazz, name );
 +    InputStream stream = url.openStream();
 +    return stream;
 +  }
 +
 +  public static Reader getResourceReader( String name, String charset ) throws IOException {
 +    return new InputStreamReader( getResourceStream( name ), charset );
 +  }
 +
 +  public static Reader getResourceReader( Class clazz, String name, String charset ) throws IOException {
 +    return new InputStreamReader( getResourceStream( clazz, name ), charset );
 +  }
 +
 +  public static String getResourceString( Class clazz, String name, String charset ) throws IOException {
 +    return IOUtils.toString( getResourceReader( clazz, name, charset ) );
 +  }
 +
 +  public static File createTempDir( String prefix ) throws IOException {
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File tempDir = new File( targetDir, prefix + UUID.randomUUID() );
 +    FileUtils.forceMkdir( tempDir );
 +    return tempDir;
 +  }
 +
 +  public static void LOG_ENTER() {
 +    StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
 +    System.out.flush();
 +    System.out.println( String.format( "Running %s#%s", caller.getClassName(), caller.getMethodName() ) );
 +    System.out.flush();
 +  }
 +
 +  public static void LOG_EXIT() {
 +    StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
 +    System.out.flush();
 +    System.out.println( String.format( "Exiting %s#%s", caller.getClassName(), caller.getMethodName() ) );
 +    System.out.flush();
 +  }
 +
 +  public static void awaitPortOpen( InetSocketAddress address, int timeout, int delay ) throws InterruptedException {
 +    long maxTime = System.currentTimeMillis() + timeout;
 +    do {
 +      try {
 +        Socket socket = new Socket();
 +        socket.connect( address, delay );
 +        socket.close();
 +        return;
 +      } catch ( IOException e ) {
 +        //e.printStackTrace();
 +      }
 +    } while( System.currentTimeMillis() < maxTime );
 +    throw new IllegalStateException( "Timed out " + timeout + " waiting for port " + address );
 +  }
 +
 +  public static void awaitNon404HttpStatus( URL url, int timeout, int delay ) throws InterruptedException {
 +    long maxTime = System.currentTimeMillis() + timeout;
 +    do {
 +      Thread.sleep( delay );
 +      HttpURLConnection conn = null;
 +      try {
 +        conn = (HttpURLConnection)url.openConnection();
 +        conn.getInputStream().close();
 +        return;
 +      } catch ( IOException e ) {
 +        //e.printStackTrace();
 +        try {
 +          if( conn != null && conn.getResponseCode() != 404 ) {
 +            return;
 +          }
 +        } catch ( IOException ee ) {
 +          //ee.printStackTrace();
 +        }
 +      }
 +    } while( System.currentTimeMillis() < maxTime );
 +    throw new IllegalStateException( "Timed out " + timeout + " waiting for URL " + url );
 +  }
 +
 +  public static String merge( String resource, Properties properties ) {
 +    ClasspathResourceLoader loader = new ClasspathResourceLoader();
 +    loader.getResourceStream( resource );
 +
 +    VelocityEngine engine = new VelocityEngine();
 +    Properties config = new Properties();
 +    config.setProperty( RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.NullLogSystem" );
 +    config.setProperty( RuntimeConstants.RESOURCE_LOADER, "classpath" );
 +    config.setProperty( "classpath.resource.loader.class", ClasspathResourceLoader.class.getName() );
 +    engine.init( config );
 +
 +    VelocityContext context = new VelocityContext( properties );
 +    Template template = engine.getTemplate( resource );
 +    StringWriter writer = new StringWriter();
 +    template.merge( context, writer );
 +    return writer.toString();
 +  }
 +
 +  public static String merge( Class base, String resource, Properties properties ) {
 +    String baseResource = base.getName().replaceAll( "\\.", "/" );
 +    String fullResource = baseResource + "/" + resource;
 +    return merge( fullResource, properties );
 +  }
 +
 +  public static int findFreePort() throws IOException {
 +    ServerSocket socket = new ServerSocket(0);
 +    int port = socket.getLocalPort();
 +    socket.close();
 +    return port;
 +  }
 +
 +  public static void waitUntilNextSecond() {
 +    long before = System.currentTimeMillis();
 +    long wait;
 +    while( ( wait = ( 1000 - ( System.currentTimeMillis() - before ) ) ) > 0 ) {
 +      try {
 +        Thread.sleep( wait );
 +      } catch( InterruptedException e ) {
 +        // Ignore.
 +      }
 +    }
 +  }
 +
 +  public static HttpTester.Response execute( ServletTester server, HttpTester.Request request ) throws Exception {
 +    LOG.debug( "execute: request=" + request );
 +    ByteBuffer requestBuffer = request.generate();
 +    LOG.trace( "execute: requestBuffer=[" + new String(requestBuffer.array(),0,requestBuffer.limit()) + "]" );
 +    ByteBuffer responseBuffer = server.getResponses( requestBuffer, 30, TimeUnit.SECONDS );
 +    HttpTester.Response response = HttpTester.parseResponse( responseBuffer );
 +    LOG.trace( "execute: responseBuffer=[" + new String(responseBuffer.array(),0,responseBuffer.limit()) + "]" );
 +    LOG.debug( "execute: response=" + response );
 +    return response;
 +  }
 +
 +
 +}
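
As a usage note (not part of the commit), a minimal sketch of how the polling helpers above
are typically combined in an integration test; the URL and retry delay are illustrative only.

    import java.net.URL;
    import org.apache.knox.test.TestUtils;

    // Hypothetical sketch: wait for a gateway endpoint to come up before asserting on it.
    public class TestUtilsUsage {
      public static void waitForGateway() throws Exception {
        int port = TestUtils.findFreePort();   // reserve a free local port for the test server
        URL url = new URL("http://localhost:" + port + "/gateway/sandbox");
        // Poll for up to MEDIUM_TIMEOUT, retrying every 100 ms, until a non-404 response
        // is seen (an IllegalStateException is thrown if the timeout elapses first).
        TestUtils.awaitNon404HttpStatus(url, (int) TestUtils.MEDIUM_TIMEOUT, 100);
      }
    }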


[20/49] knox git commit: KNOX-1119 - Add missing tests for valid and invalid id attributes

Posted by mo...@apache.org.
KNOX-1119 - Add missing tests for valid and invalid id attributes

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/a8fbf800
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/a8fbf800
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/a8fbf800

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: a8fbf80096f5fc6363fc7e5e182d29f2d57af080
Parents: 844506f
Author: Larry McCay <lm...@hortonworks.com>
Authored: Fri Dec 1 08:54:18 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Fri Dec 1 08:54:18 2017 -0500

----------------------------------------------------------------------
 .../hadoop/gateway/pac4j/Pac4jProviderTest.java | 185 +++++++++++++++++++
 1 file changed, 185 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/a8fbf800/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/Pac4jProviderTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/Pac4jProviderTest.java b/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/Pac4jProviderTest.java
index 0da156f..39e5531 100644
--- a/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/Pac4jProviderTest.java
+++ b/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/Pac4jProviderTest.java
@@ -76,6 +76,98 @@ public class Pac4jProviderTest {
         when(config.getServletContext()).thenReturn(context);
         when(config.getInitParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_URL)).thenReturn(PAC4J_CALLBACK_URL);
         when(config.getInitParameter("clientName")).thenReturn(Pac4jDispatcherFilter.TEST_BASIC_AUTH);
+
+        final Pac4jDispatcherFilter dispatcher = new Pac4jDispatcherFilter();
+        dispatcher.init(config);
+        final Pac4jIdentityAdapter adapter = new Pac4jIdentityAdapter();
+        adapter.init(config);
+        Pac4jIdentityAdapter.setAuditor(mock(Auditor.class));
+        final AuditService auditService = mock(AuditService.class);
+        when(auditService.getContext()).thenReturn(mock(AuditContext.class));
+        Pac4jIdentityAdapter.setAuditService(auditService);
+
+        // step 1: call the KnoxSSO service with an original url pointing to an Hadoop service (redirected by the SSOCookieProvider)
+        MockHttpServletRequest request = new MockHttpServletRequest();
+        request.setRequestURL(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL);
+        request.setCookies(new Cookie[0]);
+        request.setServerName(LOCALHOST);
+        MockHttpServletResponse response = new MockHttpServletResponse();
+        FilterChain filterChain = mock(FilterChain.class);
+        dispatcher.doFilter(request, response, filterChain);
+        // it should be a redirection to the idp topology
+        assertEquals(302, response.getStatus());
+        assertEquals(PAC4J_CALLBACK_URL + "?" + Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER + "=true&" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + CLIENT_CLASS, response.getHeaders().get("Location"));
+        // we should have one cookie for the saved requested url
+        List<Cookie> cookies = response.getCookies();
+        assertEquals(1, cookies.size());
+        final Cookie requestedUrlCookie = cookies.get(0);
+        assertEquals(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.REQUESTED_URL, requestedUrlCookie.getName());
+
+        // step 2: send credentials to the callback url (callback from the identity provider)
+        request = new MockHttpServletRequest();
+        request.setCookies(new Cookie[]{requestedUrlCookie});
+        request.setRequestURL(PAC4J_CALLBACK_URL + "?" + Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER + "=true&" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + CLIENT_CLASS);
+        request.addParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER, "true");
+        request.addParameter(Clients.DEFAULT_CLIENT_NAME_PARAMETER, CLIENT_CLASS);
+        request.addHeader("Authorization", "Basic amxlbGV1OmpsZWxldQ==");
+        request.setServerName(LOCALHOST);
+        response = new MockHttpServletResponse();
+        filterChain = mock(FilterChain.class);
+        dispatcher.doFilter(request, response, filterChain);
+        // it should be a redirection to the original url
+        assertEquals(302, response.getStatus());
+        assertEquals(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL, response.getHeaders().get("Location"));
+        // we should have 3 cookies, among them the user profile
+        cookies = response.getCookies();
+        Map<String, String> mapCookies = new HashMap<>();
+        assertEquals(3, cookies.size());
+        for (final Cookie cookie : cookies) {
+            mapCookies.put(cookie.getName(), cookie.getValue());
+        }
+        assertNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + CLIENT_CLASS + "$attemptedAuthentication"));
+        assertNotNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.USER_PROFILES));
+        assertNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.REQUESTED_URL));
+
+        // step 3: turn pac4j identity into KnoxSSO identity
+        request = new MockHttpServletRequest();
+        request.setCookies(cookies.toArray(new Cookie[cookies.size()]));
+        request.setRequestURL(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL);
+        request.setServerName(LOCALHOST);
+        response = new MockHttpServletResponse();
+        filterChain = mock(FilterChain.class);
+        dispatcher.doFilter(request, response, filterChain);
+        assertEquals(0, response.getStatus());
+        adapter.doFilter(request, response, filterChain);
+        cookies = response.getCookies();
+        assertEquals(1, cookies.size());
+        final Cookie userProfileCookie = cookies.get(0);
+        // the user profile has been cleaned
+        assertEquals(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.USER_PROFILES, userProfileCookie.getName());
+        assertNull(userProfileCookie.getValue());
+        assertEquals(USERNAME, adapter.getTestIdentifier());
+    }
+
+    @Test
+    public void testValidIdAttribute() throws Exception {
+        final AliasService aliasService = mock(AliasService.class);
+        when(aliasService.getPasswordFromAliasForCluster(CLUSTER_NAME, KnoxSessionStore.PAC4J_PASSWORD, true)).thenReturn(PAC4J_PASSWORD.toCharArray());
+        when(aliasService.getPasswordFromAliasForCluster(CLUSTER_NAME, KnoxSessionStore.PAC4J_PASSWORD)).thenReturn(PAC4J_PASSWORD.toCharArray());
+
+        final DefaultCryptoService cryptoService = new DefaultCryptoService();
+        cryptoService.setAliasService(aliasService);
+
+        final GatewayServices services = mock(GatewayServices.class);
+        when(services.getService(GatewayServices.CRYPTO_SERVICE)).thenReturn(cryptoService);
+        when(services.getService(GatewayServices.ALIAS_SERVICE)).thenReturn(aliasService);
+
+        final ServletContext context = mock(ServletContext.class);
+        when(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).thenReturn(services);
+        when(context.getAttribute(GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE)).thenReturn(CLUSTER_NAME);
+
+        final FilterConfig config = mock(FilterConfig.class);
+        when(config.getServletContext()).thenReturn(context);
+        when(config.getInitParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_URL)).thenReturn(PAC4J_CALLBACK_URL);
+        when(config.getInitParameter("clientName")).thenReturn(Pac4jDispatcherFilter.TEST_BASIC_AUTH);
         when(config.getInitParameter(Pac4jIdentityAdapter.PAC4J_ID_ATTRIBUTE)).thenReturn("username");
 
         final Pac4jDispatcherFilter dispatcher = new Pac4jDispatcherFilter();
@@ -147,4 +239,97 @@ public class Pac4jProviderTest {
         assertNull(userProfileCookie.getValue());
         assertEquals(USERNAME, adapter.getTestIdentifier());
     }
+    @Test
+    public void testInvalidIdAttribute() throws Exception {
+        final AliasService aliasService = mock(AliasService.class);
+        when(aliasService.getPasswordFromAliasForCluster(CLUSTER_NAME, KnoxSessionStore.PAC4J_PASSWORD, true)).thenReturn(PAC4J_PASSWORD.toCharArray());
+        when(aliasService.getPasswordFromAliasForCluster(CLUSTER_NAME, KnoxSessionStore.PAC4J_PASSWORD)).thenReturn(PAC4J_PASSWORD.toCharArray());
+
+        final DefaultCryptoService cryptoService = new DefaultCryptoService();
+        cryptoService.setAliasService(aliasService);
+
+        final GatewayServices services = mock(GatewayServices.class);
+        when(services.getService(GatewayServices.CRYPTO_SERVICE)).thenReturn(cryptoService);
+        when(services.getService(GatewayServices.ALIAS_SERVICE)).thenReturn(aliasService);
+
+        final ServletContext context = mock(ServletContext.class);
+        when(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).thenReturn(services);
+        when(context.getAttribute(GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE)).thenReturn(CLUSTER_NAME);
+
+        final FilterConfig config = mock(FilterConfig.class);
+        when(config.getServletContext()).thenReturn(context);
+        when(config.getInitParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_URL)).thenReturn(PAC4J_CALLBACK_URL);
+        when(config.getInitParameter("clientName")).thenReturn(Pac4jDispatcherFilter.TEST_BASIC_AUTH);
+        when(config.getInitParameter(Pac4jIdentityAdapter.PAC4J_ID_ATTRIBUTE)).thenReturn("larry");
+
+        final Pac4jDispatcherFilter dispatcher = new Pac4jDispatcherFilter();
+        dispatcher.init(config);
+        final Pac4jIdentityAdapter adapter = new Pac4jIdentityAdapter();
+        adapter.init(config);
+        Pac4jIdentityAdapter.setAuditor(mock(Auditor.class));
+        final AuditService auditService = mock(AuditService.class);
+        when(auditService.getContext()).thenReturn(mock(AuditContext.class));
+        Pac4jIdentityAdapter.setAuditService(auditService);
+
+        // step 1: call the KnoxSSO service with an original url pointing to an Hadoop service (redirected by the SSOCookieProvider)
+        MockHttpServletRequest request = new MockHttpServletRequest();
+        request.setRequestURL(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL);
+        request.setCookies(new Cookie[0]);
+        request.setServerName(LOCALHOST);
+        MockHttpServletResponse response = new MockHttpServletResponse();
+        FilterChain filterChain = mock(FilterChain.class);
+        dispatcher.doFilter(request, response, filterChain);
+        // it should be a redirection to the idp topology
+        assertEquals(302, response.getStatus());
+        assertEquals(PAC4J_CALLBACK_URL + "?" + Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER + "=true&" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + CLIENT_CLASS, response.getHeaders().get("Location"));
+        // we should have one cookie for the saved requested url
+        List<Cookie> cookies = response.getCookies();
+        assertEquals(1, cookies.size());
+        final Cookie requestedUrlCookie = cookies.get(0);
+        assertEquals(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.REQUESTED_URL, requestedUrlCookie.getName());
+
+        // step 2: send credentials to the callback url (callback from the identity provider)
+        request = new MockHttpServletRequest();
+        request.setCookies(new Cookie[]{requestedUrlCookie});
+        request.setRequestURL(PAC4J_CALLBACK_URL + "?" + Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER + "=true&" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + CLIENT_CLASS);
+        request.addParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER, "true");
+        request.addParameter(Clients.DEFAULT_CLIENT_NAME_PARAMETER, CLIENT_CLASS);
+        request.addHeader("Authorization", "Basic amxlbGV1OmpsZWxldQ==");
+        request.setServerName(LOCALHOST);
+        response = new MockHttpServletResponse();
+        filterChain = mock(FilterChain.class);
+        dispatcher.doFilter(request, response, filterChain);
+        // it should be a redirection to the original url
+        assertEquals(302, response.getStatus());
+        assertEquals(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL, response.getHeaders().get("Location"));
+        // we should have 3 cookies, among them the user profile
+        cookies = response.getCookies();
+        Map<String, String> mapCookies = new HashMap<>();
+        assertEquals(3, cookies.size());
+        for (final Cookie cookie : cookies) {
+            mapCookies.put(cookie.getName(), cookie.getValue());
+        }
+        assertNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + CLIENT_CLASS + "$attemptedAuthentication"));
+        assertNotNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.USER_PROFILES));
+        assertNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.REQUESTED_URL));
+
+        // step 3: turn pac4j identity into KnoxSSO identity
+        request = new MockHttpServletRequest();
+        request.setCookies(cookies.toArray(new Cookie[cookies.size()]));
+        request.setRequestURL(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL);
+        request.setServerName(LOCALHOST);
+        response = new MockHttpServletResponse();
+        filterChain = mock(FilterChain.class);
+        dispatcher.doFilter(request, response, filterChain);
+        assertEquals(0, response.getStatus());
+        adapter.doFilter(request, response, filterChain);
+        cookies = response.getCookies();
+        assertEquals(1, cookies.size());
+        final Cookie userProfileCookie = cookies.get(0);
+        // the user profile has been cleaned
+        assertEquals(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.USER_PROFILES, userProfileCookie.getName());
+        assertNull(userProfileCookie.getValue());
+        assertEquals(USERNAME, adapter.getTestIdentifier());
+    }
+
 }


[17/49] knox git commit: KNOX-1134 - Regression due to KNOX-1119

Posted by mo...@apache.org.
KNOX-1134 - Regression due to KNOX-1119


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/27217ead
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/27217ead
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/27217ead

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 27217ead9a67bcded8978133a9d7abe40ebc1d1a
Parents: d835af9
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Fri Dec 1 12:40:07 2017 +0000
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Fri Dec 1 12:40:07 2017 +0000

----------------------------------------------------------------------
 .../hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java     | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/27217ead/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java b/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
index 1ec0491..20084b1 100644
--- a/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
+++ b/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
@@ -78,8 +78,11 @@ public class Pac4jIdentityAdapter implements Filter {
       logger.debug("User authenticated as: {}", profile);
       manager.remove(true);
       String id = null;
-      if (idAttribute == null) {
-        id = profile.getAttribute(idAttribute).toString();
+      if (idAttribute != null) {
+        Object attribute = profile.getAttribute(idAttribute);
+        if (attribute != null) {
+          id = attribute.toString();
+        }
         if (id == null) {
           logger.error("Invalid attribute_id: {} configured to be used as principal"
               + " falling back to default id", idAttribute);
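
To make the intent of the fix explicit, a standalone sketch (not part of the patch) of the
corrected principal resolution; falling back to profile.getId() when the configured attribute
is missing is an assumption about code outside this hunk.

    import org.pac4j.core.profile.CommonProfile;

    // Hypothetical sketch: resolve the principal id from a configured attribute,
    // mirroring the corrected null checks in Pac4jIdentityAdapter.
    public class PrincipalIdResolver {
      public static String resolve(CommonProfile profile, String idAttribute) {
        String id = null;
        if (idAttribute != null) {
          Object attribute = profile.getAttribute(idAttribute);
          if (attribute != null) {
            id = attribute.toString();
          }
        }
        return (id != null) ? id : profile.getId();   // assumed default-id fallback
      }
    }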


[48/49] knox git commit: KNOX-998 - Merge from trunk 0.14.0 code

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java
deleted file mode 100644
index 2d8b276..0000000
--- a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java
+++ /dev/null
@@ -1,319 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-import org.easymock.EasyMock;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.File;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-
-public class AmbariConfigurationMonitorTest {
-
-    private File dataDir = null;
-
-    @Before
-    public void setup() throws Exception {
-        File targetDir = new File( System.getProperty("user.dir"), "target");
-        File tempDir = new File(targetDir, this.getClass().getName() + "__data__" + UUID.randomUUID());
-        FileUtils.forceMkdir(tempDir);
-        dataDir = tempDir;
-    }
-
-    @After
-    public void tearDown() throws Exception {
-        dataDir.delete();
-    }
-
-    @Test
-    public void testPollingMonitor() throws Exception {
-        final String addr1 = "http://host1:8080";
-        final String addr2 = "http://host2:8080";
-        final String cluster1Name = "Cluster_One";
-        final String cluster2Name = "Cluster_Two";
-
-
-        GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
-        EasyMock.expect(config.getGatewayDataDir()).andReturn(dataDir.getAbsolutePath()).anyTimes();
-        EasyMock.expect(config.getClusterMonitorPollingInterval(AmbariConfigurationMonitor.getType()))
-                .andReturn(10)
-                .anyTimes();
-        EasyMock.replay(config);
-
-        // Create the monitor
-        TestableAmbariConfigurationMonitor monitor = new TestableAmbariConfigurationMonitor(config);
-
-        // Clear the system property now that the monitor has been initialized
-        System.clearProperty(AmbariConfigurationMonitor.INTERVAL_PROPERTY_NAME);
-
-
-        // Sequence of config changes for testing monitoring for updates
-        Map<String, Map<String, List<List<AmbariCluster.ServiceConfiguration>>>> updateConfigurations = new HashMap<>();
-
-        updateConfigurations.put(addr1, new HashMap<>());
-        updateConfigurations.get(addr1).put(cluster1Name, Arrays.asList(Arrays.asList(createTestServiceConfig("zoo.cfg", "3"),
-                                                                                      createTestServiceConfig("hive-site", "2")),
-                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "3"),
-                                                                                      createTestServiceConfig("hive-site", "3")),
-                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "2"),
-                                                                                      createTestServiceConfig("hive-site", "1"))));
-
-        updateConfigurations.put(addr2, new HashMap<>());
-        updateConfigurations.get(addr2).put(cluster2Name, Arrays.asList(Arrays.asList(createTestServiceConfig("zoo.cfg", "1"),
-                                                                                      createTestServiceConfig("hive-site", "1")),
-                                                                        Collections.singletonList(createTestServiceConfig("zoo.cfg", "1")),
-                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "1"),
-                                                                                      createTestServiceConfig("hive-site", "2"))));
-
-        updateConfigurations.get(addr2).put(cluster1Name, Arrays.asList(Arrays.asList(createTestServiceConfig("zoo.cfg", "2"),
-                                                                                      createTestServiceConfig("hive-site", "4")),
-                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "3"),
-                                                                                      createTestServiceConfig("hive-site", "4"),
-                                                                                      createTestServiceConfig("yarn-site", "1")),
-                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "1"),
-                                                                                      createTestServiceConfig("hive-site", "2"))));
-
-        Map<String, Map<String, Integer>> configChangeIndex = new HashMap<>();
-        configChangeIndex.put(addr1, new HashMap<>());
-        configChangeIndex.get(addr1).put(cluster1Name, 0);
-        configChangeIndex.get(addr1).put(cluster2Name, 0);
-        configChangeIndex.put(addr2, new HashMap<>());
-        configChangeIndex.get(addr2).put(cluster2Name, 0);
-
-        // Setup the initial test update data
-        // Cluster 1 data change
-        monitor.addTestConfigVersion(addr1, cluster1Name, "zoo.cfg", "2");
-        monitor.addTestConfigVersion(addr1, cluster1Name, "hive-site", "1");
-
-        // Cluster 2 NO data change
-        monitor.addTestConfigVersion(addr2, cluster1Name, "zoo.cfg", "1");
-        monitor.addTestConfigVersion(addr2, cluster1Name, "hive-site", "1");
-
-        // Cluster 3 data change
-        monitor.addTestConfigVersion(addr2, cluster2Name, "zoo.cfg", "1");
-        monitor.addTestConfigVersion(addr2, cluster2Name, "hive-site", "2");
-
-        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> initialAmbariClusterConfigs = new HashMap<>();
-
-        Map<String, AmbariCluster.ServiceConfiguration> cluster1Configs = new HashMap<>();
-        AmbariCluster.ServiceConfiguration zooCfg = createTestServiceConfig("zoo.cfg", "1");
-        cluster1Configs.put("ZOOKEEPER", zooCfg);
-
-        AmbariCluster.ServiceConfiguration hiveSite = createTestServiceConfig("hive-site", "1");
-        cluster1Configs.put("Hive", hiveSite);
-
-        initialAmbariClusterConfigs.put(cluster1Name, cluster1Configs);
-        AmbariCluster cluster1 = createTestCluster(cluster1Name, initialAmbariClusterConfigs);
-
-        // Tell the monitor about the cluster configurations
-        monitor.addClusterConfigVersions(cluster1, createTestDiscoveryConfig(addr1));
-
-        monitor.addClusterConfigVersions(createTestCluster(cluster2Name, initialAmbariClusterConfigs),
-                                         createTestDiscoveryConfig(addr2));
-
-        monitor.addClusterConfigVersions(createTestCluster(cluster1Name, initialAmbariClusterConfigs),
-                                         createTestDiscoveryConfig(addr2));
-
-        final Map<String, Map<String, Integer>> changeNotifications = new HashMap<>();
-        monitor.addListener((src, cname) -> {
-//            System.out.println("Cluster config changed: " + cname + " @ " + src);
-            // Record the notification
-            Integer notificationCount  = changeNotifications.computeIfAbsent(src, s -> new HashMap<>())
-                                                            .computeIfAbsent(cname, c -> Integer.valueOf(0));
-            changeNotifications.get(src).put(cname, (notificationCount+=1));
-
-            // Update the config version
-            int changeIndex = configChangeIndex.get(src).get(cname);
-            if (changeIndex < updateConfigurations.get(src).get(cname).size()) {
-                List<AmbariCluster.ServiceConfiguration> changes = updateConfigurations.get(src).get(cname).get(changeIndex);
-
-//                System.out.println("Applying config update " + changeIndex + " to " + cname + " @ " + src + " ...");
-                for (AmbariCluster.ServiceConfiguration change : changes) {
-                    monitor.updateConfigState(src, cname, change.getType(), change.getVersion());
-//                    System.out.println("    Updated " + change.getType() + " to version " + change.getVersion());
-                }
-
-                // Increment the change index
-                configChangeIndex.get(src).replace(cname, changeIndex + 1);
-
-//                System.out.println("Monitor config updated for " + cname + " @ " + src + " : " + changeIndex );
-            }
-        });
-
-        try {
-            monitor.start();
-
-            long expiration = System.currentTimeMillis() + (1000 * 30);
-            while (!areChangeUpdatesExhausted(updateConfigurations, configChangeIndex)
-                                                                        && (System.currentTimeMillis() < expiration)) {
-                try {
-                    Thread.sleep(5);
-                } catch (InterruptedException e) {
-                    //
-                }
-            }
-
-        } finally {
-            monitor.stop();
-        }
-
-        assertNotNull("Expected changes to have been reported for source 1.",
-                      changeNotifications.get(addr1));
-
-        assertEquals("Expected changes to have been reported.",
-                     3, changeNotifications.get(addr1).get(cluster1Name).intValue());
-
-        assertNotNull("Expected changes to have been reported for source 2.",
-                      changeNotifications.get(addr2));
-
-        assertEquals("Expected changes to have been reported.",
-                     3, changeNotifications.get(addr2).get(cluster2Name).intValue());
-
-        assertNull("Expected changes to have been reported.",
-                   changeNotifications.get(addr2).get(cluster1Name));
-    }
-
-
-    private static boolean areChangeUpdatesExhausted(Map<String, Map<String, List<List<AmbariCluster.ServiceConfiguration>>>> updates,
-                                              Map<String, Map<String, Integer>> configChangeIndeces) {
-        boolean isExhausted = true;
-
-        for (String address : updates.keySet()) {
-            Map<String, List<List<AmbariCluster.ServiceConfiguration>>> clusterConfigs = updates.get(address);
-            for (String clusterName : clusterConfigs.keySet()) {
-                Integer configChangeCount = clusterConfigs.get(clusterName).size();
-                if (configChangeIndeces.get(address).containsKey(clusterName)) {
-                    if (configChangeIndeces.get(address).get(clusterName) < configChangeCount) {
-                        isExhausted = false;
-                        break;
-                    }
-                }
-            }
-        }
-
-        return isExhausted;
-    }
-
-    /**
-     *
-     * @param name           The cluster name
-     * @param serviceConfigs A map of service configurations (keyed by service name)
-     *
-     * @return
-     */
-    private AmbariCluster createTestCluster(String name,
-                                            Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigs) {
-        AmbariCluster c = EasyMock.createNiceMock(AmbariCluster.class);
-        EasyMock.expect(c.getName()).andReturn(name).anyTimes();
-        EasyMock.expect(c.getServiceConfigurations()).andReturn(serviceConfigs).anyTimes();
-        EasyMock.replay(c);
-        return c;
-    }
-
-    private AmbariCluster.ServiceConfiguration createTestServiceConfig(String name, String version) {
-        AmbariCluster.ServiceConfiguration sc = EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
-        EasyMock.expect(sc.getType()).andReturn(name).anyTimes();
-        EasyMock.expect(sc.getVersion()).andReturn(version).anyTimes();
-        EasyMock.replay(sc);
-        return sc;
-    }
-
-    private ServiceDiscoveryConfig createTestDiscoveryConfig(String address) {
-        return createTestDiscoveryConfig(address, null, null);
-    }
-
-    private ServiceDiscoveryConfig createTestDiscoveryConfig(String address, String username, String pwdAlias) {
-        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
-        EasyMock.expect(sdc.getAddress()).andReturn(address).anyTimes();
-        EasyMock.expect(sdc.getUser()).andReturn(username).anyTimes();
-        EasyMock.expect(sdc.getPasswordAlias()).andReturn(pwdAlias).anyTimes();
-        EasyMock.replay(sdc);
-        return sdc;
-    }
-
-    /**
-     * AmbariConfigurationMonitor extension that replaces the collection of updated configuration data with a static
-     * mechanism rather than the REST invocation mechanism.
-     */
-    private static final class TestableAmbariConfigurationMonitor extends AmbariConfigurationMonitor {
-
-        Map<String, Map<String, Map<String, String>>> configVersionData = new HashMap<>();
-
-        TestableAmbariConfigurationMonitor(GatewayConfig config) {
-            super(config, null);
-        }
-
-        void addTestConfigVersion(String address, String clusterName, String configType, String configVersion) {
-            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
-                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
-                             .put(configType, configVersion);
-        }
-
-        void addTestConfigVersions(String address, String clusterName, Map<String, String> configVersions) {
-            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
-                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
-                             .putAll(configVersions);
-        }
-
-        void updateTestConfigVersion(String address, String clusterName, String configType, String updatedVersions) {
-            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
-                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
-                             .replace(configType, updatedVersions);
-        }
-
-        void updateTestConfigVersions(String address, String clusterName, Map<String, String> updatedVersions) {
-            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
-                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
-                             .replaceAll((k,v) -> updatedVersions.get(k));
-        }
-
-        void updateConfigState(String address, String clusterName, String configType, String configVersion) {
-            configVersionsLock.writeLock().lock();
-            try {
-                if (ambariClusterConfigVersions.containsKey(address)) {
-                    ambariClusterConfigVersions.get(address).get(clusterName).replace(configType, configVersion);
-                }
-            } finally {
-                configVersionsLock.writeLock().unlock();
-            }
-        }
-
-        @Override
-        Map<String, String> getUpdatedConfigVersions(String address, String clusterName) {
-            Map<String, Map<String, String>> clusterConfigVersions = configVersionData.get(address);
-            if (clusterConfigVersions != null) {
-                return clusterConfigVersions.get(clusterName);
-            }
-            return null;
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java
new file mode 100644
index 0000000..7411545
--- /dev/null
+++ b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java
@@ -0,0 +1,319 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+public class AmbariConfigurationMonitorTest {
+
+    private File dataDir = null;
+
+    @Before
+    public void setup() throws Exception {
+        File targetDir = new File( System.getProperty("user.dir"), "target");
+        File tempDir = new File(targetDir, this.getClass().getName() + "__data__" + UUID.randomUUID());
+        FileUtils.forceMkdir(tempDir);
+        dataDir = tempDir;
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        dataDir.delete();
+    }
+
+    @Test
+    public void testPollingMonitor() throws Exception {
+        final String addr1 = "http://host1:8080";
+        final String addr2 = "http://host2:8080";
+        final String cluster1Name = "Cluster_One";
+        final String cluster2Name = "Cluster_Two";
+
+
+        GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+        EasyMock.expect(config.getGatewayDataDir()).andReturn(dataDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(config.getClusterMonitorPollingInterval(AmbariConfigurationMonitor.getType()))
+                .andReturn(10)
+                .anyTimes();
+        EasyMock.replay(config);
+
+        // Create the monitor
+        TestableAmbariConfigurationMonitor monitor = new TestableAmbariConfigurationMonitor(config);
+
+        // Clear the system property now that the monitor has been initialized
+        System.clearProperty(AmbariConfigurationMonitor.INTERVAL_PROPERTY_NAME);
+
+
+        // Sequence of config changes for testing monitoring for updates
+        Map<String, Map<String, List<List<AmbariCluster.ServiceConfiguration>>>> updateConfigurations = new HashMap<>();
+
+        updateConfigurations.put(addr1, new HashMap<>());
+        updateConfigurations.get(addr1).put(cluster1Name, Arrays.asList(Arrays.asList(createTestServiceConfig("zoo.cfg", "3"),
+                                                                                      createTestServiceConfig("hive-site", "2")),
+                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "3"),
+                                                                                      createTestServiceConfig("hive-site", "3")),
+                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "2"),
+                                                                                      createTestServiceConfig("hive-site", "1"))));
+
+        updateConfigurations.put(addr2, new HashMap<>());
+        updateConfigurations.get(addr2).put(cluster2Name, Arrays.asList(Arrays.asList(createTestServiceConfig("zoo.cfg", "1"),
+                                                                                      createTestServiceConfig("hive-site", "1")),
+                                                                        Collections.singletonList(createTestServiceConfig("zoo.cfg", "1")),
+                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "1"),
+                                                                                      createTestServiceConfig("hive-site", "2"))));
+
+        updateConfigurations.get(addr2).put(cluster1Name, Arrays.asList(Arrays.asList(createTestServiceConfig("zoo.cfg", "2"),
+                                                                                      createTestServiceConfig("hive-site", "4")),
+                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "3"),
+                                                                                      createTestServiceConfig("hive-site", "4"),
+                                                                                      createTestServiceConfig("yarn-site", "1")),
+                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "1"),
+                                                                                      createTestServiceConfig("hive-site", "2"))));
+
+        Map<String, Map<String, Integer>> configChangeIndex = new HashMap<>();
+        configChangeIndex.put(addr1, new HashMap<>());
+        configChangeIndex.get(addr1).put(cluster1Name, 0);
+        configChangeIndex.get(addr1).put(cluster2Name, 0);
+        configChangeIndex.put(addr2, new HashMap<>());
+        configChangeIndex.get(addr2).put(cluster2Name, 0);
+
+        // Setup the initial test update data
+        // Cluster 1 data change
+        monitor.addTestConfigVersion(addr1, cluster1Name, "zoo.cfg", "2");
+        monitor.addTestConfigVersion(addr1, cluster1Name, "hive-site", "1");
+
+        // Cluster 2 NO data change
+        monitor.addTestConfigVersion(addr2, cluster1Name, "zoo.cfg", "1");
+        monitor.addTestConfigVersion(addr2, cluster1Name, "hive-site", "1");
+
+        // Cluster 3 data change
+        monitor.addTestConfigVersion(addr2, cluster2Name, "zoo.cfg", "1");
+        monitor.addTestConfigVersion(addr2, cluster2Name, "hive-site", "2");
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> initialAmbariClusterConfigs = new HashMap<>();
+
+        Map<String, AmbariCluster.ServiceConfiguration> cluster1Configs = new HashMap<>();
+        AmbariCluster.ServiceConfiguration zooCfg = createTestServiceConfig("zoo.cfg", "1");
+        cluster1Configs.put("ZOOKEEPER", zooCfg);
+
+        AmbariCluster.ServiceConfiguration hiveSite = createTestServiceConfig("hive-site", "1");
+        cluster1Configs.put("Hive", hiveSite);
+
+        initialAmbariClusterConfigs.put(cluster1Name, cluster1Configs);
+        AmbariCluster cluster1 = createTestCluster(cluster1Name, initialAmbariClusterConfigs);
+
+        // Tell the monitor about the cluster configurations
+        monitor.addClusterConfigVersions(cluster1, createTestDiscoveryConfig(addr1));
+
+        monitor.addClusterConfigVersions(createTestCluster(cluster2Name, initialAmbariClusterConfigs),
+                                         createTestDiscoveryConfig(addr2));
+
+        monitor.addClusterConfigVersions(createTestCluster(cluster1Name, initialAmbariClusterConfigs),
+                                         createTestDiscoveryConfig(addr2));
+
+        final Map<String, Map<String, Integer>> changeNotifications = new HashMap<>();
+        monitor.addListener((src, cname) -> {
+//            System.out.println("Cluster config changed: " + cname + " @ " + src);
+            // Record the notification
+            Integer notificationCount  = changeNotifications.computeIfAbsent(src, s -> new HashMap<>())
+                                                            .computeIfAbsent(cname, c -> Integer.valueOf(0));
+            changeNotifications.get(src).put(cname, (notificationCount+=1));
+
+            // Update the config version
+            int changeIndex = configChangeIndex.get(src).get(cname);
+            if (changeIndex < updateConfigurations.get(src).get(cname).size()) {
+                List<AmbariCluster.ServiceConfiguration> changes = updateConfigurations.get(src).get(cname).get(changeIndex);
+
+//                System.out.println("Applying config update " + changeIndex + " to " + cname + " @ " + src + " ...");
+                for (AmbariCluster.ServiceConfiguration change : changes) {
+                    monitor.updateConfigState(src, cname, change.getType(), change.getVersion());
+//                    System.out.println("    Updated " + change.getType() + " to version " + change.getVersion());
+                }
+
+                // Increment the change index
+                configChangeIndex.get(src).replace(cname, changeIndex + 1);
+
+//                System.out.println("Monitor config updated for " + cname + " @ " + src + " : " + changeIndex );
+            }
+        });
+
+        try {
+            monitor.start();
+
+            long expiration = System.currentTimeMillis() + (1000 * 30);
+            while (!areChangeUpdatesExhausted(updateConfigurations, configChangeIndex)
+                                                                        && (System.currentTimeMillis() < expiration)) {
+                try {
+                    Thread.sleep(5);
+                } catch (InterruptedException e) {
+                    //
+                }
+            }
+
+        } finally {
+            monitor.stop();
+        }
+
+        assertNotNull("Expected changes to have been reported for source 1.",
+                      changeNotifications.get(addr1));
+
+        assertEquals("Expected changes to have been reported.",
+                     3, changeNotifications.get(addr1).get(cluster1Name).intValue());
+
+        assertNotNull("Expected changes to have been reported for source 2.",
+                      changeNotifications.get(addr2));
+
+        assertEquals("Expected changes to have been reported.",
+                     3, changeNotifications.get(addr2).get(cluster2Name).intValue());
+
+        assertNull("Expected changes to have been reported.",
+                   changeNotifications.get(addr2).get(cluster1Name));
+    }
+
+
+    private static boolean areChangeUpdatesExhausted(Map<String, Map<String, List<List<AmbariCluster.ServiceConfiguration>>>> updates,
+                                              Map<String, Map<String, Integer>> configChangeIndeces) {
+        boolean isExhausted = true;
+
+        for (String address : updates.keySet()) {
+            Map<String, List<List<AmbariCluster.ServiceConfiguration>>> clusterConfigs = updates.get(address);
+            for (String clusterName : clusterConfigs.keySet()) {
+                Integer configChangeCount = clusterConfigs.get(clusterName).size();
+                if (configChangeIndeces.get(address).containsKey(clusterName)) {
+                    if (configChangeIndeces.get(address).get(clusterName) < configChangeCount) {
+                        isExhausted = false;
+                        break;
+                    }
+                }
+            }
+        }
+
+        return isExhausted;
+    }
+
+    /**
+     *
+     * @param name           The cluster name
+     * @param serviceConfigs A map of service configurations (keyed by service name)
+     *
+     * @return
+     */
+    private AmbariCluster createTestCluster(String name,
+                                            Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigs) {
+        AmbariCluster c = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(c.getName()).andReturn(name).anyTimes();
+        EasyMock.expect(c.getServiceConfigurations()).andReturn(serviceConfigs).anyTimes();
+        EasyMock.replay(c);
+        return c;
+    }
+
+    private AmbariCluster.ServiceConfiguration createTestServiceConfig(String name, String version) {
+        AmbariCluster.ServiceConfiguration sc = EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+        EasyMock.expect(sc.getType()).andReturn(name).anyTimes();
+        EasyMock.expect(sc.getVersion()).andReturn(version).anyTimes();
+        EasyMock.replay(sc);
+        return sc;
+    }
+
+    private ServiceDiscoveryConfig createTestDiscoveryConfig(String address) {
+        return createTestDiscoveryConfig(address, null, null);
+    }
+
+    private ServiceDiscoveryConfig createTestDiscoveryConfig(String address, String username, String pwdAlias) {
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(address).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(username).anyTimes();
+        EasyMock.expect(sdc.getPasswordAlias()).andReturn(pwdAlias).anyTimes();
+        EasyMock.replay(sdc);
+        return sdc;
+    }
+
+    /**
+     * AmbariConfigurationMonitor extension that replaces the collection of updated configuration data with a static
+     * mechanism rather than the REST invocation mechanism.
+     */
+    private static final class TestableAmbariConfigurationMonitor extends AmbariConfigurationMonitor {
+
+        Map<String, Map<String, Map<String, String>>> configVersionData = new HashMap<>();
+
+        TestableAmbariConfigurationMonitor(GatewayConfig config) {
+            super(config, null);
+        }
+
+        void addTestConfigVersion(String address, String clusterName, String configType, String configVersion) {
+            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
+                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
+                             .put(configType, configVersion);
+        }
+
+        void addTestConfigVersions(String address, String clusterName, Map<String, String> configVersions) {
+            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
+                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
+                             .putAll(configVersions);
+        }
+
+        void updateTestConfigVersion(String address, String clusterName, String configType, String updatedVersions) {
+            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
+                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
+                             .replace(configType, updatedVersions);
+        }
+
+        void updateTestConfigVersions(String address, String clusterName, Map<String, String> updatedVersions) {
+            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
+                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
+                             .replaceAll((k,v) -> updatedVersions.get(k));
+        }
+
+        void updateConfigState(String address, String clusterName, String configType, String configVersion) {
+            configVersionsLock.writeLock().lock();
+            try {
+                if (ambariClusterConfigVersions.containsKey(address)) {
+                    ambariClusterConfigVersions.get(address).get(clusterName).replace(configType, configVersion);
+                }
+            } finally {
+                configVersionsLock.writeLock().unlock();
+            }
+        }
+
+        @Override
+        Map<String, String> getUpdatedConfigVersions(String address, String clusterName) {
+            Map<String, Map<String, String>> clusterConfigVersions = configVersionData.get(address);
+            if (clusterConfigVersions != null) {
+                return clusterConfigVersions.get(clusterName);
+            }
+            return null;
+        }
+    }
+
+}
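
The test above drives AmbariConfigurationMonitor entirely through its listener
callback: each change notification applies the next scripted set of configuration
versions, and the test then loops until all scripted updates are consumed or a
30-second deadline passes. A minimal sketch of registering such a listener, using
the same (source, clusterName) callback shape as the test:

    // Minimal listener sketch; "monitor" is an AmbariConfigurationMonitor as in the test above.
    monitor.addListener((source, clusterName) ->
        System.out.println("Configuration changed for " + clusterName + " @ " + source));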

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java
deleted file mode 100644
index 342ce11..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.services.topology.impl;
-
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.topology.ClusterConfigurationMonitorService;
-import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
-import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.ServiceLoader;
-
-
-public class DefaultClusterConfigurationMonitorService implements ClusterConfigurationMonitorService {
-
-    private AliasService aliasService = null;
-
-    private Map<String, ClusterConfigurationMonitor> monitors = new HashMap<>();
-
-    @Override
-    public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
-        ServiceLoader<ClusterConfigurationMonitorProvider> providers =
-                                                        ServiceLoader.load(ClusterConfigurationMonitorProvider.class);
-        for (ClusterConfigurationMonitorProvider provider : providers) {
-            // Check the gateway configuration to determine if this type of monitor is enabled
-            if (config.isClusterMonitorEnabled(provider.getType())) {
-                ClusterConfigurationMonitor monitor = provider.newInstance(config, aliasService);
-                if (monitor != null) {
-                    monitors.put(provider.getType(), monitor);
-                }
-            }
-        }
-    }
-
-    @Override
-    public void start() {
-        for (ClusterConfigurationMonitor monitor : monitors.values()) {
-            monitor.start();
-        }
-    }
-
-    @Override
-    public void stop() {
-        for (ClusterConfigurationMonitor monitor : monitors.values()) {
-            monitor.stop();
-        }
-    }
-
-    @Override
-    public ClusterConfigurationMonitor getMonitor(String type) {
-        return monitors.get(type);
-    }
-
-    @Override
-    public void addListener(ClusterConfigurationMonitor.ConfigurationChangeListener listener) {
-        for (ClusterConfigurationMonitor monitor : monitors.values()) {
-            monitor.addListener(listener);
-        }
-    }
-
-    public void setAliasService(AliasService aliasService) {
-        this.aliasService = aliasService;
-    }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java
deleted file mode 100644
index 7b34e3d..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.monitor;
-
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-
-
-public class DefaultConfigurationMonitorProvider implements RemoteConfigurationMonitorProvider {
-
-    @Override
-    public RemoteConfigurationMonitor newInstance(final GatewayConfig                            config,
-                                                  final RemoteConfigurationRegistryClientService clientService) {
-        return new DefaultRemoteConfigurationMonitor(config, clientService);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
deleted file mode 100644
index af60058..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.monitor;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.gateway.GatewayMessages;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.ChildEntryListener;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.EntryListener;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-import org.apache.zookeeper.ZooDefs;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-
-class DefaultRemoteConfigurationMonitor implements RemoteConfigurationMonitor {
-
-    private static final String NODE_KNOX = "/knox";
-    private static final String NODE_KNOX_CONFIG = NODE_KNOX + "/config";
-    private static final String NODE_KNOX_PROVIDERS = NODE_KNOX_CONFIG + "/shared-providers";
-    private static final String NODE_KNOX_DESCRIPTORS = NODE_KNOX_CONFIG + "/descriptors";
-
-    private static GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
-
-    // N.B. This is ZooKeeper-specific, and should be abstracted when another registry is supported
-    private static final RemoteConfigurationRegistryClient.EntryACL AUTHENTICATED_USERS_ALL;
-    static {
-        AUTHENTICATED_USERS_ALL = new RemoteConfigurationRegistryClient.EntryACL() {
-            public String getId() {
-                return "";
-            }
-
-            public String getType() {
-                return "auth";
-            }
-
-            public Object getPermissions() {
-                return ZooDefs.Perms.ALL;
-            }
-
-            public boolean canRead() {
-                return true;
-            }
-
-            public boolean canWrite() {
-                return true;
-            }
-        };
-    }
-
-    private RemoteConfigurationRegistryClient client = null;
-
-    private File providersDir;
-    private File descriptorsDir;
-
-    /**
-     * @param config                The gateway configuration
-     * @param registryClientService The service from which the remote registry client should be acquired.
-     */
-    DefaultRemoteConfigurationMonitor(GatewayConfig                            config,
-                                      RemoteConfigurationRegistryClientService registryClientService) {
-        this.providersDir   = new File(config.getGatewayProvidersConfigDir());
-        this.descriptorsDir = new File(config.getGatewayDescriptorsDir());
-
-        if (registryClientService != null) {
-            String clientName = config.getRemoteConfigurationMonitorClientName();
-            if (clientName != null) {
-                this.client = registryClientService.get(clientName);
-                if (this.client == null) {
-                    log.unresolvedClientConfigurationForRemoteMonitoring(clientName);
-                }
-            } else {
-                log.missingClientConfigurationForRemoteMonitoring();
-            }
-        }
-    }
-
-    @Override
-    public void start() throws Exception {
-        if (client == null) {
-            throw new IllegalStateException("Failed to acquire a remote configuration registry client.");
-        }
-
-        final String monitorSource = client.getAddress();
-        log.startingRemoteConfigurationMonitor(monitorSource);
-
-        // Ensure the existence of the expected entries and their associated ACLs
-        ensureEntries();
-
-        // Confirm access to the remote provider configs directory znode
-        List<String> providerConfigs = client.listChildEntries(NODE_KNOX_PROVIDERS);
-        if (providerConfigs == null) {
-            // Either the ZNode does not exist, or there is an authentication problem
-            throw new IllegalStateException("Unable to access remote path: " + NODE_KNOX_PROVIDERS);
-        }
-
-        // Confirm access to the remote descriptors directory znode
-        List<String> descriptors = client.listChildEntries(NODE_KNOX_DESCRIPTORS);
-        if (descriptors == null) {
-            // Either the ZNode does not exist, or there is an authentication problem
-            throw new IllegalStateException("Unable to access remote path: " + NODE_KNOX_DESCRIPTORS);
-        }
-
-        // Register a listener for provider config znode additions/removals
-        client.addChildEntryListener(NODE_KNOX_PROVIDERS, new ConfigDirChildEntryListener(providersDir));
-
-        // Register a listener for descriptor znode additions/removals
-        client.addChildEntryListener(NODE_KNOX_DESCRIPTORS, new ConfigDirChildEntryListener(descriptorsDir));
-
-        log.monitoringRemoteConfigurationSource(monitorSource);
-    }
-
-
-    @Override
-    public void stop() throws Exception {
-        client.removeEntryListener(NODE_KNOX_PROVIDERS);
-        client.removeEntryListener(NODE_KNOX_DESCRIPTORS);
-    }
-
-    private void ensureEntries() {
-        ensureEntry(NODE_KNOX);
-        ensureEntry(NODE_KNOX_CONFIG);
-        ensureEntry(NODE_KNOX_PROVIDERS);
-        ensureEntry(NODE_KNOX_DESCRIPTORS);
-    }
-
-    private void ensureEntry(String name) {
-        if (!client.entryExists(name)) {
-            client.createEntry(name);
-        } else {
-            // Validate the ACL
-            List<RemoteConfigurationRegistryClient.EntryACL> entryACLs = client.getACL(name);
-            for (RemoteConfigurationRegistryClient.EntryACL entryACL : entryACLs) {
-                // N.B. This is ZooKeeper-specific, and should be abstracted when another registry is supported
-                // For now, check for ZooKeeper world:anyone with ANY permissions (even read-only)
-                if (entryACL.getType().equals("world") && entryACL.getId().equals("anyone")) {
-                    log.suspectWritableRemoteConfigurationEntry(name);
-
-                    // If the client is authenticated, but "anyone" can write the content, then the content may not
-                    // be trustworthy.
-                    if (client.isAuthenticationConfigured()) {
-                        log.correctingSuspectWritableRemoteConfigurationEntry(name);
-
-                        // Replace the existing ACL with one that permits only authenticated users
-                        client.setACL(name, Collections.singletonList(AUTHENTICATED_USERS_ALL));
-                  }
-                }
-            }
-        }
-    }
-
-    private static class ConfigDirChildEntryListener implements ChildEntryListener {
-        File localDir;
-
-        ConfigDirChildEntryListener(File localDir) {
-            this.localDir = localDir;
-        }
-
-        @Override
-        public void childEvent(RemoteConfigurationRegistryClient client, Type type, String path) {
-            File localFile = new File(localDir, path.substring(path.lastIndexOf("/") + 1));
-
-            switch (type) {
-                case REMOVED:
-                    FileUtils.deleteQuietly(localFile);
-                    log.deletedRemoteConfigFile(localDir.getName(), localFile.getName());
-                    try {
-                        client.removeEntryListener(path);
-                    } catch (Exception e) {
-                        log.errorRemovingRemoteConfigurationListenerForPath(path, e);
-                    }
-                    break;
-                case ADDED:
-                    try {
-                        client.addEntryListener(path, new ConfigEntryListener(localDir));
-                    } catch (Exception e) {
-                        log.errorAddingRemoteConfigurationListenerForPath(path, e);
-                    }
-                    break;
-            }
-        }
-    }
-
-    private static class ConfigEntryListener implements EntryListener {
-        private File localDir;
-
-        ConfigEntryListener(File localDir) {
-            this.localDir = localDir;
-        }
-
-        @Override
-        public void entryChanged(RemoteConfigurationRegistryClient client, String path, byte[] data) {
-            File localFile = new File(localDir, path.substring(path.lastIndexOf("/")));
-            if (data != null) {
-                try {
-                    FileUtils.writeByteArrayToFile(localFile, data);
-                    log.downloadedRemoteConfigFile(localDir.getName(), localFile.getName());
-                } catch (IOException e) {
-                    log.errorDownloadingRemoteConfiguration(path, e);
-                }
-            } else {
-                FileUtils.deleteQuietly(localFile);
-                log.deletedRemoteConfigFile(localDir.getName(), localFile.getName());
-            }
-        }
-    }
-
-}
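
DefaultRemoteConfigurationMonitor, shown above, watches the /knox/config/shared-providers
and /knox/config/descriptors entries in the remote registry (ZooKeeper today) and mirrors
their children into the local providers and descriptors directories: added or changed
entries are written to disk, removed entries are deleted locally. A small sketch of that
mirroring step, expressed as a hypothetical helper with the same inputs as entryChanged(...):

    import org.apache.commons.io.FileUtils;
    import java.io.File;
    import java.io.IOException;

    // Hypothetical helper illustrating the mirroring logic above: write the entry's
    // payload to a local file, or delete the local copy when the payload is gone.
    static void mirrorEntry(File localDir, String path, byte[] data) throws IOException {
        File localFile = new File(localDir, path.substring(path.lastIndexOf('/') + 1));
        if (data != null) {
            FileUtils.writeByteArrayToFile(localFile, data);
        } else {
            FileUtils.deleteQuietly(localFile);
        }
    }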

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java
deleted file mode 100644
index 4d2df45..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.monitor;
-
-import org.apache.hadoop.gateway.GatewayMessages;
-import org.apache.hadoop.gateway.GatewayServer;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.services.GatewayServices;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-
-import java.util.ServiceLoader;
-
-public class RemoteConfigurationMonitorFactory {
-
-    private static final GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
-
-    private static RemoteConfigurationRegistryClientService remoteConfigRegistryClientService = null;
-
-    public static void setClientService(RemoteConfigurationRegistryClientService clientService) {
-        remoteConfigRegistryClientService = clientService;
-    }
-
-    private static RemoteConfigurationRegistryClientService getClientService() {
-        if (remoteConfigRegistryClientService == null) {
-            GatewayServices services = GatewayServer.getGatewayServices();
-            if (services != null) {
-                remoteConfigRegistryClientService = services.getService(GatewayServices.REMOTE_REGISTRY_CLIENT_SERVICE);
-            }
-        }
-
-        return remoteConfigRegistryClientService;
-    }
-
-    /**
-     *
-     * @param config The GatewayConfig
-     *
-     * @return The first RemoteConfigurationMonitor extension that is found.
-     */
-    public static RemoteConfigurationMonitor get(GatewayConfig config) {
-        RemoteConfigurationMonitor rcm = null;
-
-        ServiceLoader<RemoteConfigurationMonitorProvider> providers =
-                                                 ServiceLoader.load(RemoteConfigurationMonitorProvider.class);
-        for (RemoteConfigurationMonitorProvider provider : providers) {
-            try {
-                rcm = provider.newInstance(config, getClientService());
-                if (rcm != null) {
-                    break;
-                }
-            } catch (Exception e) {
-                log.remoteConfigurationMonitorInitFailure(e.getLocalizedMessage(), e);
-            }
-        }
-
-        return rcm;
-    }
-
-}
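
RemoteConfigurationMonitorFactory, shown above, locates monitor providers with
java.util.ServiceLoader and returns the first provider that yields a non-null monitor.
For ServiceLoader to find an implementation such as DefaultConfigurationMonitorProvider,
it is typically registered in a provider-configuration file on the classpath; a sketch,
assuming the standard ServiceLoader convention (the exact resource in the Knox tree is
not shown here):

    # META-INF/services/org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider
    org.apache.hadoop.gateway.topology.monitor.DefaultConfigurationMonitorProvider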

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/java/org/apache/knox/gateway/services/CLIGatewayServices.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/services/CLIGatewayServices.java b/gateway-server/src/main/java/org/apache/knox/gateway/services/CLIGatewayServices.java
index a1ed549..f168d44 100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/CLIGatewayServices.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/CLIGatewayServices.java
@@ -24,7 +24,7 @@ import org.apache.knox.gateway.descriptor.FilterParamDescriptor;
 import org.apache.knox.gateway.descriptor.ResourceDescriptor;
 import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceFactory;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
 import org.apache.knox.gateway.services.security.impl.DefaultAliasService;
 import org.apache.knox.gateway.services.security.impl.DefaultCryptoService;

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java b/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java
new file mode 100644
index 0000000..e7ef01d
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultClusterConfigurationMonitorService.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.services.topology.impl;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.ServiceLifecycleException;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.topology.ClusterConfigurationMonitorService;
+import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitor;
+import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitorProvider;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.ServiceLoader;
+
+
+public class DefaultClusterConfigurationMonitorService implements ClusterConfigurationMonitorService {
+
+    private AliasService aliasService = null;
+
+    private Map<String, ClusterConfigurationMonitor> monitors = new HashMap<>();
+
+    @Override
+    public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
+        ServiceLoader<ClusterConfigurationMonitorProvider> providers =
+                                                        ServiceLoader.load(ClusterConfigurationMonitorProvider.class);
+        for (ClusterConfigurationMonitorProvider provider : providers) {
+            // Check the gateway configuration to determine if this type of monitor is enabled
+            if (config.isClusterMonitorEnabled(provider.getType())) {
+                ClusterConfigurationMonitor monitor = provider.newInstance(config, aliasService);
+                if (monitor != null) {
+                    monitors.put(provider.getType(), monitor);
+                }
+            }
+        }
+    }
+
+    @Override
+    public void start() {
+        for (ClusterConfigurationMonitor monitor : monitors.values()) {
+            monitor.start();
+        }
+    }
+
+    @Override
+    public void stop() {
+        for (ClusterConfigurationMonitor monitor : monitors.values()) {
+            monitor.stop();
+        }
+    }
+
+    @Override
+    public ClusterConfigurationMonitor getMonitor(String type) {
+        return monitors.get(type);
+    }
+
+    @Override
+    public void addListener(ClusterConfigurationMonitor.ConfigurationChangeListener listener) {
+        for (ClusterConfigurationMonitor monitor : monitors.values()) {
+            monitor.addListener(listener);
+        }
+    }
+
+    public void setAliasService(AliasService aliasService) {
+        this.aliasService = aliasService;
+    }
+}
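
A note on the wiring above: DefaultClusterConfigurationMonitorService discovers ClusterConfigurationMonitorProvider implementations with java.util.ServiceLoader and keeps one monitor per enabled type. Below is a minimal sketch of such a provider, under the assumption that the provider interface declares only the two methods invoked in init() above (getType() and newInstance(GatewayConfig, AliasService)); the Example* names are hypothetical, and a real provider would also be listed in META-INF/services/org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitorProvider so that ServiceLoader can find it.

    package org.example.knox.monitor;  // hypothetical package, not part of the commit

    import org.apache.knox.gateway.config.GatewayConfig;
    import org.apache.knox.gateway.services.security.AliasService;
    import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitor;
    import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitorProvider;

    public class ExampleClusterConfigurationMonitorProvider implements ClusterConfigurationMonitorProvider {

        @Override
        public String getType() {
            // Must match a type for which config.isClusterMonitorEnabled(type) returns true
            return "EXAMPLE";
        }

        @Override
        public ClusterConfigurationMonitor newInstance(GatewayConfig config, AliasService aliasService) {
            // Construct and return the concrete monitor for this type; omitted in this sketch
            return null;
        }
    }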

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java
new file mode 100644
index 0000000..25bea08
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.monitor;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+
+public class DefaultConfigurationMonitorProvider implements RemoteConfigurationMonitorProvider {
+
+    @Override
+    public RemoteConfigurationMonitor newInstance(final GatewayConfig                            config,
+                                                  final RemoteConfigurationRegistryClientService clientService) {
+        return new DefaultRemoteConfigurationMonitor(config, clientService);
+    }
+
+}
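
The RemoteConfigurationMonitorProvider interface implemented here is not included in this excerpt. Judging from the implementation above and from RemoteConfigurationMonitorFactory further down, it presumably declares at least the following method (a sketch, not the actual source; any additional members are unknown):

    package org.apache.knox.gateway.topology.monitor;

    import org.apache.knox.gateway.config.GatewayConfig;
    import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;

    public interface RemoteConfigurationMonitorProvider {

        // Returns a monitor instance, or null if one cannot be created for this configuration
        RemoteConfigurationMonitor newInstance(GatewayConfig config,
                                               RemoteConfigurationRegistryClientService clientService);
    }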

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
new file mode 100644
index 0000000..efafee0
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
@@ -0,0 +1,228 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.monitor;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.knox.gateway.GatewayMessages;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient.ChildEntryListener;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient.EntryListener;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.zookeeper.ZooDefs;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+
+class DefaultRemoteConfigurationMonitor implements RemoteConfigurationMonitor {
+
+    private static final String NODE_KNOX = "/knox";
+    private static final String NODE_KNOX_CONFIG = NODE_KNOX + "/config";
+    private static final String NODE_KNOX_PROVIDERS = NODE_KNOX_CONFIG + "/shared-providers";
+    private static final String NODE_KNOX_DESCRIPTORS = NODE_KNOX_CONFIG + "/descriptors";
+
+    private static GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
+
+    // N.B. This is ZooKeeper-specific, and should be abstracted when another registry is supported
+    private static final RemoteConfigurationRegistryClient.EntryACL AUTHENTICATED_USERS_ALL;
+    static {
+        AUTHENTICATED_USERS_ALL = new RemoteConfigurationRegistryClient.EntryACL() {
+            public String getId() {
+                return "";
+            }
+
+            public String getType() {
+                return "auth";
+            }
+
+            public Object getPermissions() {
+                return ZooDefs.Perms.ALL;
+            }
+
+            public boolean canRead() {
+                return true;
+            }
+
+            public boolean canWrite() {
+                return true;
+            }
+        };
+    }
+
+    private RemoteConfigurationRegistryClient client = null;
+
+    private File providersDir;
+    private File descriptorsDir;
+
+    /**
+     * @param config                The gateway configuration
+     * @param registryClientService The service from which the remote registry client should be acquired.
+     */
+    DefaultRemoteConfigurationMonitor(GatewayConfig                            config,
+                                      RemoteConfigurationRegistryClientService registryClientService) {
+        this.providersDir   = new File(config.getGatewayProvidersConfigDir());
+        this.descriptorsDir = new File(config.getGatewayDescriptorsDir());
+
+        if (registryClientService != null) {
+            String clientName = config.getRemoteConfigurationMonitorClientName();
+            if (clientName != null) {
+                this.client = registryClientService.get(clientName);
+                if (this.client == null) {
+                    log.unresolvedClientConfigurationForRemoteMonitoring(clientName);
+                }
+            } else {
+                log.missingClientConfigurationForRemoteMonitoring();
+            }
+        }
+    }
+
+    @Override
+    public void start() throws Exception {
+        if (client == null) {
+            throw new IllegalStateException("Failed to acquire a remote configuration registry client.");
+        }
+
+        final String monitorSource = client.getAddress();
+        log.startingRemoteConfigurationMonitor(monitorSource);
+
+        // Ensure the existence of the expected entries and their associated ACLs
+        ensureEntries();
+
+        // Confirm access to the remote provider configs directory znode
+        List<String> providerConfigs = client.listChildEntries(NODE_KNOX_PROVIDERS);
+        if (providerConfigs == null) {
+            // Either the ZNode does not exist, or there is an authentication problem
+            throw new IllegalStateException("Unable to access remote path: " + NODE_KNOX_PROVIDERS);
+        }
+
+        // Confirm access to the remote descriptors directory znode
+        List<String> descriptors = client.listChildEntries(NODE_KNOX_DESCRIPTORS);
+        if (descriptors == null) {
+            // Either the ZNode does not exist, or there is an authentication problem
+            throw new IllegalStateException("Unable to access remote path: " + NODE_KNOX_DESCRIPTORS);
+        }
+
+        // Register a listener for provider config znode additions/removals
+        client.addChildEntryListener(NODE_KNOX_PROVIDERS, new ConfigDirChildEntryListener(providersDir));
+
+        // Register a listener for descriptor znode additions/removals
+        client.addChildEntryListener(NODE_KNOX_DESCRIPTORS, new ConfigDirChildEntryListener(descriptorsDir));
+
+        log.monitoringRemoteConfigurationSource(monitorSource);
+    }
+
+
+    @Override
+    public void stop() throws Exception {
+        client.removeEntryListener(NODE_KNOX_PROVIDERS);
+        client.removeEntryListener(NODE_KNOX_DESCRIPTORS);
+    }
+
+    private void ensureEntries() {
+        ensureEntry(NODE_KNOX);
+        ensureEntry(NODE_KNOX_CONFIG);
+        ensureEntry(NODE_KNOX_PROVIDERS);
+        ensureEntry(NODE_KNOX_DESCRIPTORS);
+    }
+
+    private void ensureEntry(String name) {
+        if (!client.entryExists(name)) {
+            client.createEntry(name);
+        } else {
+            // Validate the ACL
+            List<RemoteConfigurationRegistryClient.EntryACL> entryACLs = client.getACL(name);
+            for (RemoteConfigurationRegistryClient.EntryACL entryACL : entryACLs) {
+                // N.B. This is ZooKeeper-specific, and should be abstracted when another registry is supported
+                // For now, check for ZooKeeper world:anyone with ANY permissions (even read-only)
+                if (entryACL.getType().equals("world") && entryACL.getId().equals("anyone")) {
+                    log.suspectWritableRemoteConfigurationEntry(name);
+
+                    // If the client is authenticated, but "anyone" can write the content, then the content may not
+                    // be trustworthy.
+                    if (client.isAuthenticationConfigured()) {
+                        log.correctingSuspectWritableRemoteConfigurationEntry(name);
+
+                        // Replace the existing ACL with one that permits only authenticated users
+                        client.setACL(name, Collections.singletonList(AUTHENTICATED_USERS_ALL));
+                    }
+                }
+            }
+        }
+    }
+
+    private static class ConfigDirChildEntryListener implements ChildEntryListener {
+        File localDir;
+
+        ConfigDirChildEntryListener(File localDir) {
+            this.localDir = localDir;
+        }
+
+        @Override
+        public void childEvent(RemoteConfigurationRegistryClient client, Type type, String path) {
+            File localFile = new File(localDir, path.substring(path.lastIndexOf("/") + 1));
+
+            switch (type) {
+                case REMOVED:
+                    FileUtils.deleteQuietly(localFile);
+                    log.deletedRemoteConfigFile(localDir.getName(), localFile.getName());
+                    try {
+                        client.removeEntryListener(path);
+                    } catch (Exception e) {
+                        log.errorRemovingRemoteConfigurationListenerForPath(path, e);
+                    }
+                    break;
+                case ADDED:
+                    try {
+                        client.addEntryListener(path, new ConfigEntryListener(localDir));
+                    } catch (Exception e) {
+                        log.errorAddingRemoteConfigurationListenerForPath(path, e);
+                    }
+                    break;
+            }
+        }
+    }
+
+    private static class ConfigEntryListener implements EntryListener {
+        private File localDir;
+
+        ConfigEntryListener(File localDir) {
+            this.localDir = localDir;
+        }
+
+        @Override
+        public void entryChanged(RemoteConfigurationRegistryClient client, String path, byte[] data) {
+            File localFile = new File(localDir, path.substring(path.lastIndexOf("/")));
+            if (data != null) {
+                try {
+                    FileUtils.writeByteArrayToFile(localFile, data);
+                    log.downloadedRemoteConfigFile(localDir.getName(), localFile.getName());
+                } catch (IOException e) {
+                    log.errorDownloadingRemoteConfiguration(path, e);
+                }
+            } else {
+                FileUtils.deleteQuietly(localFile);
+                log.deletedRemoteConfigFile(localDir.getName(), localFile.getName());
+            }
+        }
+    }
+
+}
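
Taken together, the NODE_* constants and the constructor above give the monitor a fixed layout: it ensures the znodes exist, watches them, and mirrors their children into the local gateway configuration directories.

    /knox
      /config
        /shared-providers   -> mirrored into GatewayConfig.getGatewayProvidersConfigDir()
        /descriptors        -> mirrored into GatewayConfig.getGatewayDescriptorsDir()

Added or changed child entries are downloaded into the matching local directory and removed entries are deleted locally, via ConfigDirChildEntryListener and ConfigEntryListener.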

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java
new file mode 100644
index 0000000..d020532
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.monitor;
+
+import org.apache.knox.gateway.GatewayMessages;
+import org.apache.knox.gateway.GatewayServer;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+import java.util.ServiceLoader;
+
+public class RemoteConfigurationMonitorFactory {
+
+    private static final GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
+
+    private static RemoteConfigurationRegistryClientService remoteConfigRegistryClientService = null;
+
+    public static void setClientService(RemoteConfigurationRegistryClientService clientService) {
+        remoteConfigRegistryClientService = clientService;
+    }
+
+    private static RemoteConfigurationRegistryClientService getClientService() {
+        if (remoteConfigRegistryClientService == null) {
+            GatewayServices services = GatewayServer.getGatewayServices();
+            if (services != null) {
+                remoteConfigRegistryClientService = services.getService(GatewayServices.REMOTE_REGISTRY_CLIENT_SERVICE);
+            }
+        }
+
+        return remoteConfigRegistryClientService;
+    }
+
+    /**
+     *
+     * @param config The GatewayConfig
+     *
+     * @return The first RemoteConfigurationMonitor extension that is found.
+     */
+    public static RemoteConfigurationMonitor get(GatewayConfig config) {
+        RemoteConfigurationMonitor rcm = null;
+
+        ServiceLoader<RemoteConfigurationMonitorProvider> providers =
+                                                 ServiceLoader.load(RemoteConfigurationMonitorProvider.class);
+        for (RemoteConfigurationMonitorProvider provider : providers) {
+            try {
+                rcm = provider.newInstance(config, getClientService());
+                if (rcm != null) {
+                    break;
+                }
+            } catch (Exception e) {
+                log.remoteConfigurationMonitorInitFailure(e.getLocalizedMessage(), e);
+            }
+        }
+
+        return rcm;
+    }
+
+}
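
A minimal sketch of how a caller, for instance the gateway server at startup, might use this factory; the start() signature is confirmed by DefaultRemoteConfigurationMonitor above, while the wrapper class and the decision to treat a startup failure as non-fatal are illustrative assumptions only.

    package org.example.knox;  // hypothetical package, not part of the commit

    import org.apache.knox.gateway.config.GatewayConfig;
    import org.apache.knox.gateway.topology.monitor.RemoteConfigurationMonitor;
    import org.apache.knox.gateway.topology.monitor.RemoteConfigurationMonitorFactory;

    public class RemoteConfigMonitorBootstrap {

        public static RemoteConfigurationMonitor startIfAvailable(GatewayConfig config) {
            // Returns the first provider-supplied monitor, or null if none could be created
            RemoteConfigurationMonitor monitor = RemoteConfigurationMonitorFactory.get(config);
            if (monitor != null) {
                try {
                    monitor.start();
                } catch (Exception e) {
                    e.printStackTrace(); // assumption: log and continue without remote monitoring
                    monitor = null;
                }
            }
            return monitor;
        }
    }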

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/java/org/apache/knox/gateway/util/KnoxCLI.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/util/KnoxCLI.java b/gateway-server/src/main/java/org/apache/knox/gateway/util/KnoxCLI.java
index 9a87dd0..928c37e 100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/util/KnoxCLI.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/util/KnoxCLI.java
@@ -1855,7 +1855,7 @@ public class KnoxCLI extends Configured implements Tool {
     static final String DESC = "Lists all of the remote configuration registry clients defined in gateway-site.xml.\n";
 
     /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
+     * @see org.apache.knox.gateway.util.KnoxCLI.Command#execute()
      */
     @Override
     public void execute() throws Exception {
@@ -1870,7 +1870,7 @@ public class KnoxCLI extends Configured implements Tool {
     }
 
     /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
+     * @see org.apache.knox.gateway.util.KnoxCLI.Command#getUsage()
      */
     @Override
     public String getUsage() {
@@ -1958,7 +1958,7 @@ public class KnoxCLI extends Configured implements Tool {
     }
 
     /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
+     * @see org.apache.knox.gateway.util.KnoxCLI.Command#execute()
      */
     @Override
     public void execute() throws Exception {
@@ -1966,7 +1966,7 @@ public class KnoxCLI extends Configured implements Tool {
     }
 
     /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
+     * @see org.apache.knox.gateway.util.KnoxCLI.Command#getUsage()
      */
     @Override
     public String getUsage() {
@@ -1987,7 +1987,7 @@ public class KnoxCLI extends Configured implements Tool {
     }
 
     /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
+     * @see org.apache.knox.gateway.util.KnoxCLI.Command#execute()
      */
     @Override
     public void execute() throws Exception {
@@ -1995,7 +1995,7 @@ public class KnoxCLI extends Configured implements Tool {
     }
 
     /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
+     * @see org.apache.knox.gateway.util.KnoxCLI.Command#getUsage()
      */
     @Override
     public String getUsage() {
@@ -2016,7 +2016,7 @@ public class KnoxCLI extends Configured implements Tool {
     }
 
     /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
+     * @see org.apache.knox.gateway.util.KnoxCLI.Command#execute()
      */
     @Override
     public void execute() throws Exception {
@@ -2039,7 +2039,7 @@ public class KnoxCLI extends Configured implements Tool {
     }
 
     /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
+     * @see org.apache.knox.gateway.util.KnoxCLI.Command#getUsage()
      */
     @Override
     public String getUsage() {

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider b/gateway-server/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider
deleted file mode 100644
index bd4023e..0000000
--- a/gateway-server/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.topology.monitor.DefaultConfigurationMonitorProvider

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.monitor.RemoteConfigurationMonitorProvider
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.monitor.RemoteConfigurationMonitorProvider b/gateway-server/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.monitor.RemoteConfigurationMonitorProvider
new file mode 100644
index 0000000..63f438a
--- /dev/null
+++ b/gateway-server/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.monitor.RemoteConfigurationMonitorProvider
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.knox.gateway.topology.monitor.DefaultConfigurationMonitorProvider


[44/49] knox git commit: KNOX-998 - Merge from trunk 0.14.0 code

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java b/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java
new file mode 100644
index 0000000..2854998
--- /dev/null
+++ b/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.util;
+
+import java.util.Collection;
+import java.util.Map;
+
+public class RemoteRegistryConfigTestUtils {
+
+    public static final String PROPERTY_TYPE = "type";
+    public static final String PROPERTY_NAME = "name";
+    public static final String PROPERTY_ADDRESS = "address";
+    public static final String PROPERTY_NAMESPACE = "namespace";
+    public static final String PROPERTY_SECURE = "secure";
+    public static final String PROPERTY_AUTH_TYPE = "authType";
+    public static final String PROPERTY_PRINCIPAL = "principal";
+    public static final String PROPERTY_CRED_ALIAS = "credentialAlias";
+    public static final String PROPERTY_KEYTAB = "keyTab";
+    public static final String PROPERTY_USE_KEYTAB = "useKeyTab";
+    public static final String PROPERTY_USE_TICKET_CACHE = "useTicketCache";
+
+    public static String createRemoteConfigRegistriesXML(Collection<Map<String, String>> configProperties) {
+        String result = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
+                        "<remote-configuration-registries>\n";
+
+        for (Map<String, String> props : configProperties) {
+            String authType = props.get(PROPERTY_AUTH_TYPE);
+            if ("Kerberos".equalsIgnoreCase(authType)) {
+                result +=
+                   createRemoteConfigRegistryXMLWithKerberosAuth(props.get(PROPERTY_TYPE),
+                                                                 props.get(PROPERTY_NAME),
+                                                                 props.get(PROPERTY_ADDRESS),
+                                                                 props.get(PROPERTY_PRINCIPAL),
+                                                                 props.get(PROPERTY_KEYTAB),
+                                                                 Boolean.valueOf(props.get(PROPERTY_USE_KEYTAB)),
+                                                                 Boolean.valueOf(props.get(PROPERTY_USE_TICKET_CACHE)));
+            } else if ("Digest".equalsIgnoreCase(authType)) {
+                result +=
+                    createRemoteConfigRegistryXMLWithDigestAuth(props.get(PROPERTY_TYPE),
+                                                                props.get(PROPERTY_NAME),
+                                                                props.get(PROPERTY_ADDRESS),
+                                                                props.get(PROPERTY_PRINCIPAL),
+                                                                props.get(PROPERTY_CRED_ALIAS));
+            } else {
+                result += createRemoteConfigRegistryXMLNoAuth(props.get(PROPERTY_TYPE),
+                                                              props.get(PROPERTY_NAME),
+                                                              props.get(PROPERTY_ADDRESS));
+            }
+        }
+
+        result += "</remote-configuration-registries>\n";
+
+        return result;
+    }
+
+    public static String createRemoteConfigRegistryXMLWithKerberosAuth(String type,
+                                                                       String name,
+                                                                       String address,
+                                                                       String principal,
+                                                                       String keyTab,
+                                                                       boolean useKeyTab,
+                                                                       boolean useTicketCache) {
+        return "  <remote-configuration-registry>\n" +
+               "    <name>" + name + "</name>\n" +
+               "    <type>" + type + "</type>\n" +
+               "    <address>" + address + "</address>\n" +
+               "    <secure>true</secure>\n" +
+               "    <auth-type>" + "Kerberos" + "</auth-type>\n" +
+               "    <principal>" + principal + "</principal>\n" +
+               "    <keytab>" + keyTab + "</keytab>\n" +
+               "    <use-keytab>" + String.valueOf(userKeyTab) + "</use-keytab>\n" +
+               "    <use-ticket-cache>" + String.valueOf(useTicketCache) + "</use-ticket-cache>\n" +
+               "  </remote-configuration-registry>\n";
+    }
+
+    public static String createRemoteConfigRegistryXMLWithDigestAuth(String type,
+                                                                     String name,
+                                                                     String address,
+                                                                     String principal,
+                                                                     String credentialAlias) {
+        return "  <remote-configuration-registry>\n" +
+               "    <name>" + name + "</name>\n" +
+               "    <type>" + type + "</type>\n" +
+               "    <address>" + address + "</address>\n" +
+               "    <secure>true</secure>\n" +
+               "    <auth-type>" + "Digest" + "</auth-type>\n" +
+               "    <principal>" + principal + "</principal>\n" +
+               "    <credential-alias>" + credentialAlias + "</credential-alias>\n" +
+               "  </remote-configuration-registry>\n";
+    }
+
+
+    public static String createRemoteConfigRegistryXMLNoAuth(String type,
+                                                             String name,
+                                                             String address) {
+        return "  <remote-configuration-registry>\n" +
+               "    <name>" + name + "</name>\n" +
+               "    <type>" + type + "</type>\n" +
+               "    <address>" + address + "</address>\n" +
+               "  </remote-configuration-registry>\n";
+    }
+
+}
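
A small usage sketch for the helper above: a single digest-auth registry entry, with the XML it yields (as composed by createRemoteConfigRegistryXMLWithDigestAuth) shown in the trailing comment. The property values are illustrative only.

    package org.example.knox.test;  // hypothetical package, not part of the commit

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.knox.gateway.service.config.remote.util.RemoteRegistryConfigTestUtils;

    public class RemoteRegistryConfigTestUtilsExample {

        public static void main(String[] args) {
            Map<String, String> props = new HashMap<>();
            props.put(RemoteRegistryConfigTestUtils.PROPERTY_TYPE, "ZooKeeper");
            props.put(RemoteRegistryConfigTestUtils.PROPERTY_NAME, "test-registry");
            props.put(RemoteRegistryConfigTestUtils.PROPERTY_ADDRESS, "localhost:2181");
            props.put(RemoteRegistryConfigTestUtils.PROPERTY_AUTH_TYPE, "Digest");
            props.put(RemoteRegistryConfigTestUtils.PROPERTY_PRINCIPAL, "knox");
            props.put(RemoteRegistryConfigTestUtils.PROPERTY_CRED_ALIAS, "zkCredential");

            String xml = RemoteRegistryConfigTestUtils.createRemoteConfigRegistriesXML(
                             Collections.singleton(props));
            System.out.println(xml);
            // Prints:
            //   <?xml version="1.0" encoding="utf-8"?>
            //   <remote-configuration-registries>
            //     <remote-configuration-registry>
            //       <name>test-registry</name>
            //       <type>ZooKeeper</type>
            //       <address>localhost:2181</address>
            //       <secure>true</secure>
            //       <auth-type>Digest</auth-type>
            //       <principal>knox</principal>
            //       <credential-alias>zkCredential</credential-alias>
            //     </remote-configuration-registry>
            //   </remote-configuration-registries>
        }
    }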

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java
new file mode 100644
index 0000000..8a817a4
--- /dev/null
+++ b/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java
@@ -0,0 +1,424 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.zk;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.curator.test.InstanceSpec;
+import org.apache.curator.test.TestingCluster;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient.ChildEntryListener;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceFactory;
+import org.apache.knox.gateway.service.config.remote.util.RemoteRegistryConfigTestUtils;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+public class RemoteConfigurationRegistryClientServiceTest {
+
+    /**
+     * Test a configuration for an unsecured remote registry, included in the gateway configuration.
+     */
+    @Test
+    public void testUnsecuredZooKeeperWithSimpleRegistryConfig() throws Exception {
+        final String REGISTRY_CLIENT_NAME = "unsecured-zk-registry-name";
+        final String PRINCIPAL = null;
+        final String PWD = null;
+        final String CRED_ALIAS = null;
+
+        // Configure and start a ZK cluster without authentication
+        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
+
+        try {
+            // Create the setup client for the test cluster, and initialize the test znodes
+            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
+
+            // Mock configuration
+            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+            final String registryConfigValue =
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
+            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME))
+                    .andReturn(registryConfigValue)
+                    .anyTimes();
+            EasyMock.expect(config.getRemoteRegistryConfigurationNames())
+                    .andReturn(Collections.singletonList(REGISTRY_CLIENT_NAME)).anyTimes();
+            EasyMock.replay(config);
+
+            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME, config, CRED_ALIAS, PWD);
+        } finally {
+            zkCluster.stop();
+        }
+    }
+
+    /**
+     * Test multiple configurations for an unsecured remote registry.
+     */
+    @Test
+    public void testMultipleUnsecuredZooKeeperWithSimpleRegistryConfig() throws Exception {
+        final String REGISTRY_CLIENT_NAME_1 = "zkclient1";
+        final String REGISTRY_CLIENT_NAME_2 = "zkclient2";
+        final String PRINCIPAL = null;
+        final String PWD = null;
+        final String CRED_ALIAS = null;
+
+        // Configure and start a ZK cluster without authentication
+        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
+
+        try {
+            // Create the setup client for the test cluster, and initialize the test znodes
+            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
+
+            // Mock configuration
+            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+            final String registryConfigValue1 =
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
+            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME_1))
+                    .andReturn(registryConfigValue1).anyTimes();
+            final String registryConfigValue2 =
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
+            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME_2))
+                    .andReturn(registryConfigValue2).anyTimes();
+            EasyMock.expect(config.getRemoteRegistryConfigurationNames())
+                    .andReturn(Arrays.asList(REGISTRY_CLIENT_NAME_1, REGISTRY_CLIENT_NAME_2)).anyTimes();
+            EasyMock.replay(config);
+
+            // Create the client service instance
+            RemoteConfigurationRegistryClientService clientService =
+                    RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
+            assertEquals("Wrong registry client service type.", clientService.getClass(), CuratorClientService.class);
+            clientService.setAliasService(null);
+            clientService.init(config, null);
+            clientService.start();
+
+            RemoteConfigurationRegistryClient client1 = clientService.get(REGISTRY_CLIENT_NAME_1);
+            assertNotNull(client1);
+
+            RemoteConfigurationRegistryClient client2 = clientService.get(REGISTRY_CLIENT_NAME_2);
+            assertNotNull(client2);
+
+            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME_1, clientService, false);
+            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME_2, clientService, false);
+        } finally {
+            zkCluster.stop();
+        }
+    }
+
+    /**
+     * Test a configuration for a secure remote registry, included in the gateway configuration.
+     */
+    @Test
+    public void testZooKeeperWithSimpleRegistryConfig() throws Exception {
+        final String AUTH_TYPE = "digest";
+        final String REGISTRY_CLIENT_NAME = "zk-registry-name";
+        final String PRINCIPAL = "knox";
+        final String PWD = "knoxtest";
+        final String CRED_ALIAS = "zkCredential";
+
+        // Configure and start a secure ZK cluster
+        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
+
+        try {
+            // Create the setup client for the test cluster, and initialize the test znodes
+            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
+
+            // Mock configuration
+            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+            final String registryConfigValue =
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=" + AUTH_TYPE + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + PRINCIPAL + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + CRED_ALIAS;
+            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME))
+                    .andReturn(registryConfigValue)
+                    .anyTimes();
+            EasyMock.expect(config.getRemoteRegistryConfigurationNames())
+                    .andReturn(Collections.singletonList(REGISTRY_CLIENT_NAME)).anyTimes();
+            EasyMock.replay(config);
+
+            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME, config, CRED_ALIAS, PWD);
+        } finally {
+            zkCluster.stop();
+        }
+    }
+
+    /**
+     * Test the remote registry configuration external to, and referenced from, the gateway configuration, for a secure
+     * client.
+     */
+    @Test
+    public void testZooKeeperWithSingleExternalRegistryConfig() throws Exception {
+        final String AUTH_TYPE = "digest";
+        final String REGISTRY_CLIENT_NAME = "my-zookeeper_registryNAME";
+        final String PRINCIPAL = "knox";
+        final String PWD = "knoxtest";
+        final String CRED_ALIAS = "zkCredential";
+
+        // Configure and start a secure ZK cluster
+        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
+
+        File tmpRegConfigFile = null;
+
+        try {
+            // Create the setup client for the test cluster, and initialize the test znodes
+            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
+
+            // Mock configuration
+            Map<String, String> registryConfigProps = new HashMap<>();
+            registryConfigProps.put("type", ZooKeeperClientService.TYPE);
+            registryConfigProps.put("name", REGISTRY_CLIENT_NAME);
+            registryConfigProps.put("address", zkCluster.getConnectString());
+            registryConfigProps.put("secure", "true");
+            registryConfigProps.put("authType", AUTH_TYPE);
+            registryConfigProps.put("principal", PRINCIPAL);
+            registryConfigProps.put("credentialAlias", CRED_ALIAS);
+            String registryConfigXML =
+                  RemoteRegistryConfigTestUtils.createRemoteConfigRegistriesXML(Collections.singleton(registryConfigProps));
+            tmpRegConfigFile = File.createTempFile("myRemoteRegistryConfig", "xml");
+            FileUtils.writeStringToFile(tmpRegConfigFile, registryConfigXML);
+
+            System.setProperty("org.apache.knox.gateway.remote.registry.config.file", tmpRegConfigFile.getAbsolutePath());
+
+            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+            EasyMock.replay(config);
+
+            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME, config, CRED_ALIAS, PWD);
+        } finally {
+            zkCluster.stop();
+            if (tmpRegConfigFile != null && tmpRegConfigFile.exists()) {
+                tmpRegConfigFile.delete();
+            }
+            System.clearProperty("org.apache.knox.gateway.remote.registry.config.file");
+        }
+    }
+
+    /**
+     * Setup and start a secure test ZooKeeper cluster.
+     */
+    private TestingCluster setupAndStartSecureTestZooKeeper(String principal, String digestPassword) throws Exception {
+        final boolean applyAuthentication = (principal != null);
+
+        // Configure security for the ZK cluster instances
+        Map<String, Object> customInstanceSpecProps = new HashMap<>();
+
+        if (applyAuthentication) {
+            customInstanceSpecProps.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
+            customInstanceSpecProps.put("requireClientAuthScheme", "sasl");
+        }
+
+        // Define the test cluster
+        List<InstanceSpec> instanceSpecs = new ArrayList<>();
+        for (int i = 0 ; i < 3 ; i++) {
+            InstanceSpec is = new InstanceSpec(null, -1, -1, -1, false, (i+1), -1, -1, customInstanceSpecProps);
+            instanceSpecs.add(is);
+        }
+        TestingCluster zkCluster = new TestingCluster(instanceSpecs);
+
+        if (applyAuthentication) {
+            // Setup ZooKeeper server SASL
+            Map<String, String> digestOptions = new HashMap<>();
+            digestOptions.put("user_" + principal, digestPassword);
+            final AppConfigurationEntry[] serverEntries =
+                    {new AppConfigurationEntry("org.apache.zookeeper.server.auth.DigestLoginModule",
+                            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                            digestOptions)};
+            Configuration.setConfiguration(new Configuration() {
+                @Override
+                public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+                    return ("Server".equalsIgnoreCase(name)) ? serverEntries : null;
+                }
+            });
+        }
+
+        // Start the cluster
+        zkCluster.start();
+
+        return zkCluster;
+    }
+
+    /**
+     * Create a ZooKeeper client with SASL digest auth configured, and initialize the test znodes.
+     */
+    private CuratorFramework initializeTestClientAndZNodes(TestingCluster zkCluster, String principal) throws Exception {
+        // Create the client for the test cluster
+        CuratorFramework setupClient = CuratorFrameworkFactory.builder()
+                                                              .connectString(zkCluster.getConnectString())
+                                                              .retryPolicy(new ExponentialBackoffRetry(100, 3))
+                                                              .build();
+        assertNotNull(setupClient);
+        setupClient.start();
+
+        List<ACL> acls = new ArrayList<>();
+        if (principal != null) {
+            acls.add(new ACL(ZooDefs.Perms.ALL, new Id("sasl", principal)));
+        } else {
+            acls.add(new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+        }
+        setupClient.create().creatingParentsIfNeeded().withACL(acls).forPath("/knox/config/descriptors");
+        setupClient.create().creatingParentsIfNeeded().withACL(acls).forPath("/knox/config/shared-providers");
+
+        List<ACL> negativeACLs = new ArrayList<>();
+        if (principal != null) {
+            negativeACLs.add(new ACL(ZooDefs.Perms.ALL, new Id("sasl", "notyou")));
+        } else {
+            negativeACLs.add(new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+        }
+        setupClient.create().creatingParentsIfNeeded().withACL(negativeACLs).forPath("/someotherconfig");
+
+        return setupClient;
+    }
+
+    private void doTestZooKeeperClient(final CuratorFramework setupClient,
+                                       final String           testClientName,
+                                       final GatewayConfig    config,
+                                       final String           credentialAlias,
+                                       final String           digestPassword) throws Exception {
+        boolean isSecureTest = (credentialAlias != null && digestPassword != null);
+
+        // Mock alias service
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(credentialAlias))
+                .andReturn(isSecureTest ? digestPassword.toCharArray() : null)
+                .anyTimes();
+        EasyMock.replay(aliasService);
+
+        // Create the client service instance
+        RemoteConfigurationRegistryClientService clientService =
+                RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
+        assertEquals("Wrong registry client service type.", clientService.getClass(), CuratorClientService.class);
+        clientService.setAliasService(aliasService);
+        clientService.init(config, null);
+        clientService.start();
+
+        doTestZooKeeperClient(setupClient, testClientName, clientService, isSecureTest);
+    }
+
+    /**
+     * Test secure ZooKeeper client interactions.
+     *
+     * @param setupClient    The client used for interacting with ZooKeeper independent from the registry client service.
+     * @param testClientName The name of the client to use from the registry client service.
+     * @param clientService  The RemoteConfigurationRegistryClientService
+     * @param isSecureTest   Flag to indicate whether this is a secure interaction test
+     */
+    private void doTestZooKeeperClient(final CuratorFramework                         setupClient,
+                                       final String                                   testClientName,
+                                       final RemoteConfigurationRegistryClientService clientService,
+                                       boolean                                        isSecureTest) throws Exception {
+
+        RemoteConfigurationRegistryClient client = clientService.get(testClientName);
+        assertNotNull(client);
+        List<String> descriptors = client.listChildEntries("/knox/config/descriptors");
+        assertNotNull(descriptors);
+        for (String descriptor : descriptors) {
+            System.out.println("Descriptor: " + descriptor);
+        }
+
+        List<String> providerConfigs = client.listChildEntries("/knox/config/shared-providers");
+        assertNotNull(providerConfigs);
+        for (String providerConfig : providerConfigs) {
+            System.out.println("Provider config: " + providerConfig);
+        }
+
+        List<String> someotherConfig = client.listChildEntries("/someotherconfig");
+        if (isSecureTest) {
+            assertNull("Expected null because of the ACL mismatch.", someotherConfig);
+        } else {
+            assertNotNull(someotherConfig);
+        }
+
+        // Test listeners
+        final String MY_NEW_ZNODE = "/clientServiceTestNode";
+        final String MY_NEW_DATA_ZNODE = MY_NEW_ZNODE + "/mydata";
+
+        if (setupClient.checkExists().forPath(MY_NEW_ZNODE) != null) {
+            setupClient.delete().deletingChildrenIfNeeded().forPath(MY_NEW_ZNODE);
+        }
+
+        final List<String> listenerLog = new ArrayList<>();
+        client.addChildEntryListener(MY_NEW_ZNODE, (c, type, path) -> {
+            listenerLog.add("EXTERNAL: " + type.toString() + ":" + path);
+            if (RemoteConfigurationRegistryClient.ChildEntryListener.Type.ADDED.equals(type)) {
+                try {
+                    c.addEntryListener(path, (cc, p, d) -> listenerLog.add("EXTERNAL: " + p + ":" + (d != null ? new String(d) : "null")));
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        });
+
+        client.createEntry(MY_NEW_ZNODE);
+        client.createEntry(MY_NEW_DATA_ZNODE, "more test data");
+        String testData = client.getEntryData(MY_NEW_DATA_ZNODE);
+        assertNotNull(testData);
+        assertEquals("more test data", testData);
+
+        assertTrue(client.entryExists(MY_NEW_DATA_ZNODE));
+        client.setEntryData(MY_NEW_DATA_ZNODE, "still more data");
+
+        try {
+            Thread.sleep(1000);
+        } catch (InterruptedException e) {
+            //
+        }
+
+        client.setEntryData(MY_NEW_DATA_ZNODE, "changed completely");
+
+        try {
+            Thread.sleep(1000);
+        } catch (InterruptedException e) {
+            //
+        }
+
+        client.deleteEntry(MY_NEW_DATA_ZNODE);
+
+        try {
+            Thread.sleep(1000);
+        } catch (InterruptedException e) {
+            //
+        }
+
+        assertFalse(listenerLog.isEmpty());
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java
new file mode 100644
index 0000000..7a123f4
--- /dev/null
+++ b/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java
@@ -0,0 +1,255 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.zk;
+
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+import org.apache.knox.gateway.service.config.remote.zk.RemoteConfigurationRegistryJAASConfig;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public class RemoteConfigurationRegistryJAASConfigTest {
+
+    @Test
+    public void testZooKeeperDigestContextEntry() throws Exception {
+        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
+        final String ENTRY_NAME       = "my_digest_context";
+        final String DIGEST_PRINCIPAL = "myIdentity";
+        final String DIGEST_PWD_ALIAS = "myAlias";
+        final String DIGEST_PWD       = "mysecret";
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(DIGEST_PWD_ALIAS)).andReturn(DIGEST_PWD.toCharArray()).anyTimes();
+        EasyMock.replay(aliasService);
+
+        registryConfigs.add(createDigestConfig(ENTRY_NAME, DIGEST_PRINCIPAL, DIGEST_PWD_ALIAS));
+
+        try {
+            RemoteConfigurationRegistryJAASConfig jaasConfig =
+                                    RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, aliasService);
+
+            // Make sure there are no entries for an invalid context entry name
+            assertNull(jaasConfig.getAppConfigurationEntry("invalid"));
+
+            // Validate the intended context entry
+            validateDigestContext(jaasConfig,
+                                  ENTRY_NAME,
+                                  RemoteConfigurationRegistryJAASConfig.digestLoginModules.get("ZOOKEEPER"),
+                                  DIGEST_PRINCIPAL,
+                                  DIGEST_PWD);
+        } finally {
+            Configuration.setConfiguration(null);
+        }
+    }
+
+    @Test
+    public void testKerberosContextEntry() throws Exception {
+        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
+        final String ENTRY_NAME = "my_kerberos_context";
+        final String PRINCIPAL  = "myIdentity";
+
+        File dummyKeyTab = File.createTempFile("my_context", "keytab");
+        registryConfigs.add(createKerberosConfig(ENTRY_NAME, PRINCIPAL, dummyKeyTab.getAbsolutePath()));
+
+        try {
+            RemoteConfigurationRegistryJAASConfig jaasConfig =
+                                            RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, null);
+
+            // Make sure there are no entries for an invalid context entry name
+            assertNull(jaasConfig.getAppConfigurationEntry("invalid"));
+
+            // Validate the intended context entry
+            validateKerberosContext(jaasConfig,
+                                    ENTRY_NAME,
+                                    PRINCIPAL,
+                                    dummyKeyTab.getAbsolutePath(),
+                                    true,
+                                    false);
+
+        } finally {
+            Configuration.setConfiguration(null);
+        }
+    }
+
+    @Test
+    public void testZooKeeperMultipleContextEntries() throws Exception {
+        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
+        final String KERBEROS_ENTRY_NAME = "my_kerberos_context";
+        final String KERBEROS_PRINCIPAL  = "myKerberosIdentity";
+        final String DIGEST_ENTRY_NAME   = "my_digest_context";
+        final String DIGEST_PRINCIPAL    = "myDigestIdentity";
+        final String DIGEST_PWD_ALIAS    = "myAlias";
+        final String DIGEST_PWD          = "mysecret";
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(DIGEST_PWD_ALIAS)).andReturn(DIGEST_PWD.toCharArray()).anyTimes();
+        EasyMock.replay(aliasService);
+
+        File dummyKeyTab = File.createTempFile("my_context", "keytab");
+        registryConfigs.add(createKerberosConfig(KERBEROS_ENTRY_NAME, KERBEROS_PRINCIPAL, dummyKeyTab.getAbsolutePath()));
+        registryConfigs.add(createDigestConfig(DIGEST_ENTRY_NAME, DIGEST_PRINCIPAL, DIGEST_PWD_ALIAS));
+
+        try {
+            RemoteConfigurationRegistryJAASConfig jaasConfig =
+                                        RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, aliasService);
+
+            // Make sure there are no entries for an invalid context entry name
+            assertNull(jaasConfig.getAppConfigurationEntry("invalid"));
+
+            // Validate the kerberos context entry
+            validateKerberosContext(jaasConfig,
+                                    KERBEROS_ENTRY_NAME,
+                                    KERBEROS_PRINCIPAL,
+                                    dummyKeyTab.getAbsolutePath(),
+                                    true,
+                                    false);
+
+            // Validate the digest context entry
+            validateDigestContext(jaasConfig,
+                                  DIGEST_ENTRY_NAME,
+                                  RemoteConfigurationRegistryJAASConfig.digestLoginModules.get("ZOOKEEPER"),
+                                  DIGEST_PRINCIPAL,
+                                  DIGEST_PWD);
+
+        } finally {
+            Configuration.setConfiguration(null);
+        }
+    }
+
+    @Test
+    public void testZooKeeperDigestContextEntryWithoutAliasService() throws Exception {
+        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
+        final String ENTRY_NAME       = "my_digest_context";
+        final String DIGEST_PRINCIPAL = "myIdentity";
+        final String DIGEST_PWD_ALIAS = "myAlias";
+
+        registryConfigs.add(createDigestConfig(ENTRY_NAME, DIGEST_PRINCIPAL, DIGEST_PWD_ALIAS));
+
+        try {
+            RemoteConfigurationRegistryJAASConfig jaasConfig =
+                                            RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, null);
+            fail("Expected IllegalArgumentException because the AliasService is not available.");
+        } catch (IllegalArgumentException e) {
+            // Expected
+            assertTrue(e.getMessage().contains("AliasService"));
+        } catch (Throwable e) {
+            fail("Wrong exception encountered: " + e.getClass().getName() + ", " + e.getMessage());
+        } finally {
+            Configuration.setConfiguration(null);
+        }
+    }
+
+    private static RemoteConfigurationRegistryConfig createDigestConfig(String entryName,
+                                                                        String principal,
+                                                                        String credentialAlias) {
+        return createDigestConfig(entryName, principal, credentialAlias, "ZooKeeper");
+    }
+
+    private static RemoteConfigurationRegistryConfig createDigestConfig(String entryName,
+                                                                        String principal,
+                                                                        String credentialAlias,
+                                                                        String registryType) {
+        RemoteConfigurationRegistryConfig rc = EasyMock.createNiceMock(RemoteConfigurationRegistryConfig.class);
+        EasyMock.expect(rc.getRegistryType()).andReturn(registryType).anyTimes();
+        EasyMock.expect(rc.getName()).andReturn(entryName).anyTimes();
+        EasyMock.expect(rc.isSecureRegistry()).andReturn(true).anyTimes();
+        EasyMock.expect(rc.getAuthType()).andReturn("digest").anyTimes();
+        EasyMock.expect(rc.getPrincipal()).andReturn(principal).anyTimes();
+        EasyMock.expect(rc.getCredentialAlias()).andReturn(credentialAlias).anyTimes();
+        EasyMock.replay(rc);
+        return rc;
+    }
+
+
+    private static RemoteConfigurationRegistryConfig createKerberosConfig(String entryName,
+                                                                          String principal,
+                                                                          String keyTabPath) {
+        return createKerberosConfig(entryName, principal, keyTabPath, "ZooKeeper");
+    }
+
+    private static RemoteConfigurationRegistryConfig createKerberosConfig(String entryName,
+                                                                          String principal,
+                                                                          String keyTabPath,
+                                                                          String registryType) {
+        return createKerberosConfig(entryName, principal, keyTabPath, null, null, registryType);
+    }
+
+    private static RemoteConfigurationRegistryConfig createKerberosConfig(String entryName,
+                                                                          String principal,
+                                                                          String keyTabPath,
+                                                                          Boolean useKeyTab,
+                                                                          Boolean useTicketCache,
+                                                                          String registryType) {
+        RemoteConfigurationRegistryConfig rc = EasyMock.createNiceMock(RemoteConfigurationRegistryConfig.class);
+        EasyMock.expect(rc.getRegistryType()).andReturn(registryType).anyTimes();
+        EasyMock.expect(rc.getName()).andReturn(entryName).anyTimes();
+        EasyMock.expect(rc.isSecureRegistry()).andReturn(true).anyTimes();
+        EasyMock.expect(rc.getAuthType()).andReturn("kerberos").anyTimes();
+        EasyMock.expect(rc.getPrincipal()).andReturn(principal).anyTimes();
+        EasyMock.expect(rc.getKeytab()).andReturn(keyTabPath).anyTimes();
+        EasyMock.expect(rc.isUseKeyTab()).andReturn(useKeyTab != null ? useKeyTab : true).anyTimes();
+        EasyMock.expect(rc.isUseTicketCache()).andReturn(useTicketCache != null ? useTicketCache : false).anyTimes();
+        EasyMock.replay(rc);
+        return rc;
+    }
+
+    private static void validateDigestContext(RemoteConfigurationRegistryJAASConfig config,
+                                              String                                entryName,
+                                              String                                loginModule,
+                                              String                                principal,
+                                              String                                password) throws Exception {
+        AppConfigurationEntry[] myContextEntries = config.getAppConfigurationEntry(entryName);
+        assertNotNull(myContextEntries);
+        assertEquals(1, myContextEntries.length);
+        AppConfigurationEntry entry = myContextEntries[0];
+        assertTrue(entry.getLoginModuleName().equals(loginModule));
+        Map<String, ?> entryOpts = entry.getOptions();
+        assertEquals(principal, entryOpts.get("username"));
+        assertEquals(password, entryOpts.get("password"));
+    }
+
+    private static void validateKerberosContext(RemoteConfigurationRegistryJAASConfig config,
+                                                String                                entryName,
+                                                String                                principal,
+                                                String                                keyTab,
+                                                boolean                               useKeyTab,
+                                                boolean                               useTicketCache) throws Exception {
+        AppConfigurationEntry[] myContextEntries = config.getAppConfigurationEntry(entryName);
+        assertNotNull(myContextEntries);
+        assertEquals(1, myContextEntries.length);
+        AppConfigurationEntry entry = myContextEntries[0];
+        assertTrue(entry.getLoginModuleName().endsWith(".security.auth.module.Krb5LoginModule"));
+        Map<String, ?> entryOpts = entry.getOptions();
+        assertEquals(principal, entryOpts.get("principal"));
+        assertEquals(keyTab, entryOpts.get("keyTab"));
+        assertEquals(useKeyTab, Boolean.valueOf((String)entryOpts.get("isUseKeyTab")));
+        assertEquals(useTicketCache, Boolean.valueOf((String)entryOpts.get("isUseTicketCache")));
+    }
+}
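
For reference, a minimal sketch of how a context entry registered by RemoteConfigurationRegistryJAASConfig.configure(...) could be read back through the standard JAAS API. It assumes configure(...) installs the resulting object as the process-wide JAAS Configuration, which the test's cleanup via Configuration.setConfiguration(null) suggests; the entry name mirrors the digest test above and is illustrative only.

    import javax.security.auth.login.AppConfigurationEntry;
    import javax.security.auth.login.Configuration;
    import java.util.Map;

    public class JaasLookupSketch {
        public static void main(String[] args) {
            // Assumes configure(...) has already installed itself as the
            // process-wide JAAS Configuration (as the test's cleanup implies).
            Configuration jaas = Configuration.getConfiguration();
            AppConfigurationEntry[] entries = jaas.getAppConfigurationEntry("my_digest_context");
            if (entries != null) {
                for (AppConfigurationEntry entry : entries) {
                    Map<String, ?> opts = entry.getOptions();
                    System.out.println(entry.getLoginModuleName() + " as " + opts.get("username"));
                }
            }
        }
    }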

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java
deleted file mode 100644
index bfb4518..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.services.config.client;
-
-import java.util.List;
-
-public interface RemoteConfigurationRegistryClient {
-
-    String getAddress();
-
-    boolean isAuthenticationConfigured();
-
-    boolean entryExists(String path);
-
-    List<EntryACL> getACL(String path);
-
-    void setACL(String path, List<EntryACL> acls);
-
-    List<String> listChildEntries(String path);
-
-    String getEntryData(String path);
-
-    String getEntryData(String path, String encoding);
-
-    void createEntry(String path);
-
-    void createEntry(String path, String data);
-
-    void createEntry(String path, String data, String encoding);
-
-    int setEntryData(String path, String data);
-
-    int setEntryData(String path, String data, String encoding);
-
-    void deleteEntry(String path);
-
-    void addChildEntryListener(String path, ChildEntryListener listener) throws Exception;
-
-    void addEntryListener(String path, EntryListener listener) throws Exception;
-
-    void removeEntryListener(String path) throws Exception;
-
-    interface ChildEntryListener {
-
-        enum Type {
-            ADDED,
-            REMOVED,
-            UPDATED
-        }
-
-        void childEvent(RemoteConfigurationRegistryClient client, ChildEntryListener.Type type, String path);
-    }
-
-    interface EntryListener {
-        void entryChanged(RemoteConfigurationRegistryClient client, String path, byte[] data);
-    }
-
-    interface EntryACL {
-        String getId();
-        String getType();
-        Object getPermissions();
-        boolean canRead();
-        boolean canWrite();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClientService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClientService.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClientService.java
deleted file mode 100644
index 1467f75..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClientService.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.services.config.client;
-
-import org.apache.hadoop.gateway.services.Service;
-import org.apache.hadoop.gateway.services.security.AliasService;
-
-public interface RemoteConfigurationRegistryClientService extends Service {
-
-    void setAliasService(AliasService aliasService);
-
-    RemoteConfigurationRegistryClient get(String l);
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/ClusterConfigurationMonitorService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/ClusterConfigurationMonitorService.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/ClusterConfigurationMonitorService.java
deleted file mode 100644
index 961f2e5..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/ClusterConfigurationMonitorService.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology;
-
-import org.apache.hadoop.gateway.services.Service;
-import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
-
-/**
- * Gateway service for managing cluster configuration monitors.
- */
-public interface ClusterConfigurationMonitorService extends Service {
-
-    /**
-     *
-     * @param type The type of monitor (e.g., Ambari)
-     *
-     * @return The monitor associated with the specified type, or null if there is no such monitor.
-     */
-    ClusterConfigurationMonitor getMonitor(String type);
-
-
-    /**
-     * Register for configuration change notifications from <em>any</em> of the monitors managed by this service.
-     *
-     * @param listener The listener to register.
-     */
-    void addListener(ClusterConfigurationMonitor.ConfigurationChangeListener listener);
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitor.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitor.java
deleted file mode 100644
index fc3614d..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitor.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery;
-
-public interface ClusterConfigurationMonitor {
-
-    /**
-     * Start the monitor.
-     */
-    void start();
-
-    /**
-     * Stop the monitor.
-     */
-    void stop();
-
-    /**
-     *
-     * @param interval The polling interval, in seconds
-     */
-    void setPollingInterval(int interval);
-
-    /**
-     * Register for notifications from the monitor.
-     */
-    void addListener(ConfigurationChangeListener listener);
-
-    /**
-     * Monitor listener interface for receiving notifications that a configuration has changed.
-     */
-    interface ConfigurationChangeListener {
-        void onConfigurationChange(String source, String clusterName);
-    }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java
deleted file mode 100644
index a8d5f30..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery;
-
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.services.security.AliasService;
-
-public interface ClusterConfigurationMonitorProvider {
-
-    String getType();
-
-    ClusterConfigurationMonitor newInstance(GatewayConfig config, AliasService aliasService);
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitor.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitor.java
deleted file mode 100644
index 82c5809..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitor.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.monitor;
-
-public interface RemoteConfigurationMonitor {
-
-    void start() throws Exception;
-
-    void stop() throws Exception;
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java
deleted file mode 100644
index d19dace..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.monitor;
-
-
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-
-public interface RemoteConfigurationMonitorProvider {
-
-    /**
-     *
-     * @param config        The gateway configuration.
-     * @param clientService The RemoteConfigurationRegistryClientService for accessing the remote configuration.
-     *
-     * @return A RemoteConfigurationMonitor for keeping the local config in sync with the remote config
-     */
-    RemoteConfigurationMonitor newInstance(GatewayConfig config, RemoteConfigurationRegistryClientService clientService);
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/knox/gateway/services/config/client/RemoteConfigurationRegistryClient.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/services/config/client/RemoteConfigurationRegistryClient.java b/gateway-spi/src/main/java/org/apache/knox/gateway/services/config/client/RemoteConfigurationRegistryClient.java
new file mode 100644
index 0000000..5afae63
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/services/config/client/RemoteConfigurationRegistryClient.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.services.config.client;
+
+import java.util.List;
+
+public interface RemoteConfigurationRegistryClient {
+
+    String getAddress();
+
+    boolean isAuthenticationConfigured();
+
+    boolean entryExists(String path);
+
+    List<EntryACL> getACL(String path);
+
+    void setACL(String path, List<EntryACL> acls);
+
+    List<String> listChildEntries(String path);
+
+    String getEntryData(String path);
+
+    String getEntryData(String path, String encoding);
+
+    void createEntry(String path);
+
+    void createEntry(String path, String data);
+
+    void createEntry(String path, String data, String encoding);
+
+    int setEntryData(String path, String data);
+
+    int setEntryData(String path, String data, String encoding);
+
+    void deleteEntry(String path);
+
+    void addChildEntryListener(String path, ChildEntryListener listener) throws Exception;
+
+    void addEntryListener(String path, EntryListener listener) throws Exception;
+
+    void removeEntryListener(String path) throws Exception;
+
+    interface ChildEntryListener {
+
+        enum Type {
+            ADDED,
+            REMOVED,
+            UPDATED
+        }
+
+        void childEvent(RemoteConfigurationRegistryClient client, ChildEntryListener.Type type, String path);
+    }
+
+    interface EntryListener {
+        void entryChanged(RemoteConfigurationRegistryClient client, String path, byte[] data);
+    }
+
+    interface EntryACL {
+        String getId();
+        String getType();
+        Object getPermissions();
+        boolean canRead();
+        boolean canWrite();
+    }
+
+}
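
A minimal usage sketch against this client interface, assuming an instance has already been obtained (for example from a RemoteConfigurationRegistryClientService); the entry path and payload below are illustrative only, not paths the gateway actually uses.

    import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient;

    public class RegistryClientSketch {
        // Publishes a configuration entry and watches it for changes.
        static void publishAndWatch(RemoteConfigurationRegistryClient client) throws Exception {
            String path = "/knox/config/shared-providers/sandbox-providers.xml"; // illustrative path
            String data = "<gateway>...</gateway>";                              // illustrative payload
            if (!client.entryExists(path)) {
                client.createEntry(path, data);
            } else {
                client.setEntryData(path, data);
            }
            // EntryListener has a single method, so a lambda suffices.
            client.addEntryListener(path, (c, p, bytes) ->
                System.out.println("Entry changed: " + p + " (" + (bytes == null ? 0 : bytes.length) + " bytes)"));
        }
    }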

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/knox/gateway/services/config/client/RemoteConfigurationRegistryClientService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/services/config/client/RemoteConfigurationRegistryClientService.java b/gateway-spi/src/main/java/org/apache/knox/gateway/services/config/client/RemoteConfigurationRegistryClientService.java
new file mode 100644
index 0000000..77573dd
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/services/config/client/RemoteConfigurationRegistryClientService.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.services.config.client;
+
+import org.apache.knox.gateway.services.Service;
+import org.apache.knox.gateway.services.security.AliasService;
+
+public interface RemoteConfigurationRegistryClientService extends Service {
+
+    void setAliasService(AliasService aliasService);
+
+    RemoteConfigurationRegistryClient get(String l);
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/knox/gateway/topology/ClusterConfigurationMonitorService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/ClusterConfigurationMonitorService.java b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/ClusterConfigurationMonitorService.java
new file mode 100644
index 0000000..0bfaa5f
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/ClusterConfigurationMonitorService.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology;
+
+import org.apache.knox.gateway.services.Service;
+import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitor;
+
+/**
+ * Gateway service for managing cluster configuration monitors.
+ */
+public interface ClusterConfigurationMonitorService extends Service {
+
+    /**
+     *
+     * @param type The type of monitor (e.g., Ambari)
+     *
+     * @return The monitor associated with the specified type, or null if there is no such monitor.
+     */
+    ClusterConfigurationMonitor getMonitor(String type);
+
+
+    /**
+     * Register for configuration change notifications from <em>any</em> of the monitors managed by this service.
+     *
+     * @param listener The listener to register.
+     */
+    void addListener(ClusterConfigurationMonitor.ConfigurationChangeListener listener);
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ClusterConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ClusterConfigurationMonitor.java b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ClusterConfigurationMonitor.java
new file mode 100644
index 0000000..641bad5
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ClusterConfigurationMonitor.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery;
+
+public interface ClusterConfigurationMonitor {
+
+    /**
+     * Start the monitor.
+     */
+    void start();
+
+    /**
+     * Stop the monitor.
+     */
+    void stop();
+
+    /**
+     *
+     * @param interval The polling interval, in seconds
+     */
+    void setPollingInterval(int interval);
+
+    /**
+     * Register for notifications from the monitor.
+     */
+    void addListener(ConfigurationChangeListener listener);
+
+    /**
+     * Monitor listener interface for receiving notifications that a configuration has changed.
+     */
+    interface ConfigurationChangeListener {
+        void onConfigurationChange(String source, String clusterName);
+    }
+}
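
A minimal sketch of wiring the monitor service and monitor interfaces together, assuming a ClusterConfigurationMonitorService instance is available from the gateway's service registry; the "Ambari" type string, the polling interval, and the listener body are illustrative.

    import org.apache.knox.gateway.topology.ClusterConfigurationMonitorService;
    import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitor;

    public class MonitorWiringSketch {
        static void watchCluster(ClusterConfigurationMonitorService monitorService) {
            // ConfigurationChangeListener has a single method, so a lambda suffices.
            monitorService.addListener((source, clusterName) ->
                System.out.println("Configuration change from " + source + " for cluster " + clusterName));

            ClusterConfigurationMonitor monitor = monitorService.getMonitor("Ambari"); // type string is illustrative
            if (monitor != null) {
                monitor.setPollingInterval(60); // seconds
                monitor.start();
            }
        }
    }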

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java
new file mode 100644
index 0000000..c84e5c9
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ClusterConfigurationMonitorProvider.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.security.AliasService;
+
+public interface ClusterConfigurationMonitorProvider {
+
+    String getType();
+
+    ClusterConfigurationMonitor newInstance(GatewayConfig config, AliasService aliasService);
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitor.java b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitor.java
new file mode 100644
index 0000000..0ce1513
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitor.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.monitor;
+
+public interface RemoteConfigurationMonitor {
+
+    void start() throws Exception;
+
+    void stop() throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-spi/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java
new file mode 100644
index 0000000..cab33f9
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.monitor;
+
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+public interface RemoteConfigurationMonitorProvider {
+
+    /**
+     *
+     * @param config        The gateway configuration.
+     * @param clientService The RemoteConfigurationRegistryClientService for accessing the remote configuration.
+     *
+     * @return A RemoteConfigurationMonitor for keeping the local config in sync with the remote config
+     */
+    RemoteConfigurationMonitor newInstance(GatewayConfig config, RemoteConfigurationRegistryClientService clientService);
+
+}
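
A minimal sketch of a hypothetical implementation of this provider SPI; the class name and the logging-only monitor body are illustrative and not part of the Knox codebase. A real provider would presumably use the supplied clientService to register watches on the remote registry.

    import org.apache.knox.gateway.config.GatewayConfig;
    import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
    import org.apache.knox.gateway.topology.monitor.RemoteConfigurationMonitor;
    import org.apache.knox.gateway.topology.monitor.RemoteConfigurationMonitorProvider;

    public class LoggingRemoteConfigurationMonitorProvider implements RemoteConfigurationMonitorProvider {
        @Override
        public RemoteConfigurationMonitor newInstance(GatewayConfig config,
                                                      RemoteConfigurationRegistryClientService clientService) {
            return new RemoteConfigurationMonitor() {
                @Override
                public void start() {
                    // A real implementation would register listeners via clientService here.
                    System.out.println("Remote configuration monitor started");
                }

                @Override
                public void stop() {
                    System.out.println("Remote configuration monitor stopped");
                }
            };
        }
    }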

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-test/src/test/java/org/apache/hadoop/gateway/SimpleDescriptorHandlerFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/SimpleDescriptorHandlerFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/SimpleDescriptorHandlerFuncTest.java
deleted file mode 100644
index bda8952..0000000
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/SimpleDescriptorHandlerFuncTest.java
+++ /dev/null
@@ -1,275 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.services.GatewayServices;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.KeystoreService;
-import org.apache.hadoop.gateway.services.security.MasterService;
-import org.apache.hadoop.gateway.services.topology.TopologyService;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
-import org.apache.hadoop.gateway.topology.simple.SimpleDescriptor;
-import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorHandler;
-import org.apache.hadoop.test.TestUtils;
-import org.easymock.Capture;
-import org.easymock.EasyMock;
-import org.junit.Test;
-
-import java.io.File;
-import java.net.InetSocketAddress;
-import java.security.KeyStore;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.capture;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-public class SimpleDescriptorHandlerFuncTest {
-
-
-  private static final String TEST_PROVIDER_CONFIG =
-      "    <gateway>\n" +
-          "        <provider>\n" +
-          "            <role>authentication</role>\n" +
-          "            <name>ShiroProvider</name>\n" +
-          "            <enabled>true</enabled>\n" +
-          "            <param>\n" +
-          "                <name>sessionTimeout</name>\n" +
-          "                <value>30</value>\n" +
-          "            </param>\n" +
-          "            <param>\n" +
-          "                <name>main.ldapRealm</name>\n" +
-          "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
-          "            </param>\n" +
-          "            <param>\n" +
-          "                <name>main.ldapContextFactory</name>\n" +
-          "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
-          "            </param>\n" +
-          "            <param>\n" +
-          "                <name>main.ldapRealm.contextFactory</name>\n" +
-          "                <value>$ldapContextFactory</value>\n" +
-          "            </param>\n" +
-          "            <param>\n" +
-          "                <name>main.ldapRealm.userDnTemplate</name>\n" +
-          "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
-          "            </param>\n" +
-          "            <param>\n" +
-          "                <name>main.ldapRealm.contextFactory.url</name>\n" +
-          "                <value>ldap://localhost:33389</value>\n" +
-          "            </param>\n" +
-          "            <param>\n" +
-          "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
-          "                <value>simple</value>\n" +
-          "            </param>\n" +
-          "            <param>\n" +
-          "                <name>urls./**</name>\n" +
-          "                <value>authcBasic</value>\n" +
-          "            </param>\n" +
-          "        </provider>\n" +
-          "\n" +
-          "        <provider>\n" +
-          "            <role>identity-assertion</role>\n" +
-          "            <name>Default</name>\n" +
-          "            <enabled>true</enabled>\n" +
-          "        </provider>\n" +
-          "\n" +
-          "        <provider>\n" +
-          "            <role>hostmap</role>\n" +
-          "            <name>static</name>\n" +
-          "            <enabled>true</enabled>\n" +
-          "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
-          "        </provider>\n" +
-          "    </gateway>\n";
-
-
-  /**
-   * KNOX-1136
-   * <p>
-   * Test that a credential store is created, and a encryptQueryString alias is defined, with a password that is not
-   * random (but is derived from the master secret and the topology name).
-   * <p>
-   * N.B. This test depends on the NoOpServiceDiscovery extension being configured in META-INF/services
-   */
-  @Test
-  public void testSimpleDescriptorHandlerQueryStringCredentialAliasCreation() throws Exception {
-
-    final String testMasterSecret = "mysecret";
-    final String discoveryType = "NO_OP";
-    final String clusterName = "dummy";
-
-    final Map<String, List<String>> serviceURLs = new HashMap<>();
-    serviceURLs.put("RESOURCEMANAGER", Collections.singletonList("http://myhost:1234/resource"));
-
-    File testRootDir = TestUtils.createTempDir(getClass().getSimpleName());
-    File testConfDir = new File(testRootDir, "conf");
-    File testProvDir = new File(testConfDir, "shared-providers");
-    File testTopoDir = new File(testConfDir, "topologies");
-    File testDeployDir = new File(testConfDir, "deployments");
-
-    // Write the externalized provider config to a temp file
-    File providerConfig = new File(testProvDir, "ambari-cluster-policy.xml");
-    FileUtils.write(providerConfig, TEST_PROVIDER_CONFIG);
-
-    File topologyFile = null;
-    try {
-      File destDir = new File(System.getProperty("java.io.tmpdir")).getCanonicalFile();
-
-      // Mock out the simple descriptor
-      SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
-      EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
-      EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(null).anyTimes();
-      EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(discoveryType).anyTimes();
-      EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
-      EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
-      EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
-      List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
-      for (String serviceName : serviceURLs.keySet()) {
-        SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
-        EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
-        EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
-        EasyMock.expect(svc.getParams()).andReturn(Collections.emptyMap()).anyTimes();
-        EasyMock.replay(svc);
-        serviceMocks.add(svc);
-      }
-      EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
-      EasyMock.replay(testDescriptor);
-
-      // Try setting up enough of the GatewayServer to support the test...
-      GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
-      InetSocketAddress gatewayAddress = new InetSocketAddress(0);
-      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(testTopoDir.getAbsolutePath()).anyTimes();
-      EasyMock.expect(config.getGatewayDeploymentDir()).andReturn(testDeployDir.getAbsolutePath()).anyTimes();
-      EasyMock.expect(config.getGatewayAddress()).andReturn(gatewayAddress).anyTimes();
-      EasyMock.expect(config.getGatewayPortMappings()).andReturn(Collections.emptyMap()).anyTimes();
-      EasyMock.replay(config);
-
-      // Setup the Gateway Services
-      GatewayServices gatewayServices = EasyMock.createNiceMock(GatewayServices.class);
-
-      // Master Service
-      MasterService ms = EasyMock.createNiceMock(MasterService.class);
-      EasyMock.expect(ms.getMasterSecret()).andReturn(testMasterSecret.toCharArray()).anyTimes();
-      EasyMock.replay(ms);
-      EasyMock.expect(gatewayServices.getService("MasterService")).andReturn(ms).anyTimes();
-
-      // Keystore Service
-      KeystoreService ks = EasyMock.createNiceMock(KeystoreService.class);
-      EasyMock.expect(ks.isCredentialStoreForClusterAvailable(testDescriptor.getName())).andReturn(false).once();
-      ks.createCredentialStoreForCluster(testDescriptor.getName());
-      EasyMock.expectLastCall().once();
-      KeyStore credStore = EasyMock.createNiceMock(KeyStore.class);
-      EasyMock.expect(ks.getCredentialStoreForCluster(testDescriptor.getName())).andReturn(credStore).anyTimes();
-      EasyMock.replay(ks);
-      EasyMock.expect(gatewayServices.getService(GatewayServices.KEYSTORE_SERVICE)).andReturn(ks).anyTimes();
-
-      // Alias Service
-      AliasService as = EasyMock.createNiceMock(AliasService.class);
-      // Captures for validating the alias creation for a generated topology
-      Capture<String> capturedCluster = EasyMock.newCapture();
-      Capture<String> capturedAlias = EasyMock.newCapture();
-      Capture<String> capturedPwd = EasyMock.newCapture();
-      as.addAliasForCluster(capture(capturedCluster), capture(capturedAlias), capture(capturedPwd));
-      EasyMock.expectLastCall().anyTimes();
-      EasyMock.replay(as);
-      EasyMock.expect(gatewayServices.getService(GatewayServices.ALIAS_SERVICE)).andReturn(as).anyTimes();
-
-      // Topology Service
-      TopologyService ts = EasyMock.createNiceMock(TopologyService.class);
-      ts.addTopologyChangeListener(anyObject());
-      EasyMock.expectLastCall().anyTimes();
-      ts.reloadTopologies();
-      EasyMock.expectLastCall().anyTimes();
-      EasyMock.expect(ts.getTopologies()).andReturn(Collections.emptyList()).anyTimes();
-      EasyMock.replay(ts);
-      EasyMock.expect(gatewayServices.getService(GatewayServices.TOPOLOGY_SERVICE)).andReturn(ts).anyTimes();
-
-      EasyMock.replay(gatewayServices);
-
-      // Start a GatewayService with the GatewayServices mock
-      GatewayServer server = GatewayServer.startGateway(config, gatewayServices);
-
-      // Invoke the simple descriptor handler, which will also create the credential store
-      // (because it doesn't exist) and the encryptQueryString alias
-      Map<String, File> files = SimpleDescriptorHandler.handle(testDescriptor,
-                                                               providerConfig.getParentFile(),
-                                                               destDir);
-      topologyFile = files.get("topology");
-
-      // Validate the AliasService interaction
-      assertEquals("Unexpected cluster name for the alias (should be the topology name).",
-                   testDescriptor.getName(), capturedCluster.getValue());
-      assertEquals("Unexpected alias name.", "encryptQueryString", capturedAlias.getValue());
-      assertEquals("Unexpected alias value (should be master secret + topology name.",
-                   testMasterSecret + testDescriptor.getName(), capturedPwd.getValue());
-
-    } catch (Exception e) {
-      e.printStackTrace();
-      fail(e.getMessage());
-    } finally {
-      FileUtils.forceDelete(testRootDir);
-      if (topologyFile != null) {
-        topologyFile.delete();
-      }
-    }
-  }
-
-
-  ///////////////////////////////////////////////////////////////////////////////////////////////////////
-  // Test classes for effectively "skipping" service discovery for this test.
-  ///////////////////////////////////////////////////////////////////////////////////////////////////////
-
-  public static final class NoOpServiceDiscoveryType implements ServiceDiscoveryType {
-    @Override
-    public String getType() {
-      return NoOpServiceDiscovery.TYPE;
-    }
-
-    @Override
-    public ServiceDiscovery newInstance() {
-      return new NoOpServiceDiscovery();
-    }
-  }
-
-  private static final class NoOpServiceDiscovery implements ServiceDiscovery {
-    static final String TYPE = "NO_OP";
-
-    @Override
-    public String getType() {
-      return TYPE;
-    }
-
-    @Override
-    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
-      return Collections.emptyMap();
-    }
-
-    @Override
-    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
-      return null;
-    }
-  }
-
-}


[30/49] knox git commit: KNOX-1139 - Fix thread pool size for UnitTests (Attila Csoma via Sandeep More)

Posted by mo...@apache.org.
KNOX-1139 - Fix thread pool size for UnitTests (Attila Csoma via Sandeep More)


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/19362b9d
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/19362b9d
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/19362b9d

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 19362b9dd99586e64ba4cadc6e29a41d40e41f72
Parents: a874f39
Author: Sandeep More <mo...@apache.org>
Authored: Wed Dec 6 11:26:49 2017 -0500
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Dec 6 11:26:49 2017 -0500

----------------------------------------------------------------------
 .../src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/19362b9d/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
index e04c581..1198223 100644
--- a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
+++ b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -367,7 +367,7 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
 
   @Override
   public int getThreadPoolMax() {
-    return 16;
+    return 254;
   }
 
   @Override


[32/49] knox git commit: Updated CHANGES for v0.14.0

Posted by mo...@apache.org.
Updated CHANGES for v0.14.0

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/370c8610
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/370c8610
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/370c8610

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 370c8610587a2a3045f699525f9a77ffbcce9f9f
Parents: bfb556c
Author: Larry McCay <lm...@hortonworks.com>
Authored: Wed Dec 6 12:58:30 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Wed Dec 6 12:59:19 2017 -0500

----------------------------------------------------------------------
 CHANGES | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/370c8610/CHANGES
----------------------------------------------------------------------
diff --git a/CHANGES b/CHANGES
index 3fe8edd..e9b8c0a 100644
--- a/CHANGES
+++ b/CHANGES
@@ -14,14 +14,15 @@ Release Notes - Apache Knox - Version 0.14.0
    * [KNOX-842] - Add support for Livy (Spark REST Service).
    * [KNOX-1017] - Add support for enabling "Strict-Transport-Security" header in Knox responses (Latha Appanna via lmccay)
    * [KNOX-1049] - Default Service or App Context for Topologies
-   * [KNOX-1049] - change BeanConverter for path element
-   * [KNOX-1049] - marshaling changes
-   * [KNOX-1049] - change defaultServicePath to path
    * [KNOX-1041] - High Availability Support For Apache SOLR, HBase & Kafka (Rick Kellogg via Sandeep More)
    * [KNOX-1046] - Add Client Cert Wanted Capability with Configurable Validation that Checks for It
    * [KNOX-1072] - Add Client Cert Required Capability to KnoxToken
    * [KNOX-1107] - Remote Configuration Registry Client Service (Phil Zampino via lmccay)
    * [KNOX-1128] - Readonly protection for generated topologies in Knox Admin UI
+   * [KNOX-1013] - Monitor Ambari for Cluster Topology changes (Phil Zampino via lmccay)
+   * [KNOX-1136] - Provision Consistent Credentials For Generated Topologies (phil zampino via lmccay)
+   * [KNOX-1129] - Remote Configuration Monitor Should Define The Entries It Monitors If They're Not Yet Defined (Phil Zampino via lmccay)
+   * [KNOX-1125] - KNOXCLI Additions to Support Management of Knox config in remote registry (Phil Zampino via Sandeep More)
 ** Improvement
    * [KNOX-921] - Httpclient max connections are always set to default values
    * [KNOX-1106] - Tighten the rewrite rule on oozieui to reduce false positives (Wei Han via Sandeep More)
@@ -78,6 +79,11 @@ Release Notes - Apache Knox - Version 0.14.0
    * [KNOX-1119] - Pac4J OAuth/OpenID Principal Needs to be Configurable (Andreas Hildebrandt via lmccay)
    * [KNOX-1120] - Pac4J Stop Using ConfigSingleton
    * [KNOX-1128] - Readonly protection for generated topologies in Knox Admin UI
+   * [KNOX-1083] - HttpClient default timeout will be 20 sec
+   * [KNOX-1124] - Service Discovery Doesn't Work For Simple Descriptors With .yaml Extension (Phil Zampino  via Sandeep More)
+   * [KNOX-1117] - HostMap Provider configuration comment in topologies included with Knox has typos (Phil Zampino via Sandeep More)
+   * [KNOX-1119] - Add missing tests for valid and invalid id attributes
+   * [KNOX-1128] - Readonly protection for generated topologies in Knox Admin UI
 ** Bug
    * [KNOX-1003] - Fix the rewrite rules for Zeppelin 0.7.2 UI
    * [KNOX-1004] - Failing (flaky) Knox unit tests
@@ -88,6 +94,8 @@ Release Notes - Apache Knox - Version 0.14.0
    * [KNOX-1022] - Configuring knox token ttl to higher value generates an access token which is not valid
    * [KNOX-1118] - Remove POC Service Definition for AmbariUI
    * [KNOX-1134] - Regression due to KNOX-1119
+   * [KNOX-1132] - Address Coverity Defects in gateway-service-remoteconfig (Phil Zampino via lmccay)
+   * [KNOX-1139] - Fix threapool size for UnitTests (Attila Csoma via Sandeep More)
 
 ------------------------------------------------------------------------------
 Release Notes - Apache Knox - Version 0.13.0


[45/49] knox git commit: KNOX-998 - Merge from trunk 0.14.0 code

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java
new file mode 100644
index 0000000..96b931c
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.zk;
+
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider;
+
+
+public class ZooKeeperClientServiceProvider implements RemoteConfigurationRegistryClientServiceProvider {
+
+    @Override
+    public String getType() {
+        return ZooKeeperClientService.TYPE;
+    }
+
+    @Override
+    public ZooKeeperClientService newInstance() {
+        return new CuratorClientService();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider b/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
deleted file mode 100644
index 7f2312a..0000000
--- a/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.service.config.remote.zk.ZooKeeperClientServiceProvider

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider b/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
new file mode 100644
index 0000000..fe12e48
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/resources/META-INF/services/org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.knox.gateway.service.config.remote.zk.ZooKeeperClientServiceProvider
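
The entry above is a standard java.util.ServiceLoader registration for the renamed provider. A rough sketch of how such a provider could be looked up by its type string (assumed logic; the actual factory code is not part of this diff):

    import java.util.ServiceLoader;

    import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider;

    public class ProviderLookupSketch {
      // Assumed lookup: scan every provider registered under META-INF/services
      // and return the one whose getType() matches, e.g. ZooKeeperClientService.TYPE.
      static RemoteConfigurationRegistryClientServiceProvider find(String type) {
        for (RemoteConfigurationRegistryClientServiceProvider provider :
                 ServiceLoader.load(RemoteConfigurationRegistryClientServiceProvider.class)) {
          if (provider.getType().equalsIgnoreCase(type)) {
            return provider;
          }
        }
        return null;
      }
    }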

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java
deleted file mode 100644
index a33fcc2..0000000
--- a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.config;
-
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.easymock.EasyMock;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import static org.junit.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-
-public class DefaultRemoteConfigurationRegistriesTest {
-
-    /**
-     * Test a single registry configuration with digest auth configuration.
-     */
-    @Test
-    public void testPropertiesRemoteConfigurationRegistriesSingleDigest() throws Exception {
-        Map<String, Properties> testProperties = new HashMap<>();
-        Properties p = new Properties();
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "hostx:2181");
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "zkDigestUser");
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "digest");
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS, "zkDigestAlias");
-        testProperties.put("testDigest", p);
-
-        doTestPropertiesRemoteConfigurationRegistries(testProperties);
-    }
-
-
-    /**
-     * Test a single registry configuration with kerberos auth configuration.
-     */
-    @Test
-    public void testPropertiesRemoteConfigurationRegistriesSingleKerberos() throws Exception {
-        Map<String, Properties> testProperties = new HashMap<>();
-        Properties p = new Properties();
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "hostx:2181");
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "zkUser");
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "kerberos");
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB, "/home/user/remoteregistry.keytab");
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB, "true");
-        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE, "false");
-        testProperties.put("testKerb", p);
-
-        doTestPropertiesRemoteConfigurationRegistries(testProperties);
-    }
-
-    /**
-     * Test multiple registry configuration with varying auth configurations.
-     */
-    @Test
-    public void testPropertiesRemoteConfigurationRegistriesMultipleMixed() throws Exception {
-        Map<String, Properties> testProperties = new HashMap<>();
-
-        Properties kerb = new Properties();
-        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
-        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "host1:2181");
-        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_NAMESPACE, "/knox/config");
-        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "kerbPrincipal");
-        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "kerberos");
-        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB, "/home/user/mykrb.keytab");
-        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB, "true");
-        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE, "false");
-        testProperties.put("testKerb1", kerb);
-
-        Properties digest = new Properties();
-        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
-        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "host2:2181");
-        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "digestPrincipal");
-        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "digest");
-        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS, "digestPwdAlias");
-        testProperties.put("testDigest1", digest);
-
-        Properties unsecured = new Properties();
-        unsecured.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
-        unsecured.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "host2:2181");
-        testProperties.put("testUnsecured", unsecured);
-
-        doTestPropertiesRemoteConfigurationRegistries(testProperties);
-    }
-
-
-    /**
-     * Perform the actual test.
-     *
-     * @param testProperties The test properties
-     */
-    private void doTestPropertiesRemoteConfigurationRegistries(Map<String, Properties> testProperties) throws Exception {
-        // Mock gateway config
-        GatewayConfig gc = mockGatewayConfig(testProperties);
-
-        // Create the RemoteConfigurationRegistries object to be tested from the GatewayConfig
-        RemoteConfigurationRegistries registries = new DefaultRemoteConfigurationRegistries(gc);
-
-        // Basic validation
-        assertNotNull(registries);
-        List<RemoteConfigurationRegistry> registryConfigs = registries.getRegistryConfigurations();
-        assertNotNull(registryConfigs);
-        assertEquals(testProperties.size(), registryConfigs.size());
-
-        // Validate the contents of the created object
-        for (RemoteConfigurationRegistry regConfig : registryConfigs) {
-            validateRemoteRegistryConfig(regConfig.getName(), testProperties.get(regConfig.getName()), regConfig);
-        }
-    }
-
-
-    /**
-     * Create a mock GatewayConfig based on the specified test properties.
-     *
-     * @param testProperties The test properties to set on the config
-     */
-    private GatewayConfig mockGatewayConfig(Map<String, Properties> testProperties) {
-        // Mock gateway config
-        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
-        List<String> configNames = new ArrayList<>();
-        for (String registryName : testProperties.keySet()) {
-            configNames.add(registryName);
-
-            String propertyValueString = "";
-            Properties props = testProperties.get(registryName);
-            Enumeration names = props.propertyNames();
-            while (names.hasMoreElements()) {
-                String propertyName = (String) names.nextElement();
-                propertyValueString += propertyName + "=" + props.get(propertyName);
-                if (names.hasMoreElements()) {
-                    propertyValueString += ";";
-                }
-            }
-            EasyMock.expect(gc.getRemoteRegistryConfiguration(registryName))
-                    .andReturn(propertyValueString)
-                    .anyTimes();
-        }
-        EasyMock.expect(gc.getRemoteRegistryConfigurationNames()).andReturn(configNames).anyTimes();
-        EasyMock.replay(gc);
-
-        return gc;
-    }
-
-
-    /**
-     * Validate the specified RemoteConfigurationRegistry based on the expected test properties.
-     */
-    private void validateRemoteRegistryConfig(String                      configName,
-                                              Properties                  expected,
-                                              RemoteConfigurationRegistry registryConfig) throws Exception {
-        assertEquals(configName, registryConfig.getName());
-        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE), registryConfig.getRegistryType());
-        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS), registryConfig.getConnectionString());
-        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_NAMESPACE), registryConfig.getNamespace());
-        assertEquals(registryConfig.isSecureRegistry(), expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE) != null);
-        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE), registryConfig.getAuthType());
-        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL), registryConfig.getPrincipal());
-        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS), registryConfig.getCredentialAlias());
-        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB), registryConfig.getKeytab());
-        assertEquals(Boolean.valueOf((String)expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB)), registryConfig.isUseKeyTab());
-        assertEquals(Boolean.valueOf((String)expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE)), registryConfig.isUseTicketCache());
-    }
-
-}
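
The deleted test above captures the expected format of an in-config registry definition: each registry name maps to a single ';'-separated list of name=value pairs returned by getRemoteRegistryConfiguration(name). A hedged, self-contained sketch of one such value, assembled the same way mockGatewayConfig() does it (host and alias values are invented for the example):

    import org.apache.hadoop.gateway.config.GatewayConfig;

    public class RegistryConfigStringSketch {
      public static void main(String[] args) {
        // One registry entry as a ';'-separated name=value list.
        String registryConfig =
            GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=ZooKeeper;" +
            GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=hostx:2181;" +
            GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=digest;" +
            GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=zkDigestUser;" +
            GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=zkDigestAlias";
        // A GatewayConfig returning this from getRemoteRegistryConfiguration("testDigest")
        // is all DefaultRemoteConfigurationRegistries needs to build a RemoteConfigurationRegistry.
        System.out.println(registryConfig);
      }
    }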

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java
deleted file mode 100644
index 386e332..0000000
--- a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.config;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
-import org.apache.hadoop.gateway.service.config.remote.util.RemoteRegistryConfigTestUtils;
-import org.junit.Test;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-
-import static org.apache.hadoop.gateway.service.config.remote.util.RemoteRegistryConfigTestUtils.*;
-
-public class RemoteConfigurationRegistryConfigParserTest {
-
-    @Test
-    public void testExternalXMLParsing() throws Exception {
-        final String CONN_STR = "http://my.zookeeper.host:2181";
-
-        Map<String, Map<String, String>> testRegistryConfigurations = new HashMap<>();
-
-        Map<String, String> config1 = new HashMap<>();
-        config1.put(PROPERTY_TYPE, "ZooKeeper");
-        config1.put(PROPERTY_NAME, "registry1");
-        config1.put(PROPERTY_ADDRESS, CONN_STR);
-        config1.put(PROPERTY_SECURE, "true");
-        config1.put(PROPERTY_AUTH_TYPE, "Digest");
-        config1.put(PROPERTY_PRINCIPAL, "knox");
-        config1.put(PROPERTY_CRED_ALIAS, "zkCredential");
-        testRegistryConfigurations.put(config1.get("name"), config1);
-
-        Map<String, String> config2 = new HashMap<>();
-        config2.put(PROPERTY_TYPE, "ZooKeeper");
-        config2.put(PROPERTY_NAME, "MyKerberos");
-        config2.put(PROPERTY_ADDRESS, CONN_STR);
-        config2.put(PROPERTY_SECURE, "true");
-        config2.put(PROPERTY_AUTH_TYPE, "Kerberos");
-        config2.put(PROPERTY_PRINCIPAL, "knox");
-        File myKeyTab = File.createTempFile("mytest", "keytab");
-        config2.put(PROPERTY_KEYTAB, myKeyTab.getAbsolutePath());
-        config2.put(PROPERTY_USE_KEYTAB, "false");
-        config2.put(PROPERTY_USE_TICKET_CACHE, "true");
-        testRegistryConfigurations.put(config2.get("name"), config2);
-
-        Map<String, String> config3 = new HashMap<>();
-        config3.put(PROPERTY_TYPE, "ZooKeeper");
-        config3.put(PROPERTY_NAME, "anotherRegistry");
-        config3.put(PROPERTY_ADDRESS, "whatever:1281");
-        testRegistryConfigurations.put(config3.get("name"), config3);
-
-        String configXML =
-                    RemoteRegistryConfigTestUtils.createRemoteConfigRegistriesXML(testRegistryConfigurations.values());
-
-        File registryConfigFile = File.createTempFile("remote-registries", "xml");
-        try {
-            FileUtils.writeStringToFile(registryConfigFile, configXML);
-
-            List<RemoteConfigurationRegistryConfig> configs =
-                                    RemoteConfigurationRegistriesParser.getConfig(registryConfigFile.getAbsolutePath());
-            assertNotNull(configs);
-            assertEquals(testRegistryConfigurations.keySet().size(), configs.size());
-
-            for (RemoteConfigurationRegistryConfig registryConfig : configs) {
-                Map<String, String> expected = testRegistryConfigurations.get(registryConfig.getName());
-                assertNotNull(expected);
-                validateParsedRegistryConfiguration(registryConfig, expected);
-            }
-        } finally {
-            registryConfigFile.delete();
-        }
-    }
-
-    private void validateParsedRegistryConfiguration(RemoteConfigurationRegistryConfig config,
-                                                     Map<String, String> expected) throws Exception {
-        assertEquals(expected.get(PROPERTY_TYPE), config.getRegistryType());
-        assertEquals(expected.get(PROPERTY_ADDRESS), config.getConnectionString());
-        assertEquals(expected.get(PROPERTY_NAME), config.getName());
-        assertEquals(expected.get(PROPERTY_NAMESAPCE), config.getNamespace());
-        assertEquals(Boolean.valueOf(expected.get(PROPERTY_SECURE)), config.isSecureRegistry());
-        assertEquals(expected.get(PROPERTY_AUTH_TYPE), config.getAuthType());
-        assertEquals(expected.get(PROPERTY_PRINCIPAL), config.getPrincipal());
-        assertEquals(expected.get(PROPERTY_CRED_ALIAS), config.getCredentialAlias());
-        assertEquals(expected.get(PROPERTY_KEYTAB), config.getKeytab());
-        assertEquals(Boolean.valueOf(expected.get(PROPERTY_USE_KEYTAB)), config.isUseKeyTab());
-        assertEquals(Boolean.valueOf(expected.get(PROPERTY_USE_TICKET_CACHE)), config.isUseTicketCache());
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java
deleted file mode 100644
index 35919d0..0000000
--- a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/util/RemoteRegistryConfigTestUtils.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.util;
-
-import java.util.Collection;
-import java.util.Map;
-
-public class RemoteRegistryConfigTestUtils {
-
-    public static final String PROPERTY_TYPE = "type";
-    public static final String PROPERTY_NAME = "name";
-    public static final String PROPERTY_ADDRESS = "address";
-    public static final String PROPERTY_NAMESAPCE = "namespace";
-    public static final String PROPERTY_SECURE = "secure";
-    public static final String PROPERTY_AUTH_TYPE = "authType";
-    public static final String PROPERTY_PRINCIPAL = "principal";
-    public static final String PROPERTY_CRED_ALIAS = "credentialAlias";
-    public static final String PROPERTY_KEYTAB = "keyTab";
-    public static final String PROPERTY_USE_KEYTAB = "useKeyTab";
-    public static final String PROPERTY_USE_TICKET_CACHE = "useTicketCache";
-
-    public static String createRemoteConfigRegistriesXML(Collection<Map<String, String>> configProperties) {
-        String result = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
-                        "<remote-configuration-registries>\n";
-
-        for (Map<String, String> props : configProperties) {
-            String authType = props.get(PROPERTY_AUTH_TYPE);
-            if ("Kerberos".equalsIgnoreCase(authType)) {
-                result +=
-                   createRemoteConfigRegistryXMLWithKerberosAuth(props.get(PROPERTY_TYPE),
-                                                                 props.get(PROPERTY_NAME),
-                                                                 props.get(PROPERTY_ADDRESS),
-                                                                 props.get(PROPERTY_PRINCIPAL),
-                                                                 props.get(PROPERTY_KEYTAB),
-                                                                 Boolean.valueOf(props.get(PROPERTY_USE_KEYTAB)),
-                                                                 Boolean.valueOf(props.get(PROPERTY_USE_TICKET_CACHE)));
-            } else if ("Digest".equalsIgnoreCase(authType)) {
-                result +=
-                    createRemoteConfigRegistryXMLWithDigestAuth(props.get(PROPERTY_TYPE),
-                                                                props.get(PROPERTY_NAME),
-                                                                props.get(PROPERTY_ADDRESS),
-                                                                props.get(PROPERTY_PRINCIPAL),
-                                                                props.get(PROPERTY_CRED_ALIAS));
-            } else {
-                result += createRemoteConfigRegistryXMLNoAuth(props.get(PROPERTY_TYPE),
-                                                              props.get(PROPERTY_NAME),
-                                                              props.get(PROPERTY_ADDRESS));
-            }
-        }
-
-        result += "</remote-configuration-registries>\n";
-
-        return result;
-    }
-
-    public static String createRemoteConfigRegistryXMLWithKerberosAuth(String type,
-                                                                       String name,
-                                                                       String address,
-                                                                       String principal,
-                                                                       String keyTab,
-                                                                       boolean userKeyTab,
-                                                                       boolean useTicketCache) {
-        return "  <remote-configuration-registry>\n" +
-               "    <name>" + name + "</name>\n" +
-               "    <type>" + type + "</type>\n" +
-               "    <address>" + address + "</address>\n" +
-               "    <secure>true</secure>\n" +
-               "    <auth-type>" + "Kerberos" + "</auth-type>\n" +
-               "    <principal>" + principal + "</principal>\n" +
-               "    <keytab>" + keyTab + "</keytab>\n" +
-               "    <use-keytab>" + String.valueOf(userKeyTab) + "</use-keytab>\n" +
-               "    <use-ticket-cache>" + String.valueOf(useTicketCache) + "</use-ticket-cache>\n" +
-               "  </remote-configuration-registry>\n";
-    }
-
-    public static String createRemoteConfigRegistryXMLWithDigestAuth(String type,
-                                                                     String name,
-                                                                     String address,
-                                                                     String principal,
-                                                                     String credentialAlias) {
-        return "  <remote-configuration-registry>\n" +
-               "    <name>" + name + "</name>\n" +
-               "    <type>" + type + "</type>\n" +
-               "    <address>" + address + "</address>\n" +
-               "    <secure>true</secure>\n" +
-               "    <auth-type>" + "Digest" + "</auth-type>\n" +
-               "    <principal>" + principal + "</principal>\n" +
-               "    <credential-alias>" + credentialAlias + "</credential-alias>\n" +
-               "  </remote-configuration-registry>\n";
-    }
-
-
-    public static String createRemoteConfigRegistryXMLNoAuth(String type,
-                                                             String name,
-                                                             String address) {
-        return "  <remote-configuration-registry>\n" +
-               "    <name>" + name + "</name>\n" +
-               "    <type>" + type + "</type>\n" +
-               "    <address>" + address + "</address>\n" +
-               "  </remote-configuration-registry>\n";
-    }
-
-}
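
For reference, the XML that createRemoteConfigRegistriesXML() above produces for a digest-secured registry looks roughly like this (values borrowed from the parser test earlier in this diff):

    <?xml version="1.0" encoding="utf-8"?>
    <remote-configuration-registries>
      <remote-configuration-registry>
        <name>registry1</name>
        <type>ZooKeeper</type>
        <address>http://my.zookeeper.host:2181</address>
        <secure>true</secure>
        <auth-type>Digest</auth-type>
        <principal>knox</principal>
        <credential-alias>zkCredential</credential-alias>
      </remote-configuration-registry>
    </remote-configuration-registries>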

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java
deleted file mode 100644
index 0292ee3..0000000
--- a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryClientServiceTest.java
+++ /dev/null
@@ -1,424 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.zk;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.curator.test.InstanceSpec;
-import org.apache.curator.test.TestingCluster;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.ChildEntryListener;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceFactory;
-import org.apache.hadoop.gateway.service.config.remote.util.RemoteRegistryConfigTestUtils;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.data.ACL;
-import org.apache.zookeeper.data.Id;
-import org.easymock.EasyMock;
-import org.junit.Test;
-
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-public class RemoteConfigurationRegistryClientServiceTest {
-
-    /**
-     * Test a configuration for an unsecured remote registry, included in the gateway configuration.
-     */
-    @Test
-    public void testUnsecuredZooKeeperWithSimpleRegistryConfig() throws Exception {
-        final String REGISTRY_CLIENT_NAME = "unsecured-zk-registry-name";
-        final String PRINCIPAL = null;
-        final String PWD = null;
-        final String CRED_ALIAS = null;
-
-        // Configure and start a secure ZK cluster
-        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
-
-        try {
-            // Create the setup client for the test cluster, and initialize the test znodes
-            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
-
-            // Mock configuration
-            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
-            final String registryConfigValue =
-                        GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
-                        GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
-            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME))
-                    .andReturn(registryConfigValue)
-                    .anyTimes();
-            EasyMock.expect(config.getRemoteRegistryConfigurationNames())
-                    .andReturn(Collections.singletonList(REGISTRY_CLIENT_NAME)).anyTimes();
-            EasyMock.replay(config);
-
-            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME, config, CRED_ALIAS, PWD);
-        } finally {
-            zkCluster.stop();
-        }
-    }
-
-    /**
-     * Test multiple configurations for an unsecured remote registry.
-     */
-    @Test
-    public void testMultipleUnsecuredZooKeeperWithSimpleRegistryConfig() throws Exception {
-        final String REGISTRY_CLIENT_NAME_1 = "zkclient1";
-        final String REGISTRY_CLIENT_NAME_2 = "zkclient2";
-        final String PRINCIPAL = null;
-        final String PWD = null;
-        final String CRED_ALIAS = null;
-
-        // Configure and start a secure ZK cluster
-        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
-
-        try {
-            // Create the setup client for the test cluster, and initialize the test znodes
-            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
-
-            // Mock configuration
-            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
-            final String registryConfigValue1 =
-                                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
-                                GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
-            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME_1))
-                    .andReturn(registryConfigValue1).anyTimes();
-            final String registryConfigValue2 =
-                                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
-                                GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
-            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME_2))
-                    .andReturn(registryConfigValue2).anyTimes();
-            EasyMock.expect(config.getRemoteRegistryConfigurationNames())
-                    .andReturn(Arrays.asList(REGISTRY_CLIENT_NAME_1, REGISTRY_CLIENT_NAME_2)).anyTimes();
-            EasyMock.replay(config);
-
-            // Create the client service instance
-            RemoteConfigurationRegistryClientService clientService =
-                    RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
-            assertEquals("Wrong registry client service type.", clientService.getClass(), CuratorClientService.class);
-            clientService.setAliasService(null);
-            clientService.init(config, null);
-            clientService.start();
-
-            RemoteConfigurationRegistryClient client1 = clientService.get(REGISTRY_CLIENT_NAME_1);
-            assertNotNull(client1);
-
-            RemoteConfigurationRegistryClient client2 = clientService.get(REGISTRY_CLIENT_NAME_2);
-            assertNotNull(client2);
-
-            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME_1, clientService, false);
-            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME_2, clientService, false);
-        } finally {
-            zkCluster.stop();
-        }
-    }
-
-    /**
-     * Test a configuration for a secure remote registry, included in the gateway configuration.
-     */
-    @Test
-    public void testZooKeeperWithSimpleRegistryConfig() throws Exception {
-        final String AUTH_TYPE = "digest";
-        final String REGISTRY_CLIENT_NAME = "zk-registry-name";
-        final String PRINCIPAL = "knox";
-        final String PWD = "knoxtest";
-        final String CRED_ALIAS = "zkCredential";
-
-        // Configure and start a secure ZK cluster
-        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
-
-        try {
-            // Create the setup client for the test cluster, and initialize the test znodes
-            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
-
-            // Mock configuration
-            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
-            final String registryConfigValue =
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=" + AUTH_TYPE + ";" +
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + PRINCIPAL + ";" +
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + CRED_ALIAS;
-            EasyMock.expect(config.getRemoteRegistryConfiguration(REGISTRY_CLIENT_NAME))
-                    .andReturn(registryConfigValue)
-                    .anyTimes();
-            EasyMock.expect(config.getRemoteRegistryConfigurationNames())
-                    .andReturn(Collections.singletonList(REGISTRY_CLIENT_NAME)).anyTimes();
-            EasyMock.replay(config);
-
-            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME, config, CRED_ALIAS, PWD);
-        } finally {
-            zkCluster.stop();
-        }
-    }
-
-    /**
-     * Test the remote registry configuration external to, and referenced from, the gateway configuration, for a secure
-     * client.
-     */
-    @Test
-    public void testZooKeeperWithSingleExternalRegistryConfig() throws Exception {
-        final String AUTH_TYPE = "digest";
-        final String REGISTRY_CLIENT_NAME = "my-zookeeper_registryNAME";
-        final String PRINCIPAL = "knox";
-        final String PWD = "knoxtest";
-        final String CRED_ALIAS = "zkCredential";
-
-        // Configure and start a secure ZK cluster
-        TestingCluster zkCluster = setupAndStartSecureTestZooKeeper(PRINCIPAL, PWD);
-
-        File tmpRegConfigFile = null;
-
-        try {
-            // Create the setup client for the test cluster, and initialize the test znodes
-            CuratorFramework setupClient = initializeTestClientAndZNodes(zkCluster, PRINCIPAL);
-
-            // Mock configuration
-            Map<String, String> registryConfigProps = new HashMap<>();
-            registryConfigProps.put("type", ZooKeeperClientService.TYPE);
-            registryConfigProps.put("name", REGISTRY_CLIENT_NAME);
-            registryConfigProps.put("address", zkCluster.getConnectString());
-            registryConfigProps.put("secure", "true");
-            registryConfigProps.put("authType", AUTH_TYPE);
-            registryConfigProps.put("principal", PRINCIPAL);
-            registryConfigProps.put("credentialAlias", CRED_ALIAS);
-            String registryConfigXML =
-                  RemoteRegistryConfigTestUtils.createRemoteConfigRegistriesXML(Collections.singleton(registryConfigProps));
-            tmpRegConfigFile = File.createTempFile("myRemoteRegistryConfig", "xml");
-            FileUtils.writeStringToFile(tmpRegConfigFile, registryConfigXML);
-
-            System.setProperty("org.apache.knox.gateway.remote.registry.config.file", tmpRegConfigFile.getAbsolutePath());
-
-            GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
-            EasyMock.replay(config);
-
-            doTestZooKeeperClient(setupClient, REGISTRY_CLIENT_NAME, config, CRED_ALIAS, PWD);
-        } finally {
-            zkCluster.stop();
-            if (tmpRegConfigFile != null && tmpRegConfigFile.exists()) {
-                tmpRegConfigFile.delete();
-            }
-            System.clearProperty("org.apache.knox.gateway.remote.registry.config.file");
-        }
-    }
-
-    /**
-     * Setup and start a secure test ZooKeeper cluster.
-     */
-    private TestingCluster setupAndStartSecureTestZooKeeper(String principal, String digestPassword) throws Exception {
-        final boolean applyAuthentication = (principal != null);
-
-        // Configure security for the ZK cluster instances
-        Map<String, Object> customInstanceSpecProps = new HashMap<>();
-
-        if (applyAuthentication) {
-            customInstanceSpecProps.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
-            customInstanceSpecProps.put("requireClientAuthScheme", "sasl");
-        }
-
-        // Define the test cluster
-        List<InstanceSpec> instanceSpecs = new ArrayList<>();
-        for (int i = 0 ; i < 3 ; i++) {
-            InstanceSpec is = new InstanceSpec(null, -1, -1, -1, false, (i+1), -1, -1, customInstanceSpecProps);
-            instanceSpecs.add(is);
-        }
-        TestingCluster zkCluster = new TestingCluster(instanceSpecs);
-
-        if (applyAuthentication) {
-            // Setup ZooKeeper server SASL
-            Map<String, String> digestOptions = new HashMap<>();
-            digestOptions.put("user_" + principal, digestPassword);
-            final AppConfigurationEntry[] serverEntries =
-                    {new AppConfigurationEntry("org.apache.zookeeper.server.auth.DigestLoginModule",
-                            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                            digestOptions)};
-            Configuration.setConfiguration(new Configuration() {
-                @Override
-                public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-                    return ("Server".equalsIgnoreCase(name)) ? serverEntries : null;
-                }
-            });
-        }
-
-        // Start the cluster
-        zkCluster.start();
-
-        return zkCluster;
-    }
-
-    /**
-     * Create a ZooKeeper client with SASL digest auth configured, and initialize the test znodes.
-     */
-    private CuratorFramework initializeTestClientAndZNodes(TestingCluster zkCluster, String principal) throws Exception {
-        // Create the client for the test cluster
-        CuratorFramework setupClient = CuratorFrameworkFactory.builder()
-                                                              .connectString(zkCluster.getConnectString())
-                                                              .retryPolicy(new ExponentialBackoffRetry(100, 3))
-                                                              .build();
-        assertNotNull(setupClient);
-        setupClient.start();
-
-        List<ACL> acls = new ArrayList<>();
-        if (principal != null) {
-            acls.add(new ACL(ZooDefs.Perms.ALL, new Id("sasl", principal)));
-        } else {
-            acls.add(new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.ANYONE_ID_UNSAFE));
-        }
-        setupClient.create().creatingParentsIfNeeded().withACL(acls).forPath("/knox/config/descriptors");
-        setupClient.create().creatingParentsIfNeeded().withACL(acls).forPath("/knox/config/shared-providers");
-
-        List<ACL> negativeACLs = new ArrayList<>();
-        if (principal != null) {
-            negativeACLs.add(new ACL(ZooDefs.Perms.ALL, new Id("sasl", "notyou")));
-        } else {
-            negativeACLs.add(new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.ANYONE_ID_UNSAFE));
-        }
-        setupClient.create().creatingParentsIfNeeded().withACL(negativeACLs).forPath("/someotherconfig");
-
-        return setupClient;
-    }
-
-    private void doTestZooKeeperClient(final CuratorFramework setupClient,
-                                       final String           testClientName,
-                                       final GatewayConfig    config,
-                                       final String           credentialAlias,
-                                       final String           digestPassword) throws Exception {
-        boolean isSecureTest = (credentialAlias != null && digestPassword != null);
-
-        // Mock alias service
-        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
-        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(credentialAlias))
-                .andReturn(isSecureTest ? digestPassword.toCharArray() : null)
-                .anyTimes();
-        EasyMock.replay(aliasService);
-
-        // Create the client service instance
-        RemoteConfigurationRegistryClientService clientService =
-                RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
-        assertEquals("Wrong registry client service type.", clientService.getClass(), CuratorClientService.class);
-        clientService.setAliasService(aliasService);
-        clientService.init(config, null);
-        clientService.start();
-
-        doTestZooKeeperClient(setupClient, testClientName, clientService, isSecureTest);
-    }
-
-    /**
-     * Test secure ZooKeeper client interactions.
-     *
-     * @param setupClient    The client used for interacting with ZooKeeper independent from the registry client service.
-     * @param testClientName The name of the client to use from the registry client service.
-     * @param clientService  The RemoteConfigurationRegistryClientService
-     * @param isSecureTest   Flag to indicate whether this is a secure interaction test
-     */
-    private void doTestZooKeeperClient(final CuratorFramework                         setupClient,
-                                       final String                                   testClientName,
-                                       final RemoteConfigurationRegistryClientService clientService,
-                                       boolean                                        isSecureTest) throws Exception {
-
-        RemoteConfigurationRegistryClient client = clientService.get(testClientName);
-        assertNotNull(client);
-        List<String> descriptors = client.listChildEntries("/knox/config/descriptors");
-        assertNotNull(descriptors);
-        for (String descriptor : descriptors) {
-            System.out.println("Descriptor: " + descriptor);
-        }
-
-        List<String> providerConfigs = client.listChildEntries("/knox/config/shared-providers");
-        assertNotNull(providerConfigs);
-        for (String providerConfig : providerConfigs) {
-            System.out.println("Provider config: " + providerConfig);
-        }
-
-        List<String> someotherConfig = client.listChildEntries("/someotherconfig");
-        if (isSecureTest) {
-            assertNull("Expected null because of the ACL mismatch.", someotherConfig);
-        } else {
-            assertNotNull(someotherConfig);
-        }
-
-        // Test listeners
-        final String MY_NEW_ZNODE = "/clientServiceTestNode";
-        final String MY_NEW_DATA_ZNODE = MY_NEW_ZNODE + "/mydata";
-
-        if (setupClient.checkExists().forPath(MY_NEW_ZNODE) != null) {
-            setupClient.delete().deletingChildrenIfNeeded().forPath(MY_NEW_ZNODE);
-        }
-
-        final List<String> listenerLog = new ArrayList<>();
-        client.addChildEntryListener(MY_NEW_ZNODE, (c, type, path) -> {
-            listenerLog.add("EXTERNAL: " + type.toString() + ":" + path);
-            if (ChildEntryListener.Type.ADDED.equals(type)) {
-                try {
-                    c.addEntryListener(path, (cc, p, d) -> listenerLog.add("EXTERNAL: " + p + ":" + (d != null ? new String(d) : "null")));
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-        });
-
-        client.createEntry(MY_NEW_ZNODE);
-        client.createEntry(MY_NEW_DATA_ZNODE, "more test data");
-        String testData = client.getEntryData(MY_NEW_DATA_ZNODE);
-        assertNotNull(testData);
-        assertEquals("more test data", testData);
-
-        assertTrue(client.entryExists(MY_NEW_DATA_ZNODE));
-        client.setEntryData(MY_NEW_DATA_ZNODE, "still more data");
-
-        try {
-            Thread.sleep(1000);
-        } catch (InterruptedException e) {
-            //
-        }
-
-        client.setEntryData(MY_NEW_DATA_ZNODE, "changed completely");
-
-        try {
-            Thread.sleep(1000);
-        } catch (InterruptedException e) {
-            //
-        }
-
-        client.deleteEntry(MY_NEW_DATA_ZNODE);
-
-        try {
-            Thread.sleep(1000);
-        } catch (InterruptedException e) {
-            //
-        }
-
-        assertFalse(listenerLog.isEmpty());
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java
deleted file mode 100644
index 6cbef9b..0000000
--- a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java
+++ /dev/null
@@ -1,255 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.zk;
-
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
-import org.apache.hadoop.gateway.service.config.remote.zk.RemoteConfigurationRegistryJAASConfig;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.easymock.EasyMock;
-import org.junit.Test;
-
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-public class RemoteConfigurationRegistryJAASConfigTest {
-
-    @Test
-    public void testZooKeeperDigestContextEntry() throws Exception {
-        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
-        final String ENTRY_NAME       = "my_digest_context";
-        final String DIGEST_PRINCIPAL = "myIdentity";
-        final String DIGEST_PWD_ALIAS = "myAlias";
-        final String DIGEST_PWD       = "mysecret";
-
-        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
-        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(DIGEST_PWD_ALIAS)).andReturn(DIGEST_PWD.toCharArray()).anyTimes();
-        EasyMock.replay(aliasService);
-
-        registryConfigs.add(createDigestConfig(ENTRY_NAME, DIGEST_PRINCIPAL, DIGEST_PWD_ALIAS));
-
-        try {
-            RemoteConfigurationRegistryJAASConfig jaasConfig =
-                                    RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, aliasService);
-
-            // Make sure there are no entries for an invalid context entry name
-            assertNull(jaasConfig.getAppConfigurationEntry("invalid"));
-
-            // Validate the intended context entry
-            validateDigestContext(jaasConfig,
-                                  ENTRY_NAME,
-                                  RemoteConfigurationRegistryJAASConfig.digestLoginModules.get("ZOOKEEPER"),
-                                  DIGEST_PRINCIPAL,
-                                  DIGEST_PWD);
-        } finally {
-            Configuration.setConfiguration(null);
-        }
-    }
-
-    @Test
-    public void testKerberosContextEntry() throws Exception {
-        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
-        final String ENTRY_NAME = "my_kerberos_context";
-        final String PRINCIPAL  = "myIdentity";
-
-        File dummyKeyTab = File.createTempFile("my_context", "keytab");
-        registryConfigs.add(createKerberosConfig(ENTRY_NAME, PRINCIPAL, dummyKeyTab.getAbsolutePath()));
-
-        try {
-            RemoteConfigurationRegistryJAASConfig jaasConfig =
-                                            RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, null);
-
-            // Make sure there are no entries for an invalid context entry name
-            assertNull(jaasConfig.getAppConfigurationEntry("invalid"));
-
-            // Validate the intended context entry
-            validateKerberosContext(jaasConfig,
-                                    ENTRY_NAME,
-                                    PRINCIPAL,
-                                    dummyKeyTab.getAbsolutePath(),
-                                    true,
-                                    false);
-
-        } finally {
-            Configuration.setConfiguration(null);
-        }
-    }
-
-    @Test
-    public void testZooKeeperMultipleContextEntries() throws Exception {
-        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
-        final String KERBEROS_ENTRY_NAME = "my_kerberos_context";
-        final String KERBEROS_PRINCIPAL  = "myKerberosIdentity";
-        final String DIGEST_ENTRY_NAME   = "my_digest_context";
-        final String DIGEST_PRINCIPAL    = "myDigestIdentity";
-        final String DIGEST_PWD_ALIAS    = "myAlias";
-        final String DIGEST_PWD          = "mysecret";
-
-        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
-        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(DIGEST_PWD_ALIAS)).andReturn(DIGEST_PWD.toCharArray()).anyTimes();
-        EasyMock.replay(aliasService);
-
-        File dummyKeyTab = File.createTempFile("my_context", "keytab");
-        registryConfigs.add(createKerberosConfig(KERBEROS_ENTRY_NAME, KERBEROS_PRINCIPAL, dummyKeyTab.getAbsolutePath()));
-        registryConfigs.add(createDigestConfig(DIGEST_ENTRY_NAME, DIGEST_PRINCIPAL, DIGEST_PWD_ALIAS));
-
-        try {
-            RemoteConfigurationRegistryJAASConfig jaasConfig =
-                                        RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, aliasService);
-
-            // Make sure there are no entries for an invalid context entry name
-            assertNull(jaasConfig.getAppConfigurationEntry("invalid"));
-
-            // Validate the kerberos context entry
-            validateKerberosContext(jaasConfig,
-                                    KERBEROS_ENTRY_NAME,
-                                    KERBEROS_PRINCIPAL,
-                                    dummyKeyTab.getAbsolutePath(),
-                                    true,
-                                    false);
-
-            // Validate the digest context entry
-            validateDigestContext(jaasConfig,
-                                  DIGEST_ENTRY_NAME,
-                                  RemoteConfigurationRegistryJAASConfig.digestLoginModules.get("ZOOKEEPER"),
-                                  DIGEST_PRINCIPAL,
-                                  DIGEST_PWD);
-
-        } finally {
-            Configuration.setConfiguration(null);
-        }
-    }
-
-    @Test
-    public void testZooKeeperDigestContextEntryWithoutAliasService() throws Exception {
-        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
-        final String ENTRY_NAME       = "my_digest_context";
-        final String DIGEST_PRINCIPAL = "myIdentity";
-        final String DIGEST_PWD_ALIAS = "myAlias";
-
-        registryConfigs.add(createDigestConfig(ENTRY_NAME, DIGEST_PRINCIPAL, DIGEST_PWD_ALIAS));
-
-        try {
-            RemoteConfigurationRegistryJAASConfig jaasConfig =
-                                            RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, null);
-            fail("Expected IllegalArgumentException because the AliasService is not available.");
-        } catch (IllegalArgumentException e) {
-            // Expected
-            assertTrue(e.getMessage().contains("AliasService"));
-        } catch (Throwable e) {
-            fail("Wrong exception encountered: " + e.getClass().getName() + ", " + e.getMessage());
-        } finally {
-            Configuration.setConfiguration(null);
-        }
-    }
-
-    private static RemoteConfigurationRegistryConfig createDigestConfig(String entryName,
-                                                                        String principal,
-                                                                        String credentialAlias) {
-        return createDigestConfig(entryName, principal, credentialAlias, "ZooKeeper");
-    }
-
-    private static RemoteConfigurationRegistryConfig createDigestConfig(String entryName,
-                                                                        String principal,
-                                                                        String credentialAlias,
-                                                                        String registryType) {
-        RemoteConfigurationRegistryConfig rc = EasyMock.createNiceMock(RemoteConfigurationRegistryConfig.class);
-        EasyMock.expect(rc.getRegistryType()).andReturn(registryType).anyTimes();
-        EasyMock.expect(rc.getName()).andReturn(entryName).anyTimes();
-        EasyMock.expect(rc.isSecureRegistry()).andReturn(true).anyTimes();
-        EasyMock.expect(rc.getAuthType()).andReturn("digest").anyTimes();
-        EasyMock.expect(rc.getPrincipal()).andReturn(principal).anyTimes();
-        EasyMock.expect(rc.getCredentialAlias()).andReturn(credentialAlias).anyTimes();
-        EasyMock.replay(rc);
-        return rc;
-    }
-
-
-    private static RemoteConfigurationRegistryConfig createKerberosConfig(String entryName,
-                                                                          String principal,
-                                                                          String keyTabPath) {
-        return createKerberosConfig(entryName, principal, keyTabPath, "ZooKeeper");
-    }
-
-    private static RemoteConfigurationRegistryConfig createKerberosConfig(String entryName,
-                                                                          String principal,
-                                                                          String keyTabPath,
-                                                                          String registryType) {
-        return createKerberosConfig(entryName, principal, keyTabPath, null, null, registryType);
-    }
-
-    private static RemoteConfigurationRegistryConfig createKerberosConfig(String entryName,
-                                                                          String principal,
-                                                                          String keyTabPath,
-                                                                          Boolean useKeyTab,
-                                                                          Boolean useTicketCache,
-                                                                          String registryType) {
-        RemoteConfigurationRegistryConfig rc = EasyMock.createNiceMock(RemoteConfigurationRegistryConfig.class);
-        EasyMock.expect(rc.getRegistryType()).andReturn(registryType).anyTimes();
-        EasyMock.expect(rc.getName()).andReturn(entryName).anyTimes();
-        EasyMock.expect(rc.isSecureRegistry()).andReturn(true).anyTimes();
-        EasyMock.expect(rc.getAuthType()).andReturn("kerberos").anyTimes();
-        EasyMock.expect(rc.getPrincipal()).andReturn(principal).anyTimes();
-        EasyMock.expect(rc.getKeytab()).andReturn(keyTabPath).anyTimes();
-        EasyMock.expect(rc.isUseKeyTab()).andReturn(useKeyTab != null ? useKeyTab : true).anyTimes();
-        EasyMock.expect(rc.isUseTicketCache()).andReturn(useTicketCache != null ? useTicketCache : false).anyTimes();
-        EasyMock.replay(rc);
-        return rc;
-    }
-
-    private static void validateDigestContext(RemoteConfigurationRegistryJAASConfig config,
-                                              String                                entryName,
-                                              String                                loginModule,
-                                              String                                principal,
-                                              String                                password) throws Exception {
-        AppConfigurationEntry[] myContextEntries = config.getAppConfigurationEntry(entryName);
-        assertNotNull(myContextEntries);
-        assertEquals(1, myContextEntries.length);
-        AppConfigurationEntry entry = myContextEntries[0];
-        assertTrue(entry.getLoginModuleName().equals(loginModule));
-        Map<String, ?> entryOpts = entry.getOptions();
-        assertEquals(principal, entryOpts.get("username"));
-        assertEquals(password, entryOpts.get("password"));
-    }
-
-    private static void validateKerberosContext(RemoteConfigurationRegistryJAASConfig config,
-                                                String                                entryName,
-                                                String                                principal,
-                                                String                                keyTab,
-                                                boolean                               useKeyTab,
-                                                boolean                               useTicketCache) throws Exception {
-        AppConfigurationEntry[] myContextEntries = config.getAppConfigurationEntry(entryName);
-        assertNotNull(myContextEntries);
-        assertEquals(1, myContextEntries.length);
-        AppConfigurationEntry entry = myContextEntries[0];
-        assertTrue(entry.getLoginModuleName().endsWith(".security.auth.module.Krb5LoginModule"));
-        Map<String, ?> entryOpts = entry.getOptions();
-        assertEquals(principal, entryOpts.get("principal"));
-        assertEquals(keyTab, entryOpts.get("keyTab"));
-        assertEquals(useKeyTab, Boolean.valueOf((String)entryOpts.get("isUseKeyTab")));
-        assertEquals(useTicketCache, Boolean.valueOf((String)entryOpts.get("isUseTicketCache")));
-    }
-}
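
For context, the deleted test above checks that RemoteConfigurationRegistryJAASConfig.configure(...) yields standard JAAS application configuration entries: a digest-style entry carrying "username"/"password" options, and a Kerberos entry whose login module name ends with ".security.auth.module.Krb5LoginModule" and which carries "principal", "keyTab", "isUseKeyTab" and "isUseTicketCache" options. A minimal sketch of building equivalent entries with the javax.security.auth.login API follows; the option keys mirror the assertions above, while the digest login module name and the option values are assumptions for illustration only.

    import javax.security.auth.login.AppConfigurationEntry;
    import java.util.HashMap;
    import java.util.Map;

    public class JaasEntrySketch {
        public static void main(String[] args) {
            // Digest-style entry; option keys match validateDigestContext() above.
            Map<String, String> digestOpts = new HashMap<>();
            digestOpts.put("username", "myIdentity");
            digestOpts.put("password", "mysecret");
            AppConfigurationEntry digest = new AppConfigurationEntry(
                "org.apache.zookeeper.server.auth.DigestLoginModule", // assumed ZooKeeper digest module
                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                digestOpts);

            // Kerberos-style entry; option keys match validateKerberosContext() above.
            Map<String, String> krbOpts = new HashMap<>();
            krbOpts.put("principal", "myIdentity");
            krbOpts.put("keyTab", "/etc/security/keytabs/knox.keytab"); // placeholder path
            krbOpts.put("isUseKeyTab", "true");
            krbOpts.put("isUseTicketCache", "false");
            AppConfigurationEntry kerberos = new AppConfigurationEntry(
                "com.sun.security.auth.module.Krb5LoginModule",
                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                krbOpts);

            System.out.println(digest.getLoginModuleName() + " " + digest.getOptions());
            System.out.println(kerberos.getLoginModuleName() + " " + kerberos.getOptions());
        }
    }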

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java
new file mode 100644
index 0000000..ce223e9
--- /dev/null
+++ b/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistriesTest.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.config;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.junit.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+
+public class DefaultRemoteConfigurationRegistriesTest {
+
+    /**
+     * Test a single registry configuration with digest auth configuration.
+     */
+    @Test
+    public void testPropertiesRemoteConfigurationRegistriesSingleDigest() throws Exception {
+        Map<String, Properties> testProperties = new HashMap<>();
+        Properties p = new Properties();
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "hostx:2181");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "zkDigestUser");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "digest");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS, "zkDigestAlias");
+        testProperties.put("testDigest", p);
+
+        doTestPropertiesRemoteConfigurationRegistries(testProperties);
+    }
+
+
+    /**
+     * Test a single registry configuration with kerberos auth configuration.
+     */
+    @Test
+    public void testPropertiesRemoteConfigurationRegistriesSingleKerberos() throws Exception {
+        Map<String, Properties> testProperties = new HashMap<>();
+        Properties p = new Properties();
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "hostx:2181");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "zkUser");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "kerberos");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB, "/home/user/remoteregistry.keytab");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB, "true");
+        p.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE, "false");
+        testProperties.put("testKerb", p);
+
+        doTestPropertiesRemoteConfigurationRegistries(testProperties);
+    }
+
+    /**
+     * Test multiple registry configuration with varying auth configurations.
+     */
+    @Test
+    public void testPropertiesRemoteConfigurationRegistriesMultipleMixed() throws Exception {
+        Map<String, Properties> testProperties = new HashMap<>();
+
+        Properties kerb = new Properties();
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "host1:2181");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_NAMESPACE, "/knox/config");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "kerbPrincipal");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "kerberos");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB, "/home/user/mykrb.keytab");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB, "true");
+        kerb.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE, "false");
+        testProperties.put("testKerb1", kerb);
+
+        Properties digest = new Properties();
+        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
+        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "host2:2181");
+        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL, "digestPrincipal");
+        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE, "digest");
+        digest.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS, "digestPwdAlias");
+        testProperties.put("testDigest1", digest);
+
+        Properties unsecured = new Properties();
+        unsecured.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE, "ZooKeeper");
+        unsecured.setProperty(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS, "host2:2181");
+        testProperties.put("testUnsecured", unsecured);
+
+        doTestPropertiesRemoteConfigurationRegistries(testProperties);
+    }
+
+
+    /**
+     * Perform the actual test.
+     *
+     * @param testProperties The test properties
+     */
+    private void doTestPropertiesRemoteConfigurationRegistries(Map<String, Properties> testProperties) throws Exception {
+        // Mock gateway config
+        GatewayConfig gc = mockGatewayConfig(testProperties);
+
+        // Create the RemoteConfigurationRegistries object to be tested from the GatewayConfig
+        RemoteConfigurationRegistries registries = new DefaultRemoteConfigurationRegistries(gc);
+
+        // Basic validation
+        assertNotNull(registries);
+        List<RemoteConfigurationRegistry> registryConfigs = registries.getRegistryConfigurations();
+        assertNotNull(registryConfigs);
+        assertEquals(testProperties.size(), registryConfigs.size());
+
+        // Validate the contents of the created object
+        for (RemoteConfigurationRegistry regConfig : registryConfigs) {
+            validateRemoteRegistryConfig(regConfig.getName(), testProperties.get(regConfig.getName()), regConfig);
+        }
+    }
+
+
+    /**
+     * Create a mock GatewayConfig based on the specified test properties.
+     *
+     * @param testProperties The test properties to set on the config
+     */
+    private GatewayConfig mockGatewayConfig(Map<String, Properties> testProperties) {
+        // Mock gateway config
+        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
+        List<String> configNames = new ArrayList<>();
+        for (String registryName : testProperties.keySet()) {
+            configNames.add(registryName);
+
+            String propertyValueString = "";
+            Properties props = testProperties.get(registryName);
+            Enumeration names = props.propertyNames();
+            while (names.hasMoreElements()) {
+                String propertyName = (String) names.nextElement();
+                propertyValueString += propertyName + "=" + props.get(propertyName);
+                if (names.hasMoreElements()) {
+                    propertyValueString += ";";
+                }
+            }
+            EasyMock.expect(gc.getRemoteRegistryConfiguration(registryName))
+                    .andReturn(propertyValueString)
+                    .anyTimes();
+        }
+        EasyMock.expect(gc.getRemoteRegistryConfigurationNames()).andReturn(configNames).anyTimes();
+        EasyMock.replay(gc);
+
+        return gc;
+    }
+
+
+    /**
+     * Validate the specified RemoteConfigurationRegistry based on the expected test properties.
+     */
+    private void validateRemoteRegistryConfig(String                      configName,
+                                              Properties                  expected,
+                                              RemoteConfigurationRegistry registryConfig) throws Exception {
+        assertEquals(configName, registryConfig.getName());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE), registryConfig.getRegistryType());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS), registryConfig.getConnectionString());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_NAMESPACE), registryConfig.getNamespace());
+        assertEquals(registryConfig.isSecureRegistry(), expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE) != null);
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE), registryConfig.getAuthType());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL), registryConfig.getPrincipal());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS), registryConfig.getCredentialAlias());
+        assertEquals(expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB), registryConfig.getKeytab());
+        assertEquals(Boolean.valueOf((String)expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB)), registryConfig.isUseKeyTab());
+        assertEquals(Boolean.valueOf((String)expected.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE)), registryConfig.isUseTicketCache());
+    }
+
+}
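
For context, mockGatewayConfig() above flattens each registry's Properties into a single "name=value;name=value" string, which is what GatewayConfig.getRemoteRegistryConfiguration(name) is mocked to return. A small standalone sketch of that serialization follows; the property keys in the example are illustrative placeholders, not the actual values of the GatewayConfig constants.

    import java.util.Enumeration;
    import java.util.Properties;
    import java.util.StringJoiner;

    public class RegistryConfigStringSketch {
        // Mirrors the loop in mockGatewayConfig(): flatten Properties into "k=v;k=v".
        static String toConfigString(Properties props) {
            StringJoiner joiner = new StringJoiner(";");
            Enumeration<?> names = props.propertyNames();
            while (names.hasMoreElements()) {
                String name = (String) names.nextElement();
                joiner.add(name + "=" + props.getProperty(name));
            }
            return joiner.toString();
        }

        public static void main(String[] args) {
            Properties p = new Properties();
            p.setProperty("type", "ZooKeeper");     // placeholder key names
            p.setProperty("address", "hostx:2181");
            p.setProperty("authType", "digest");
            // Prints something like: type=ZooKeeper;address=hostx:2181;authType=digest (order may vary)
            System.out.println(toConfigString(p));
        }
    }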

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java
new file mode 100644
index 0000000..1ff5dec
--- /dev/null
+++ b/gateway-service-remoteconfig/src/test/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistryConfigParserTest.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.config;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+import org.apache.knox.gateway.service.config.remote.util.RemoteRegistryConfigTestUtils;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import static org.apache.knox.gateway.service.config.remote.util.RemoteRegistryConfigTestUtils.*;
+
+public class RemoteConfigurationRegistryConfigParserTest {
+
+    @Test
+    public void testExternalXMLParsing() throws Exception {
+        final String CONN_STR = "http://my.zookeeper.host:2181";
+
+        Map<String, Map<String, String>> testRegistryConfigurations = new HashMap<>();
+
+        Map<String, String> config1 = new HashMap<>();
+        config1.put(RemoteRegistryConfigTestUtils.PROPERTY_TYPE, "ZooKeeper");
+        config1.put(RemoteRegistryConfigTestUtils.PROPERTY_NAME, "registry1");
+        config1.put(RemoteRegistryConfigTestUtils.PROPERTY_ADDRESS, CONN_STR);
+        config1.put(RemoteRegistryConfigTestUtils.PROPERTY_SECURE, "true");
+        config1.put(RemoteRegistryConfigTestUtils.PROPERTY_AUTH_TYPE, "Digest");
+        config1.put(RemoteRegistryConfigTestUtils.PROPERTY_PRINCIPAL, "knox");
+        config1.put(RemoteRegistryConfigTestUtils.PROPERTY_CRED_ALIAS, "zkCredential");
+        testRegistryConfigurations.put(config1.get("name"), config1);
+
+        Map<String, String> config2 = new HashMap<>();
+        config2.put(RemoteRegistryConfigTestUtils.PROPERTY_TYPE, "ZooKeeper");
+        config2.put(RemoteRegistryConfigTestUtils.PROPERTY_NAME, "MyKerberos");
+        config2.put(RemoteRegistryConfigTestUtils.PROPERTY_ADDRESS, CONN_STR);
+        config2.put(RemoteRegistryConfigTestUtils.PROPERTY_SECURE, "true");
+        config2.put(RemoteRegistryConfigTestUtils.PROPERTY_AUTH_TYPE, "Kerberos");
+        config2.put(RemoteRegistryConfigTestUtils.PROPERTY_PRINCIPAL, "knox");
+        File myKeyTab = File.createTempFile("mytest", "keytab");
+        config2.put(RemoteRegistryConfigTestUtils.PROPERTY_KEYTAB, myKeyTab.getAbsolutePath());
+        config2.put(RemoteRegistryConfigTestUtils.PROPERTY_USE_KEYTAB, "false");
+        config2.put(RemoteRegistryConfigTestUtils.PROPERTY_USE_TICKET_CACHE, "true");
+        testRegistryConfigurations.put(config2.get("name"), config2);
+
+        Map<String, String> config3 = new HashMap<>();
+        config3.put(RemoteRegistryConfigTestUtils.PROPERTY_TYPE, "ZooKeeper");
+        config3.put(RemoteRegistryConfigTestUtils.PROPERTY_NAME, "anotherRegistry");
+        config3.put(RemoteRegistryConfigTestUtils.PROPERTY_ADDRESS, "whatever:1281");
+        testRegistryConfigurations.put(config3.get("name"), config3);
+
+        String configXML =
+                    RemoteRegistryConfigTestUtils.createRemoteConfigRegistriesXML(testRegistryConfigurations.values());
+
+        File registryConfigFile = File.createTempFile("remote-registries", "xml");
+        try {
+            FileUtils.writeStringToFile(registryConfigFile, configXML);
+
+            List<RemoteConfigurationRegistryConfig> configs =
+                                    RemoteConfigurationRegistriesParser.getConfig(registryConfigFile.getAbsolutePath());
+            assertNotNull(configs);
+            assertEquals(testRegistryConfigurations.keySet().size(), configs.size());
+
+            for (RemoteConfigurationRegistryConfig registryConfig : configs) {
+                Map<String, String> expected = testRegistryConfigurations.get(registryConfig.getName());
+                assertNotNull(expected);
+                validateParsedRegistryConfiguration(registryConfig, expected);
+            }
+        } finally {
+            registryConfigFile.delete();
+        }
+    }
+
+    private void validateParsedRegistryConfiguration(RemoteConfigurationRegistryConfig config,
+                                                     Map<String, String> expected) throws Exception {
+        assertEquals(expected.get(RemoteRegistryConfigTestUtils.PROPERTY_TYPE), config.getRegistryType());
+        assertEquals(expected.get(RemoteRegistryConfigTestUtils.PROPERTY_ADDRESS), config.getConnectionString());
+        assertEquals(expected.get(RemoteRegistryConfigTestUtils.PROPERTY_NAME), config.getName());
+        assertEquals(expected.get(
+            RemoteRegistryConfigTestUtils.PROPERTY_NAMESAPCE), config.getNamespace());
+        assertEquals(Boolean.valueOf(expected.get(
+            RemoteRegistryConfigTestUtils.PROPERTY_SECURE)), config.isSecureRegistry());
+        assertEquals(expected.get(
+            RemoteRegistryConfigTestUtils.PROPERTY_AUTH_TYPE), config.getAuthType());
+        assertEquals(expected.get(
+            RemoteRegistryConfigTestUtils.PROPERTY_PRINCIPAL), config.getPrincipal());
+        assertEquals(expected.get(
+            RemoteRegistryConfigTestUtils.PROPERTY_CRED_ALIAS), config.getCredentialAlias());
+        assertEquals(expected.get(RemoteRegistryConfigTestUtils.PROPERTY_KEYTAB), config.getKeytab());
+        assertEquals(Boolean.valueOf(expected.get(
+            RemoteRegistryConfigTestUtils.PROPERTY_USE_KEYTAB)), config.isUseKeyTab());
+        assertEquals(Boolean.valueOf(expected.get(
+            RemoteRegistryConfigTestUtils.PROPERTY_USE_TICKET_CACHE)), config.isUseTicketCache());
+    }
+
+}


[12/49] knox git commit: KNOX-1128 - Readonly protection for generated topologies in Knox Admin UI

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/styles.b2328beb0372c051d06d.bundle.js
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/styles.b2328beb0372c051d06d.bundle.js b/gateway-applications/src/main/resources/applications/admin-ui/app/styles.b2328beb0372c051d06d.bundle.js
deleted file mode 100644
index b94948e..0000000
--- a/gateway-applications/src/main/resources/applications/admin-ui/app/styles.b2328beb0372c051d06d.bundle.js
+++ /dev/null
@@ -1,2 +0,0 @@
-webpackJsonp([1,3],{346:function(n,o){},630:function(n,o,c){n.exports=c(346)}},[630]);
-//# sourceMappingURL=styles.b2328beb0372c051d06d.bundle.map
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/styles.b2328beb0372c051d06d.bundle.map
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/styles.b2328beb0372c051d06d.bundle.map b/gateway-applications/src/main/resources/applications/admin-ui/app/styles.b2328beb0372c051d06d.bundle.map
deleted file mode 100644
index a982c00..0000000
--- a/gateway-applications/src/main/resources/applications/admin-ui/app/styles.b2328beb0372c051d06d.bundle.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"sources":[],"names":[],"mappings":"","file":"styles.d41d8cd98f00b204e9800998ecf8427e.bundle.css","sourceRoot":""}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/styles.d41d8cd98f00b204e980.bundle.css
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/styles.d41d8cd98f00b204e980.bundle.css b/gateway-applications/src/main/resources/applications/admin-ui/app/styles.d41d8cd98f00b204e980.bundle.css
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/styles.d41d8cd98f00b204e9800998ecf8427e.bundle.css
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/styles.d41d8cd98f00b204e9800998ecf8427e.bundle.css b/gateway-applications/src/main/resources/applications/admin-ui/app/styles.d41d8cd98f00b204e9800998ecf8427e.bundle.css
deleted file mode 100644
index 9ab459c..0000000
--- a/gateway-applications/src/main/resources/applications/admin-ui/app/styles.d41d8cd98f00b204e9800998ecf8427e.bundle.css
+++ /dev/null
@@ -1,2 +0,0 @@
-
-/*# sourceMappingURL=styles.b2328beb0372c051d06d.bundle.map*/
\ No newline at end of file


[10/49] knox git commit: KNOX-1128 - Readonly protection for generated topologies in Knox Admin UI

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.js.gz
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.js.gz b/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.js.gz
deleted file mode 100644
index 5886a33..0000000
Binary files a/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.js.gz and /dev/null differ


[22/49] knox git commit: KNOX-1124 - Service Discovery Doesn't Work For Simple Descriptors With .yaml Extension (Phil Zampino via Sandeep More)

Posted by mo...@apache.org.
KNOX-1124 - Service Discovery Doesn't Work For Simple Descriptors With .yaml Extension (Phil Zampino via Sandeep More)


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/a09e751d
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/a09e751d
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/a09e751d

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: a09e751d8e9d3a5f8a8989dec11f96d517bc16b5
Parents: 7b21146
Author: Sandeep More <mo...@apache.org>
Authored: Fri Dec 1 13:43:10 2017 -0500
Committer: Sandeep More <mo...@apache.org>
Committed: Fri Dec 1 13:43:10 2017 -0500

----------------------------------------------------------------------
 .../topology/simple/SimpleDescriptorFactory.java       |  2 +-
 .../topology/simple/SimpleDescriptorFactoryTest.java   | 13 +++++++++++--
 2 files changed, 12 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/a09e751d/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
index 3df6d2f..2192eef 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
@@ -39,7 +39,7 @@ public class SimpleDescriptorFactory {
 
         if (path.endsWith(".json")) {
             sd = parseJSON(path);
-        } else if (path.endsWith(".yml")) {
+        } else if (path.endsWith(".yml") || path.endsWith(".yaml")) {
             sd = parseYAML(path);
         } else {
            throw new IllegalArgumentException("Unsupported simple descriptor format: " + path.substring(path.lastIndexOf('.')));
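
The hunk above simply widens the extension check so that both ".yml" and ".yaml" descriptors reach the YAML parser. A minimal standalone sketch of that dispatch follows; the enclosing method in SimpleDescriptorFactory is not shown in the hunk, so the method name and the String return type here are stand-ins for illustration.

    public class DescriptorExtensionSketch {
        // Simplified dispatch matching the logic in the hunk above.
        static String parserFor(String path) {
            if (path.endsWith(".json")) {
                return "json";
            } else if (path.endsWith(".yml") || path.endsWith(".yaml")) {
                return "yaml";
            }
            throw new IllegalArgumentException("Unsupported simple descriptor format: "
                + path.substring(path.lastIndexOf('.')));
        }

        public static void main(String[] args) {
            System.out.println(parserFor("cluster.yml"));  // yaml
            System.out.println(parserFor("cluster.yaml")); // yaml (newly supported by KNOX-1124)
            System.out.println(parserFor("cluster.json")); // json
        }
    }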

http://git-wip-us.apache.org/repos/asf/knox/blob/a09e751d/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
index 129ff7c..7c42542 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
@@ -29,7 +29,8 @@ public class SimpleDescriptorFactoryTest {
 
     private enum FileType {
         JSON,
-        YAML
+        YAML,
+        YML
     }
 
     @Test
@@ -39,6 +40,7 @@ public class SimpleDescriptorFactoryTest {
 
     @Test
     public void testParseYAMLSimpleDescriptor() throws Exception {
+        testParseSimpleDescriptor(FileType.YML);
         testParseSimpleDescriptor(FileType.YAML);
     }
 
@@ -49,6 +51,7 @@ public class SimpleDescriptorFactoryTest {
 
     @Test
     public void testParseYAMLSimpleDescriptorWithServiceParams() throws Exception {
+        testParseSimpleDescriptorWithServiceParams(FileType.YML);
         testParseSimpleDescriptorWithServiceParams(FileType.YAML);
     }
 
@@ -59,6 +62,7 @@ public class SimpleDescriptorFactoryTest {
 
     @Test
     public void testParseYAMLSimpleDescriptorApplications() throws Exception {
+        testParseSimpleDescriptorWithApplications(FileType.YML);
         testParseSimpleDescriptorWithApplications(FileType.YAML);
     }
 
@@ -70,6 +74,7 @@ public class SimpleDescriptorFactoryTest {
 
     @Test
     public void testParseYAMLSimpleDescriptorWithServicesAndApplications() throws Exception {
+        testParseSimpleDescriptorWithServicesAndApplications(FileType.YML);
         testParseSimpleDescriptorWithServicesAndApplications(FileType.YAML);
     }
 
@@ -327,9 +332,12 @@ public class SimpleDescriptorFactoryTest {
             case JSON:
                 extension = "json";
                 break;
-            case YAML:
+            case YML:
                 extension = "yml";
                 break;
+            case YAML:
+                extension = "yaml";
+                break;
         }
         return extension;
     }
@@ -402,6 +410,7 @@ public class SimpleDescriptorFactoryTest {
                                    appParams);
                 break;
             case YAML:
+            case YML:
                 result = writeYAML(path,
                                    discoveryType,
                                    discoveryAddress,


[39/49] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
index c7b8df5,0000000..bc4fc31
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
@@@ -1,926 -1,0 +1,987 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.config.impl;
 +
 +import org.apache.commons.io.FilenameUtils;
 +import org.apache.commons.lang3.StringUtils;
 +import org.apache.hadoop.conf.Configuration;
 +import org.apache.hadoop.fs.Path;
 +import org.apache.knox.gateway.GatewayMessages;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.joda.time.Period;
 +import org.joda.time.format.PeriodFormatter;
 +import org.joda.time.format.PeriodFormatterBuilder;
 +
 +import java.io.File;
 +import java.net.InetSocketAddress;
 +import java.net.MalformedURLException;
 +import java.net.URL;
 +import java.net.UnknownHostException;
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.Collections;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.concurrent.ConcurrentHashMap;
++import java.util.concurrent.TimeUnit;
 +
 +/**
 + * The configuration for the Gateway.
 + *
 + * The Gateway configuration variables are described in gateway-default.xml
 + *
 + * The Gateway specific configuration is split into two layers:
 + *
 + * 1. gateway-default.xml - All the configuration variables that the
 + *    Gateway needs.  These are the defaults that ship with the app
 + *    and should only be changed by the app developers.
 + *
 + * 2. gateway-site.xml - The (possibly empty) configuration that the
 + *    system administrator can set variables for their Hadoop cluster.
 + *
 + * To find the gateway configuration files the following process is used.
 + * First, if the GATEWAY_HOME system property contains a valid directory name,
 + * an attempt will be made to read the configuration files from that directory.
 + * Second, if the GATEWAY_HOME environment variable contains a valid directory name,
 + * an attempt will be made to read the configuration files from that directory.
 + * Third, an attempt will be made to load the configuration files from the directory
 + * specified via the "user.dir" system property.
 + * Fourth, an attempt will be made to load the configuration files from the classpath.
 + * Last, defaults will be used for all values.
 + *
 + * If GATEWAY_HOME isn't set via either the system property or environment variable then
 + * a value for this will be defaulted.  The default selected will be the directory that
 + * contained the last loaded configuration file that was not contained in a JAR.  If
 + * no such configuration file is loaded the value of the "user.dir" system property will be used
 + * as the value of GATEWAY_HOME.  This is important to consider for any relative file names as they
 + * will be resolved relative to the value of GATEWAY_HOME.  One such relative value is the
 + * name of the directory containing cluster topologies.  This value defaults to "clusters".
 + */
 +public class GatewayConfigImpl extends Configuration implements GatewayConfig {
 +
 +  private static final String GATEWAY_DEFAULT_TOPOLOGY_NAME_PARAM = "default.app.topology.name";
 +  private static final String GATEWAY_DEFAULT_TOPOLOGY_NAME = null;
 +
 +  private static final GatewayMessages log = MessagesFactory.get( GatewayMessages.class );
 +
 +  private static final String GATEWAY_CONFIG_DIR_PREFIX = "conf";
 +
 +  private static final String GATEWAY_CONFIG_FILE_PREFIX = "gateway";
 +
 +  private static final String DEFAULT_STACKS_SERVICES_DIR = "services";
 +
 +  private static final String DEFAULT_APPLICATIONS_DIR = "applications";
 +
 +  public static final String[] GATEWAY_CONFIG_FILENAMES = {
 +      GATEWAY_CONFIG_DIR_PREFIX + "/" + GATEWAY_CONFIG_FILE_PREFIX + "-default.xml",
 +      GATEWAY_CONFIG_DIR_PREFIX + "/" + GATEWAY_CONFIG_FILE_PREFIX + "-site.xml"
 +  };
 +
 +//  private static final String[] HADOOP_CONF_FILENAMES = {
 +//      "core-default.xml",
 +//      "core-site.xml"
 +////      "hdfs-default.xml",
 +////      "hdfs-site.xml",
 +////      "mapred-default.xml",
 +////      "mapred-site.xml"
 +//  };
 +
 +//  private static final String[] HADOOP_PREFIX_VARS = {
 +//      "HADOOP_PREFIX",
 +//      "HADOOP_HOME"
 +//  };
 +
 +  public static final String HTTP_HOST = GATEWAY_CONFIG_FILE_PREFIX + ".host";
 +  public static final String HTTP_PORT = GATEWAY_CONFIG_FILE_PREFIX + ".port";
 +  public static final String HTTP_PATH = GATEWAY_CONFIG_FILE_PREFIX + ".path";
 +  public static final String DEPLOYMENT_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.dir";
 +  public static final String SECURITY_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".security.dir";
 +  public static final String DATA_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".data.dir";
 +  public static final String STACKS_SERVICES_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".services.dir";
 +  public static final String GLOBAL_RULES_SERVICES = GATEWAY_CONFIG_FILE_PREFIX + ".global.rules.services";
 +  public static final String APPLICATIONS_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".applications.dir";
 +  public static final String HADOOP_CONF_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".hadoop.conf.dir";
 +  public static final String FRONTEND_URL = GATEWAY_CONFIG_FILE_PREFIX + ".frontend.url";
 +  private static final String TRUST_ALL_CERTS = GATEWAY_CONFIG_FILE_PREFIX + ".trust.all.certs";
 +  private static final String CLIENT_AUTH_NEEDED = GATEWAY_CONFIG_FILE_PREFIX + ".client.auth.needed";
 +  private static final String CLIENT_AUTH_WANTED = GATEWAY_CONFIG_FILE_PREFIX + ".client.auth.wanted";
 +  private static final String TRUSTSTORE_PATH = GATEWAY_CONFIG_FILE_PREFIX + ".truststore.path";
 +  private static final String TRUSTSTORE_TYPE = GATEWAY_CONFIG_FILE_PREFIX + ".truststore.type";
 +  private static final String KEYSTORE_TYPE = GATEWAY_CONFIG_FILE_PREFIX + ".keystore.type";
 +  private static final String XFORWARDED_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".xforwarded.enabled";
 +  private static final String EPHEMERAL_DH_KEY_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".jdk.tls.ephemeralDHKeySize";
 +  private static final String HTTP_CLIENT_MAX_CONNECTION = GATEWAY_CONFIG_FILE_PREFIX + ".httpclient.maxConnections";
 +  private static final String HTTP_CLIENT_CONNECTION_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".httpclient.connectionTimeout";
 +  private static final String HTTP_CLIENT_SOCKET_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".httpclient.socketTimeout";
 +  private static final String THREAD_POOL_MAX = GATEWAY_CONFIG_FILE_PREFIX + ".threadpool.max";
 +  public static final String HTTP_SERVER_REQUEST_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.requestBuffer";
 +  public static final String HTTP_SERVER_REQUEST_HEADER_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.requestHeaderBuffer";
 +  public static final String HTTP_SERVER_RESPONSE_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.responseBuffer";
 +  public static final String HTTP_SERVER_RESPONSE_HEADER_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.responseHeaderBuffer";
 +  public static final String DEPLOYMENTS_BACKUP_VERSION_LIMIT = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.backup.versionLimit";
 +  public static final String DEPLOYMENTS_BACKUP_AGE_LIMIT = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.backup.ageLimit";
 +  public static final String METRICS_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".metrics.enabled";
 +  public static final String JMX_METRICS_REPORTING_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".jmx.metrics.reporting.enabled";
 +  public static final String GRAPHITE_METRICS_REPORTING_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.enabled";
 +  public static final String GRAPHITE_METRICS_REPORTING_HOST = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.host";
 +  public static final String GRAPHITE_METRICS_REPORTING_PORT = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.port";
 +  public static final String GRAPHITE_METRICS_REPORTING_FREQUENCY = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.frequency";
 +  public static final String GATEWAY_IDLE_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".idle.timeout";
 +  public static final String REMOTE_IP_HEADER_NAME = GATEWAY_CONFIG_FILE_PREFIX + ".remote.ip.header.name";
 +
 +  /* @since 0.10 Websocket config variables */
 +  public static final String WEBSOCKET_FEATURE_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.feature.enabled";
 +  public static final String WEBSOCKET_MAX_TEXT_MESSAGE_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.text.size";
 +  public static final String WEBSOCKET_MAX_BINARY_MESSAGE_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.binary.size";
 +  public static final String WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.text.buffer.size";
 +  public static final String WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.binary.buffer.size";
 +  public static final String WEBSOCKET_INPUT_BUFFER_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.input.buffer.size";
 +  public static final String WEBSOCKET_ASYNC_WRITE_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.async.write.timeout";
 +  public static final String WEBSOCKET_IDLE_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.idle.timeout";
 +
 +  /**
 +   * Properties for for gateway port mapping feature
 +   */
 +  public static final String GATEWAY_PORT_MAPPING_PREFIX = GATEWAY_CONFIG_FILE_PREFIX + ".port.mapping.";
 +  public static final String GATEWAY_PORT_MAPPING_REGEX = GATEWAY_CONFIG_FILE_PREFIX + "\\.port\\.mapping\\..*";
 +  public static final String GATEWAY_PORT_MAPPING_ENABLED = GATEWAY_PORT_MAPPING_PREFIX + "enabled";
 +
 +  /**
 +   * Comma separated list of MIME Types to be compressed by Knox on the way out.
 +   *
 +   * @since 0.12
 +   */
 +  public static final String MIME_TYPES_TO_COMPRESS = GATEWAY_CONFIG_FILE_PREFIX
 +      + ".gzip.compress.mime.types";
 +
++  public static final String CLUSTER_CONFIG_MONITOR_PREFIX = GATEWAY_CONFIG_FILE_PREFIX + ".cluster.config.monitor.";
++  public static final String CLUSTER_CONFIG_MONITOR_INTERVAL_SUFFIX = ".interval";
++  public static final String CLUSTER_CONFIG_MONITOR_ENABLED_SUFFIX = ".enabled";
++
++
 +  // These config property names are not in line with the convention of using the
 +  // GATEWAY_CONFIG_FILE_PREFIX as is done by those above. These are left for
 +  // backward compatibility. 
 +  // LET'S NOT CONTINUE THIS PATTERN BUT LEAVE THEM FOR NOW.
 +  private static final String SSL_ENABLED = "ssl.enabled";
 +  private static final String SSL_EXCLUDE_PROTOCOLS = "ssl.exclude.protocols";
 +  private static final String SSL_INCLUDE_CIPHERS = "ssl.include.ciphers";
 +  private static final String SSL_EXCLUDE_CIPHERS = "ssl.exclude.ciphers";
 +  // END BACKWARD COMPATIBLE BLOCK
 +  
 +  public static final String DEFAULT_HTTP_PORT = "8888";
 +  public static final String DEFAULT_HTTP_PATH = "gateway";
 +  public static final String DEFAULT_DEPLOYMENT_DIR = "deployments";
 +  public static final String DEFAULT_SECURITY_DIR = "security";
 +  public static final String DEFAULT_DATA_DIR = "data";
++  private static final String PROVIDERCONFIG_DIR_NAME = "shared-providers";
++  private static final String DESCRIPTORS_DIR_NAME = "descriptors";
 +
 +  /* Websocket defaults */
 +  public static final boolean DEFAULT_WEBSOCKET_FEATURE_ENABLED = false;
 +  public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE = Integer.MAX_VALUE;;
 +  public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE = Integer.MAX_VALUE;;
 +  public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE = 32768;
 +  public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE = 32768;
 +  public static final int DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE = 4096;
 +  public static final int DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT = 60000;
 +  public static final int DEFAULT_WEBSOCKET_IDLE_TIMEOUT = 300000;
 +
 +  public static final boolean DEFAULT_GATEWAY_PORT_MAPPING_ENABLED = true;
 +
 +  /**
 +   * Default list of MIME Type to be compressed.
 +   * @since 0.12
 +   */
 +  public static final String DEFAULT_MIME_TYPES_TO_COMPRESS = "text/html, text/plain, text/xml, text/css, "
 +      + "application/javascript, application/x-javascript, text/javascript";
 +
 +  public static final String COOKIE_SCOPING_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".scope.cookies.feature.enabled";
 +  public static final boolean DEFAULT_COOKIE_SCOPING_FEATURE_ENABLED = false;
 +  private static final String CRYPTO_ALGORITHM = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.algorithm";
 +  private static final String CRYPTO_PBE_ALGORITHM = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.pbe.algorithm";
 +  private static final String CRYPTO_TRANSFORMATION = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.transformation";
 +  private static final String CRYPTO_SALTSIZE = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.salt.size";
 +  private static final String CRYPTO_ITERATION_COUNT = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.iteration.count";
 +  private static final String CRYPTO_KEY_LENGTH = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.key.length";
 +  public static final String SERVER_HEADER_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".server.header.enabled";
 +
++  /* @since 0.15 Remote configuration monitoring */
++  static final String CONFIG_REGISTRY_PREFIX = GATEWAY_CONFIG_FILE_PREFIX + ".remote.config.registry";
++  static final String REMOTE_CONFIG_MONITOR_CLIENT_NAME = GATEWAY_CONFIG_FILE_PREFIX + ".remote.config.monitor.client";
++
 +  private static List<String> DEFAULT_GLOBAL_RULES_SERVICES;
 +
 +
 +  public GatewayConfigImpl() {
 +    init();
 +  }
 +
 +  private String getVar( String variableName, String defaultValue ) {
 +    String value = get( variableName );
 +    if( value == null ) {
 +      value = System.getProperty( variableName );
 +    }
 +    if( value == null ) {
 +      value = System.getenv( variableName );
 +    }
 +    if( value == null ) {
 +      value = defaultValue;
 +    }
 +    return value;
 +  }
 +
 +  private String getGatewayHomeDir() {
 +    String home = get(
 +        GATEWAY_HOME_VAR,
 +        System.getProperty(
 +            GATEWAY_HOME_VAR,
 +            System.getenv( GATEWAY_HOME_VAR ) ) );
 +    return home;
 +  }
 +
 +  private void setGatewayHomeDir( String dir ) {
 +    set( GATEWAY_HOME_VAR, dir );
 +  }
 +
 +  @Override
 +  public String getGatewayConfDir() {
 +    String value = getVar( GATEWAY_CONF_HOME_VAR, getGatewayHomeDir() + File.separator + "conf"  );
 +    return FilenameUtils.normalize(value);
 +  }
 +
 +  @Override
 +  public String getGatewayDataDir() {
 +    String systemValue =
 +        System.getProperty(GATEWAY_DATA_HOME_VAR, System.getenv(GATEWAY_DATA_HOME_VAR));
 +    String dataDir = null;
 +    if (systemValue != null) {
 +      dataDir = systemValue;
 +    } else {
 +      dataDir = get(DATA_DIR, getGatewayHomeDir() + File.separator + DEFAULT_DATA_DIR);
 +    }
-     return dataDir;
++    return FilenameUtils.normalize(dataDir);
 +  }
 +
 +  @Override
 +  public String getGatewayServicesDir() {
 +    return get(STACKS_SERVICES_DIR, getGatewayDataDir() + File.separator + DEFAULT_STACKS_SERVICES_DIR);
 +  }
 +
 +  @Override
 +  public String getGatewayApplicationsDir() {
 +    return get(APPLICATIONS_DIR, getGatewayDataDir() + File.separator + DEFAULT_APPLICATIONS_DIR);
 +  }
 +
 +  @Override
 +  public String getHadoopConfDir() {
 +    return get( HADOOP_CONF_DIR );
 +  }
 +
 +  private void init() {
 +    // Load environment variables.
 +    for( Map.Entry<String, String> e : System.getenv().entrySet() ) {
 +      set( "env." + e.getKey(), e.getValue() );
 +    }
 +    // Load system properties.
 +    for( Map.Entry<Object, Object> p : System.getProperties().entrySet() ) {
 +      set( "sys." + p.getKey().toString(), p.getValue().toString() );
 +    }
 +
 +    URL lastFileUrl = null;
 +    for( String fileName : GATEWAY_CONFIG_FILENAMES ) {
 +      lastFileUrl = loadConfig( fileName, lastFileUrl );
 +    }
 +    //set default services list
 +    setDefaultGlobalRulesServices();
 +
 +    initGatewayHomeDir( lastFileUrl );
 +
 +    // log whether the scoping cookies to the gateway.path feature is enabled
 +    log.cookieScopingFeatureEnabled(isCookieScopingToPathEnabled());
 +  }
 +
 +  private void setDefaultGlobalRulesServices() {
 +    DEFAULT_GLOBAL_RULES_SERVICES = new ArrayList<>();
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("NAMENODE");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("JOBTRACKER");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("WEBHDFS");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("WEBHCAT");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("OOZIE");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("WEBHBASE");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("HIVE");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("RESOURCEMANAGER");
 +  }
 +
 +  private void initGatewayHomeDir( URL lastFileUrl ) {
 +    String home = System.getProperty( GATEWAY_HOME_VAR );
 +    if( home != null ) {
 +      set( GATEWAY_HOME_VAR, home );
 +      log.settingGatewayHomeDir( "system property", home );
 +      return;
 +    }
 +    home = System.getenv( GATEWAY_HOME_VAR );
 +    if( home != null ) {
 +      set( GATEWAY_HOME_VAR, home );
 +      log.settingGatewayHomeDir( "environment variable", home );
 +      return;
 +    }
 +    if( lastFileUrl != null ) {
 +      File file = new File( lastFileUrl.getFile() ).getAbsoluteFile();
 +      File dir = file.getParentFile().getParentFile(); // Move up two levels to get to parent of conf.
 +      if( dir.exists() && dir.canRead() )
 +        home = dir.getAbsolutePath();
 +      set( GATEWAY_HOME_VAR, home );
 +      log.settingGatewayHomeDir( "configuration file location", home );
 +      return;
 +    }
 +    home = System.getProperty( "user.dir" );
 +    if( home != null ) {
 +      set( GATEWAY_HOME_VAR, home );
 +      log.settingGatewayHomeDir( "user.dir system property", home );
 +      return;
 +    }
 +  }
 +
 +  // 1. GATEWAY_HOME system property
 +  // 2. GATEWAY_HOME environment variable
 +  // 3. user.dir system property
 +  // 4. class path
 +  private URL loadConfig( String fileName, URL lastFileUrl ) {
 +    lastFileUrl = loadConfigFile( System.getProperty( GATEWAY_HOME_VAR ), fileName );
 +    if( lastFileUrl == null ) {
 +      lastFileUrl = loadConfigFile( System.getenv( GATEWAY_HOME_VAR ), fileName );
 +    }
 +    if( lastFileUrl == null ) {
 +      lastFileUrl = loadConfigFile( System.getProperty( "user.dir" ), fileName );
 +    }
 +    if( lastFileUrl == null ) {
 +      lastFileUrl = loadConfigResource( fileName );
 +    }
 +    if( lastFileUrl != null && !"file".equals( lastFileUrl.getProtocol() ) ) {
 +      lastFileUrl = null;
 +    }
 +    return lastFileUrl;
 +  }
 +
 +  private URL loadConfigFile( String dir, String file ) {
 +    URL url = null;
 +    if( dir != null ) {
 +      File f = new File( dir, file );
 +      if( f.exists() ) {
 +        String path = f.getAbsolutePath();
 +        try {
 +          url = f.toURI().toURL();
 +          addResource( new Path( path ) );
 +          log.loadingConfigurationFile( path );
 +        } catch ( MalformedURLException e ) {
 +          log.failedToLoadConfig( path, e );
 +        }
 +      }
 +    }
 +    return url;
 +  }
 +
 +  private URL loadConfigResource( String file ) {
 +    URL url = getResource( file );
 +    if( url != null ) {
 +      log.loadingConfigurationResource( url.toExternalForm() );
 +      addResource( url );
 +    }
 +    return url;
 +  }
 +
 +  @Override
 +  public String getGatewayHost() {
 +    String host = get( HTTP_HOST, "0.0.0.0" );
 +    return host;
 +  }
 +
 +  @Override
 +  public int getGatewayPort() {
 +    return Integer.parseInt( get( HTTP_PORT, DEFAULT_HTTP_PORT ) );
 +  }
 +
 +  @Override
 +  public String getGatewayPath() {
 +    return get( HTTP_PATH, DEFAULT_HTTP_PATH );
 +  }
 +
 +  @Override
++  public String getGatewayProvidersConfigDir() {
++    return getGatewayConfDir() + File.separator + PROVIDERCONFIG_DIR_NAME;
++  }
++
++  @Override
++  public String getGatewayDescriptorsDir() {
++    return getGatewayConfDir() + File.separator + DESCRIPTORS_DIR_NAME;
++  }
++
++  @Override
 +  public String getGatewayTopologyDir() {
 +    return getGatewayConfDir() + File.separator + "topologies";
 +  }
 +
 +  @Override
 +  public String getGatewayDeploymentDir() {
 +    return get(DEPLOYMENT_DIR, getGatewayDataDir() + File.separator + DEFAULT_DEPLOYMENT_DIR);
 +  }
 +
 +  @Override
 +  public String getGatewaySecurityDir() {
 +    return get(SECURITY_DIR, getGatewayDataDir() + File.separator + DEFAULT_SECURITY_DIR);
 +  }
 +
 +  @Override
 +  public InetSocketAddress getGatewayAddress() throws UnknownHostException {
 +    String host = getGatewayHost();
 +    int port = getGatewayPort();
 +    InetSocketAddress address = new InetSocketAddress( host, port );
 +    return address;
 +  }
 +
 +  @Override
 +  public boolean isSSLEnabled() {
 +    String enabled = get( SSL_ENABLED, "true" );
 +    
 +    return "true".equals(enabled);
 +  }
 +
 +  @Override
 +  public boolean isHadoopKerberosSecured() {
 +    String hadoopKerberosSecured = get( HADOOP_KERBEROS_SECURED, "false" );
 +    return "true".equals(hadoopKerberosSecured);
 +  }
 +
 +  @Override
 +  public String getKerberosConfig() {
 +    return get( KRB5_CONFIG ) ;
 +  }
 +
 +  @Override
 +  public boolean isKerberosDebugEnabled() {
 +    String kerberosDebugEnabled = get( KRB5_DEBUG, "false" );
 +    return "true".equals(kerberosDebugEnabled);
 +  }
 +  
 +  @Override
 +  public String getKerberosLoginConfig() {
 +    return get( KRB5_LOGIN_CONFIG );
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getDefaultTopologyName()
 +   */
 +  @Override
 +  public String getDefaultTopologyName() {
 +    String name = get(GATEWAY_DEFAULT_TOPOLOGY_NAME_PARAM);
 +    return name != null ? name : GATEWAY_DEFAULT_TOPOLOGY_NAME;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getDefaultAppRedirectPath()
 +   */
 +  @Override
 +  public String getDefaultAppRedirectPath() {
 +    String defTopo = getDefaultTopologyName();
 +    if( defTopo == null ) {
 +      return null;
 +    } else {
 +      return "/" + getGatewayPath() + "/" + defTopo;
 +    }
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getFrontendUrl()
 +   */
 +  @Override
 +  public String getFrontendUrl() {
 +    String url = get( FRONTEND_URL, null );
 +    return url;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getExcludedSSLProtocols()
 +   */
 +  @Override
 +  public List<String> getExcludedSSLProtocols() {
 +    List<String> protocols = null;
 +    String value = get(SSL_EXCLUDE_PROTOCOLS);
 +    if (!"none".equals(value)) {
 +      protocols = Arrays.asList(value.split("\\s*,\\s*"));
 +    }
 +    return protocols;
 +  }
 +
 +  @Override
 +  public List<String> getIncludedSSLCiphers() {
 +    List<String> list = null;
 +    String value = get(SSL_INCLUDE_CIPHERS);
 +    if (value != null && !value.isEmpty() && !"none".equalsIgnoreCase(value.trim())) {
 +      list = Arrays.asList(value.trim().split("\\s*,\\s*"));
 +    }
 +    return list;
 +  }
 +
 +  @Override
 +  public List<String> getExcludedSSLCiphers() {
 +    List<String> list = null;
 +    String value = get(SSL_EXCLUDE_CIPHERS);
 +    if (value != null && !value.isEmpty() && !"none".equalsIgnoreCase(value.trim())) {
 +      list = Arrays.asList(value.trim().split("\\s*,\\s*"));
 +    }
 +    return list;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#isClientAuthNeeded()
 +   */
 +  @Override
 +  public boolean isClientAuthNeeded() {
 +    String clientAuthNeeded = get( CLIENT_AUTH_NEEDED, "false" );
 +    return "true".equals(clientAuthNeeded);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.config.GatewayConfig#isClientAuthWanted()
 +   */
 +  @Override
 +  public boolean isClientAuthWanted() {
 +    String clientAuthWanted = get( CLIENT_AUTH_WANTED, "false" );
 +    return "true".equals(clientAuthWanted);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTruststorePath()
 +   */
 +  @Override
 +  public String getTruststorePath() {
 +    return get( TRUSTSTORE_PATH, null);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTrustAllCerts()
 +   */
 +  @Override
 +  public boolean getTrustAllCerts() {
 +    String trustAllCerts = get( TRUST_ALL_CERTS, "false" );
 +    return "true".equals(trustAllCerts);
 +  }
 +  
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTruststorePath()
 +   */
 +  @Override
 +  public String getTruststoreType() {
 +    return get( TRUSTSTORE_TYPE, "JKS");
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTruststorePath()
 +   */
 +  @Override
 +  public String getKeystoreType() {
 +    return get( KEYSTORE_TYPE, "JKS");
 +  }
 +
 +  @Override
 +  public boolean isXForwardedEnabled() {
 +    String xForwardedEnabled = get( XFORWARDED_ENABLED, "true" );
 +    return "true".equals(xForwardedEnabled);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getEphemeralDHKeySize()
 +   */
 +  @Override
 +  public String getEphemeralDHKeySize() {
 +    return get( EPHEMERAL_DH_KEY_SIZE, "2048");
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getHttpClientMaxConnections()
 +   */
 +  @Override
 +  public int getHttpClientMaxConnections() {
 +    return getInt( HTTP_CLIENT_MAX_CONNECTION, 32 );
 +  }
 +
 +  @Override
 +  public int getHttpClientConnectionTimeout() {
 +    int t = -1;
-     String s = get( HTTP_CLIENT_CONNECTION_TIMEOUT, null );
++    String s = get( HTTP_CLIENT_CONNECTION_TIMEOUT, String.valueOf(TimeUnit.SECONDS.toMillis(20)));
 +    if ( s != null ) {
 +      try {
 +        t = (int)parseNetworkTimeout( s );
 +      } catch ( Exception e ) {
 +        // Ignore it and use the default.
 +      }
 +    }
 +    return t;
 +  }
 +
 +  @Override
 +  public int getHttpClientSocketTimeout() {
 +    int t = -1;
-     String s = get( HTTP_CLIENT_SOCKET_TIMEOUT, null );
++    String s = get( HTTP_CLIENT_SOCKET_TIMEOUT, String.valueOf(TimeUnit.SECONDS.toMillis(20)) );
 +    if ( s != null ) {
 +      try {
 +        t = (int)parseNetworkTimeout( s );
 +      } catch ( Exception e ) {
 +        // Ignore it and use the default.
 +      }
 +    }
 +    return t;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getThreadPoolMax()
 +   */
 +  @Override
 +  public int getThreadPoolMax() {
 +    int i = getInt( THREAD_POOL_MAX, 254 );
 +    // Testing has shown that a value lower than 5 prevents Jetty from servicing requests.
 +    if( i < 5 ) {
 +      i = 5;
 +    }
 +    return i;
 +  }
 +
 +  @Override
 +  public int getHttpServerRequestBuffer() {
 +    int i = getInt( HTTP_SERVER_REQUEST_BUFFER, 16 * 1024 );
 +    return i;
 +  }
 +
 +  @Override
 +  public int getHttpServerRequestHeaderBuffer() {
 +    int i = getInt( HTTP_SERVER_REQUEST_HEADER_BUFFER, 8 * 1024 );
 +    return i;
 +  }
 +
 +  @Override
 +  public int getHttpServerResponseBuffer() {
 +    int i = getInt( HTTP_SERVER_RESPONSE_BUFFER, 32 * 1024 );
 +    return i;
 +  }
 +
 +  @Override
 +  public int getHttpServerResponseHeaderBuffer() {
 +    int i = getInt( HTTP_SERVER_RESPONSE_HEADER_BUFFER, 8 * 1024 );
 +    return i;
 +  }
 +
 +  @Override
 +  public int getGatewayDeploymentsBackupVersionLimit() {
 +    int i = getInt( DEPLOYMENTS_BACKUP_VERSION_LIMIT, 5 );
 +    if( i < 0 ) {
 +      i = -1;
 +    }
 +    return i;
 +  }
 +
 +  @Override
 +  public long getGatewayIdleTimeout() {
 +    return getLong(GATEWAY_IDLE_TIMEOUT, 300000L);
 +  }
 +
 +  @Override
 +  public long getGatewayDeploymentsBackupAgeLimit() {
 +    PeriodFormatter f = new PeriodFormatterBuilder().appendDays().toFormatter();
 +    String s = get( DEPLOYMENTS_BACKUP_AGE_LIMIT, "-1" );
 +    long d;
 +    try {
 +      Period p = Period.parse( s, f );
 +      d = p.toStandardDuration().getMillis();
 +      if( d < 0 ) {
 +        d = -1;
 +      }
 +    } catch( Exception e ) {
 +      d = -1;
 +    }
 +    return d;
 +  }
 +
 +  @Override
 +  public String getSigningKeystoreName() {
 +    return get(SIGNING_KEYSTORE_NAME);
 +  }
 +
 +  @Override
 +  public String getSigningKeyAlias() {
 +    return get(SIGNING_KEY_ALIAS);
 +  }
 +
 +  @Override
 +  public List<String> getGlobalRulesServices() {
 +    String value = get( GLOBAL_RULES_SERVICES );
 +    if ( value != null && !value.isEmpty() && !"none".equalsIgnoreCase(value.trim()) ) {
 +      return Arrays.asList( value.trim().split("\\s*,\\s*") );
 +    }
 +    return DEFAULT_GLOBAL_RULES_SERVICES;
 +  }
 +
 +  @Override
 +  public boolean isMetricsEnabled() {
 +    String metricsEnabled = get( METRICS_ENABLED, "false" );
 +    return "true".equals(metricsEnabled);
 +  }
 +
 +  @Override
 +  public boolean isJmxMetricsReportingEnabled() {
 +    String enabled = get( JMX_METRICS_REPORTING_ENABLED, "false" );
 +    return "true".equals(enabled);
 +  }
 +
 +  @Override
 +  public boolean isGraphiteMetricsReportingEnabled() {
 +    String enabled = get( GRAPHITE_METRICS_REPORTING_ENABLED, "false" );
 +    return "true".equals(enabled);
 +  }
 +
 +  @Override
 +  public String getGraphiteHost() {
 +    String host = get( GRAPHITE_METRICS_REPORTING_HOST, "localhost" );
 +    return host;
 +  }
 +
 +  @Override
 +  public int getGraphitePort() {
 +    int i = getInt( GRAPHITE_METRICS_REPORTING_PORT, 32772 );
 +    return i;
 +  }
 +
 +  @Override
 +  public int getGraphiteReportingFrequency() {
 +    int i = getInt( GRAPHITE_METRICS_REPORTING_FREQUENCY, 1 );
 +    return i;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#isWebsocketEnabled()
 +   */
 +  @Override
 +  public boolean isWebsocketEnabled() {
 +    final String result = get( WEBSOCKET_FEATURE_ENABLED, Boolean.toString(DEFAULT_WEBSOCKET_FEATURE_ENABLED));
 +    return Boolean.parseBoolean(result);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketMaxTextMessageSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxTextMessageSize() {
 +    return getInt( WEBSOCKET_MAX_TEXT_MESSAGE_SIZE, DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketMaxBinaryMessageSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxBinaryMessageSize() {
 +    return getInt( WEBSOCKET_MAX_BINARY_MESSAGE_SIZE, DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketMaxTextMessageBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxTextMessageBufferSize() {
 +    return getInt( WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE, DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketMaxBinaryMessageBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxBinaryMessageBufferSize() {
 +    return getInt( WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE, DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketInputBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketInputBufferSize() {
 +    return getInt( WEBSOCKET_INPUT_BUFFER_SIZE, DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketAsyncWriteTimeout()
 +   */
 +  @Override
 +  public int getWebsocketAsyncWriteTimeout() {
 +    return getInt( WEBSOCKET_ASYNC_WRITE_TIMEOUT, DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketIdleTimeout()
 +   */
 +  @Override
 +  public int getWebsocketIdleTimeout() {
 +    return getInt( WEBSOCKET_IDLE_TIMEOUT, DEFAULT_WEBSOCKET_IDLE_TIMEOUT);
 +  }
 +
 +  /*
 +   * (non-Javadoc)
 +   *
 +   * @see
 +   * GatewayConfig#getMimeTypesToCompress()
 +   */
 +  @Override
 +  public List<String> getMimeTypesToCompress() {
 +    List<String> mimeTypes = null;
 +    String value = get(MIME_TYPES_TO_COMPRESS, DEFAULT_MIME_TYPES_TO_COMPRESS);
 +    if (value != null && !value.isEmpty()) {
 +      mimeTypes = Arrays.asList(value.trim().split("\\s*,\\s*"));
 +    }
 +    return mimeTypes;
 +  }
 +
 +  /**
 +   * Map of Topology names and their ports.
 +   *
 +   * @return an unmodifiable map of topology name to port number
 +   */
 +  @Override
 +  public Map<String, Integer> getGatewayPortMappings() {
 +
 +    final Map<String, Integer> result = new ConcurrentHashMap<String, Integer>();
 +    final Map<String, String> properties = getValByRegex(GATEWAY_PORT_MAPPING_REGEX);
 +
 +    // Convert port no. from string to int
 +    for(final Map.Entry<String, String> e : properties.entrySet()) {
 +      // ignore the GATEWAY_PORT_MAPPING_ENABLED property
 +      if(!e.getKey().equalsIgnoreCase(GATEWAY_PORT_MAPPING_ENABLED)) {
 +        // extract the topology name and use it as a key
 +        result.put(StringUtils.substringAfter(e.getKey(), GATEWAY_PORT_MAPPING_PREFIX), Integer.parseInt(e.getValue()) );
 +      }
 +
 +    }
 +
 +    return Collections.unmodifiableMap(result);
 +  }
 +
 +  /**
 +   * Is the topology port mapping feature enabled?
 +   *
 +   * @return true if port mapping is enabled; false otherwise
 +   */
 +  @Override
 +  public boolean isGatewayPortMappingEnabled() {
 +    final String result = get( GATEWAY_PORT_MAPPING_ENABLED, Boolean.toString(DEFAULT_GATEWAY_PORT_MAPPING_ENABLED));
 +    return Boolean.parseBoolean(result);
 +  }
 +
 +  private static long parseNetworkTimeout(String s ) {
 +    PeriodFormatter f = new PeriodFormatterBuilder()
 +        .appendMinutes().appendSuffix("m"," min")
 +        .appendSeconds().appendSuffix("s"," sec")
 +        .appendMillis().toFormatter();
 +    Period p = Period.parse( s, f );
 +    return p.toStandardDuration().getMillis();
 +  }
 +
 +  @Override
 +  public boolean isCookieScopingToPathEnabled() {
 +    final boolean result = Boolean.parseBoolean(get(COOKIE_SCOPING_ENABLED,
 +            Boolean.toString(DEFAULT_COOKIE_SCOPING_FEATURE_ENABLED)));
 +    return result;
 +  }
 +
 +  @Override
 +  public String getHeaderNameForRemoteAddress() {
 +    String value = getVar(REMOTE_IP_HEADER_NAME, "X-Forwarded-For");
 +    return value;
 +  }
 +
 +  @Override
 +  public String getAlgorithm() {
 +	return getVar(CRYPTO_ALGORITHM, null);
 +  }
 +
 +  @Override
 +  public String getPBEAlgorithm() {
 +	return getVar(CRYPTO_PBE_ALGORITHM, null);
 +  }
 +
 +  @Override
 +  public String getTransformation() {
 +	return getVar(CRYPTO_TRANSFORMATION, null);
 +  }
 +
 +  @Override
 +  public String getSaltSize() {
 +	return getVar(CRYPTO_SALTSIZE, null);
 +  }
 +
 +  @Override
 +  public String getIterationCount() {
 +	return getVar(CRYPTO_ITERATION_COUNT, null);
 +  }
 +
 +  @Override
 +  public String getKeyLength() {
 +	return getVar(CRYPTO_KEY_LENGTH, null);
 +  }
 +
 +  @Override
 +  public boolean isGatewayServerHeaderEnabled() {
 +    return Boolean.parseBoolean(getVar(SERVER_HEADER_ENABLED, "true"));
 +  }
++
++  @Override
++  public int getClusterMonitorPollingInterval(String type) {
++    return getInt(CLUSTER_CONFIG_MONITOR_PREFIX + type.toLowerCase() + CLUSTER_CONFIG_MONITOR_INTERVAL_SUFFIX, -1);
++  }
++  
++  @Override
++  public boolean isClusterMonitorEnabled(String type) {
++    return getBoolean(CLUSTER_CONFIG_MONITOR_PREFIX + type.toLowerCase() + CLUSTER_CONFIG_MONITOR_ENABLED_SUFFIX, true);
++  }
++
++  @Override
++  public List<String> getRemoteRegistryConfigurationNames() {
++    List<String> result = new ArrayList<>();
++
++    // Iterate over all the properties in this configuration
++    for (Map.Entry<String, String> entry : this) {
++      String propertyName = entry.getKey();
++
++      // Search for all the remote config registry properties
++      if (propertyName.startsWith(CONFIG_REGISTRY_PREFIX)) {
++        String registryName = propertyName.substring(CONFIG_REGISTRY_PREFIX.length() + 1);
++        result.add(registryName);
++      }
++    }
++
++    return result;
++  }
++
++  @Override
++  public String getRemoteRegistryConfiguration(String name) {
++    return get(CONFIG_REGISTRY_PREFIX + "." + name );
++  }
++
++  @Override
++  public String getRemoteConfigurationMonitorClientName() {
++    return get(REMOTE_CONFIG_MONITOR_CLIENT_NAME);
++  }
++
 +}
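
The new remote registry getters above key off property names under the gateway.remote.config.registry prefix. The following is a minimal, self-contained sketch of that naming convention; the property keys and values (and the literal "gateway" file prefix) are hypothetical examples, not shipped defaults.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Illustration of the convention getRemoteRegistryConfigurationNames() relies on:
// every key under the "gateway.remote.config.registry" prefix defines one registry
// client, and the part after the prefix is the client name.
public class RemoteRegistryNamingSketch {

  static final String CONFIG_REGISTRY_PREFIX = "gateway.remote.config.registry";

  public static void main(String[] args) {
    Map<String, String> gatewaySiteProps = new LinkedHashMap<>();
    gatewaySiteProps.put(CONFIG_REGISTRY_PREFIX + ".sandbox-zookeeper-client",
                         "type=ZooKeeper;address=localhost:2181");   // hypothetical value format
    gatewaySiteProps.put("gateway.remote.config.monitor.client",
                         "sandbox-zookeeper-client");                // selects the monitor's client

    List<String> clientNames = new ArrayList<>();
    for (Map.Entry<String, String> entry : gatewaySiteProps.entrySet()) {
      String key = entry.getKey();
      if (key.startsWith(CONFIG_REGISTRY_PREFIX + ".")) {
        // Everything after "<prefix>." is treated as the registry client name
        clientNames.add(key.substring(CONFIG_REGISTRY_PREFIX.length() + 1));
      }
    }

    System.out.println(clientNames); // prints [sandbox-zookeeper-client]
  }
}

With properties like these in gateway-site.xml, getRemoteRegistryConfigurationNames() would report the configured client names, and the gateway.remote.config.monitor.client property (returned by getRemoteConfigurationMonitorClientName()) selects which of them the remote configuration monitor uses.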

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/main/java/org/apache/knox/gateway/services/CLIGatewayServices.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/services/CLIGatewayServices.java
index 3f29930,0000000..a1ed549
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/CLIGatewayServices.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/CLIGatewayServices.java
@@@ -1,143 -1,0 +1,153 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services;
 +
 +import org.apache.knox.gateway.GatewayMessages;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.deploy.DeploymentContext;
 +import org.apache.knox.gateway.descriptor.FilterParamDescriptor;
 +import org.apache.knox.gateway.descriptor.ResourceDescriptor;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
++import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceFactory;
++import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 +import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
 +import org.apache.knox.gateway.services.security.impl.DefaultAliasService;
 +import org.apache.knox.gateway.services.security.impl.DefaultCryptoService;
 +import org.apache.knox.gateway.services.security.impl.DefaultKeystoreService;
 +import org.apache.knox.gateway.services.security.impl.CLIMasterService;
 +import org.apache.knox.gateway.topology.Provider;
 +
 +import java.util.Collection;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
 +public class CLIGatewayServices implements GatewayServices {
 +
 +  private static GatewayMessages log = MessagesFactory.get( GatewayMessages.class );
 +
 +  private Map<String,Service> services = new HashMap<>();
 +  private CLIMasterService ms = null;
 +  private DefaultKeystoreService ks = null;
 +
 +  public CLIGatewayServices() {
 +    super();
 +  }
 +
 +  public void init(GatewayConfig config, Map<String,String> options) throws ServiceLifecycleException {
 +    ms = new CLIMasterService();
 +    ms.init(config, options);
 +    services.put("MasterService", ms);
 +
 +    ks = new DefaultKeystoreService();
 +    ks.setMasterService(ms);
 +    ks.init(config, options);
 +    services.put(KEYSTORE_SERVICE, ks);
 +    
 +    DefaultAliasService alias = new DefaultAliasService();
 +    alias.setKeystoreService(ks);
 +    alias.init(config, options);
 +    services.put(ALIAS_SERVICE, alias);
 +
 +    DefaultCryptoService crypto = new DefaultCryptoService();
 +    crypto.setKeystoreService(ks);
 +    crypto.setAliasService(alias);
 +    crypto.init(config, options);
 +    services.put(CRYPTO_SERVICE, crypto);
 +
 +    DefaultTopologyService tops = new DefaultTopologyService();
 +    tops.init(  config, options  );
 +    services.put(TOPOLOGY_SERVICE, tops);
++
++    RemoteConfigurationRegistryClientService registryClientService =
++                                                    RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
++    registryClientService.setAliasService(alias);
++    registryClientService.init(config, options);
++    services.put(REMOTE_REGISTRY_CLIENT_SERVICE, registryClientService);
 +  }
 +  
 +  public void start() throws ServiceLifecycleException {
 +    ms.start();
 +
 +    ks.start();
 +
 +    DefaultAliasService alias = (DefaultAliasService) services.get(ALIAS_SERVICE);
 +    alias.start();
 +
 +    DefaultTopologyService tops = (DefaultTopologyService)services.get(TOPOLOGY_SERVICE);
 +    tops.start();
++
++    (services.get(REMOTE_REGISTRY_CLIENT_SERVICE)).start();
 +  }
 +
 +  public void stop() throws ServiceLifecycleException {
 +    ms.stop();
 +
 +    ks.stop();
 +
 +    DefaultAliasService alias = (DefaultAliasService) services.get(ALIAS_SERVICE);
 +    alias.stop();
 +
 +    DefaultTopologyService tops = (DefaultTopologyService)services.get(TOPOLOGY_SERVICE);
 +    tops.stop();
 +  }
 +  
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.GatewayServices#getServiceNames()
 +   */
 +  @Override
 +  public Collection<String> getServiceNames() {
 +    return services.keySet();
 +  }
 +  
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.GatewayServices#getService(java.lang.String)
 +   */
 +  @Override
 +  public <T> T getService(String serviceName) {
 +    return (T)services.get( serviceName );
 +  }
 +
 +  @Override
 +  public String getRole() {
 +    return "Services";
 +  }
 +
 +  @Override
 +  public String getName() {
 +    return "GatewayServices";
 +  }
 +
 +  @Override
 +  public void initializeContribution(DeploymentContext context) {
 +  }
 +
 +  @Override
 +  public void contributeProvider(DeploymentContext context, Provider provider) {
 +  }
 +
 +  @Override
 +  public void contributeFilter(DeploymentContext context, Provider provider,
 +      org.apache.knox.gateway.topology.Service service,
 +      ResourceDescriptor resource, List<FilterParamDescriptor> params) {
 +  }
 +
 +  @Override
 +  public void finalizeContribution(DeploymentContext context) {
 +  }
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/main/java/org/apache/knox/gateway/services/DefaultGatewayServices.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/services/DefaultGatewayServices.java
index c2acd54,0000000..7542d75
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/DefaultGatewayServices.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/DefaultGatewayServices.java
@@@ -1,223 -1,0 +1,245 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services;
 +
 +import org.apache.knox.gateway.GatewayMessages;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.deploy.DeploymentContext;
 +import org.apache.knox.gateway.descriptor.FilterParamDescriptor;
 +import org.apache.knox.gateway.descriptor.ResourceDescriptor;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
++import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceFactory;
++import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 +import org.apache.knox.gateway.services.registry.impl.DefaultServiceDefinitionRegistry;
 +import org.apache.knox.gateway.services.metrics.impl.DefaultMetricsService;
++import org.apache.knox.gateway.services.topology.impl.DefaultClusterConfigurationMonitorService;
 +import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
 +import org.apache.knox.gateway.services.hostmap.impl.DefaultHostMapperService;
 +import org.apache.knox.gateway.services.registry.impl.DefaultServiceRegistryService;
 +import org.apache.knox.gateway.services.security.KeystoreServiceException;
 +import org.apache.knox.gateway.services.security.SSLService;
 +import org.apache.knox.gateway.services.security.impl.DefaultAliasService;
 +import org.apache.knox.gateway.services.security.impl.DefaultCryptoService;
 +import org.apache.knox.gateway.services.security.impl.DefaultKeystoreService;
 +import org.apache.knox.gateway.services.security.impl.DefaultMasterService;
 +import org.apache.knox.gateway.services.security.impl.JettySSLService;
 +import org.apache.knox.gateway.services.token.impl.DefaultTokenAuthorityService;
 +import org.apache.knox.gateway.topology.Provider;
 +
 +import java.util.Collection;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
 +public class DefaultGatewayServices implements GatewayServices {
 +
 +  private static GatewayMessages log = MessagesFactory.get( GatewayMessages.class );
 +
 +  private Map<String,Service> services = new HashMap<>();
 +  private DefaultMasterService ms = null;
 +  private DefaultKeystoreService ks = null;
 +
 +  public DefaultGatewayServices() {
 +    super();
 +  }
 +
 +  public void init(GatewayConfig config, Map<String,String> options) throws ServiceLifecycleException {
 +    ms = new DefaultMasterService();
 +    ms.init(config, options);
 +    services.put("MasterService", ms);
 +
 +    ks = new DefaultKeystoreService();
 +    ks.setMasterService(ms);
 +    ks.init(config, options);
 +    services.put(KEYSTORE_SERVICE, ks);
 +    
 +    DefaultAliasService alias = new DefaultAliasService();
 +    alias.setKeystoreService(ks);
 +    alias.setMasterService(ms);
 +    alias.init(config, options);
 +    services.put(ALIAS_SERVICE, alias);
 +
 +    DefaultCryptoService crypto = new DefaultCryptoService();
 +    crypto.setKeystoreService(ks);
 +    crypto.setAliasService(alias);
 +    crypto.init(config, options);
 +    services.put(CRYPTO_SERVICE, crypto);
 +    
 +    DefaultTokenAuthorityService ts = new DefaultTokenAuthorityService();
 +    ts.setAliasService(alias);
 +    ts.setKeystoreService(ks);
 +    ts.init(config, options);
 +    // Probably should not allow the token service to be looked up.
 +    services.put(TOKEN_SERVICE, ts);
 +    
 +    JettySSLService ssl = new JettySSLService();
 +    ssl.setAliasService(alias);
 +    ssl.setKeystoreService(ks);
 +    ssl.setMasterService(ms);
 +    ssl.init(config, options);
 +    services.put(SSL_SERVICE, ssl);
 +
 +    DefaultServiceRegistryService sr = new DefaultServiceRegistryService();
 +    sr.setCryptoService( crypto );
 +    sr.init( config, options );
 +    services.put( SERVICE_REGISTRY_SERVICE, sr );
 +
 +    DefaultHostMapperService hm = new DefaultHostMapperService();
 +    hm.init( config, options );
 +    services.put( HOST_MAPPING_SERVICE, hm );
 +
 +    DefaultServerInfoService sis = new DefaultServerInfoService();
 +    sis.init( config, options );
 +    services.put( SERVER_INFO_SERVICE, sis );
 +
++    RemoteConfigurationRegistryClientService registryClientService =
++                                                    RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
++    registryClientService.setAliasService(alias);
++    registryClientService.init(config, options);
++    services.put(REMOTE_REGISTRY_CLIENT_SERVICE, registryClientService);
++
++    DefaultClusterConfigurationMonitorService ccs = new DefaultClusterConfigurationMonitorService();
++    ccs.setAliasService(alias);
++    ccs.init(config, options);
++    services.put(CLUSTER_CONFIGURATION_MONITOR_SERVICE, ccs);
++
 +    DefaultTopologyService tops = new DefaultTopologyService();
 +    tops.setAliasService(alias);
 +    tops.init(  config, options  );
 +    services.put(  TOPOLOGY_SERVICE, tops  );
 +
 +    DefaultServiceDefinitionRegistry sdr = new DefaultServiceDefinitionRegistry();
 +    sdr.init( config, options );
 +    services.put( SERVICE_DEFINITION_REGISTRY, sdr );
 +
 +    DefaultMetricsService metricsService = new DefaultMetricsService();
 +    metricsService.init( config, options );
 +    services.put( METRICS_SERVICE, metricsService );
 +  }
-   
++
 +  public void start() throws ServiceLifecycleException {
 +    ms.start();
 +
 +    ks.start();
 +
 +    DefaultAliasService alias = (DefaultAliasService) services.get(ALIAS_SERVICE);
 +    alias.start();
 +
 +    SSLService ssl = (SSLService) services.get(SSL_SERVICE);
 +    ssl.start();
 +
 +    ServerInfoService sis = (ServerInfoService) services.get(SERVER_INFO_SERVICE);
 +    sis.start();
 +
++    RemoteConfigurationRegistryClientService clientService =
++                            (RemoteConfigurationRegistryClientService)services.get(REMOTE_REGISTRY_CLIENT_SERVICE);
++    clientService.start();
++
++    (services.get(CLUSTER_CONFIGURATION_MONITOR_SERVICE)).start();
++
 +    DefaultTopologyService tops = (DefaultTopologyService)services.get(TOPOLOGY_SERVICE);
 +    tops.start();
 +
 +    DefaultMetricsService metricsService = (DefaultMetricsService) services.get(METRICS_SERVICE);
 +    metricsService.start();
 +  }
 +
 +  public void stop() throws ServiceLifecycleException {
 +    ms.stop();
 +
 +    ks.stop();
 +
++    (services.get(CLUSTER_CONFIGURATION_MONITOR_SERVICE)).stop();
++
 +    DefaultAliasService alias = (DefaultAliasService) services.get(ALIAS_SERVICE);
 +    alias.stop();
 +
 +    SSLService ssl = (SSLService) services.get(SSL_SERVICE);
 +    ssl.stop();
 +
 +    ServerInfoService sis = (ServerInfoService) services.get(SERVER_INFO_SERVICE);
 +    sis.stop();
 +
 +    DefaultTopologyService tops = (DefaultTopologyService)services.get(TOPOLOGY_SERVICE);
 +    tops.stop();
 +
 +    DefaultMetricsService metricsService = (DefaultMetricsService) services.get(METRICS_SERVICE);
 +    metricsService.stop();
 +
 +  }
 +  
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.GatewayServices#getServiceNames()
 +   */
 +  @Override
 +  public Collection<String> getServiceNames() {
 +    return services.keySet();
 +  }
 +  
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.GatewayServices#getService(java.lang.String)
 +   */
 +  @Override
 +  public <T> T getService(String serviceName) {
 +    return (T)services.get(serviceName);
 +  }
 +
 +  @Override
 +  public String getRole() {
 +    return "Services";
 +  }
 +
 +  @Override
 +  public String getName() {
 +    return "GatewayServices";
 +  }
 +
 +  @Override
 +  public void initializeContribution(DeploymentContext context) {
 +    // setup credential store as appropriate
 +    String clusterName = context.getTopology().getName();
 +    try {
 +      if (!ks.isCredentialStoreForClusterAvailable(clusterName)) {
 +        log.creatingCredentialStoreForCluster(clusterName);
 +        ks.createCredentialStoreForCluster(clusterName);
 +      }
 +      else {
 +        log.credentialStoreForClusterFoundNotCreating(clusterName);
 +      }
 +    } catch (KeystoreServiceException e) {
 +      throw new RuntimeException("Credential store was found but was unable to be loaded - the provided (or persisted) master secret may not match the password for the credential store.", e);
 +    }
 +  }
 +
 +  @Override
 +  public void contributeProvider(DeploymentContext context, Provider provider) {
 +  }
 +
 +  @Override
 +  public void contributeFilter(DeploymentContext context, Provider provider,
 +      org.apache.knox.gateway.topology.Service service,
 +      ResourceDescriptor resource, List<FilterParamDescriptor> params) {
 +  }
 +
 +  @Override
 +  public void finalizeContribution(DeploymentContext context) {
 +    // Tell the provider the location of the descriptor.
 +    context.getWebAppDescriptor().createListener().listenerClass( GatewayServicesContextListener.class.getName() );
 +    context.getWebAppDescriptor().createListener().listenerClass(GatewayMetricsServletContextListener.class.getName());
 +  }
 +}
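
Once DefaultGatewayServices (or CLIGatewayServices) has registered the REMOTE_REGISTRY_CLIENT_SERVICE, other components can look it up and resolve a named client. Below is a minimal sketch, assuming the restructured org.apache.knox package layout shown above; the client name "sandbox-zookeeper-client" and the entry path are hypothetical.

import java.util.List;

import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient;
import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;

// Sketch only: given an initialized GatewayServices instance, obtain the remote
// registry client service, resolve a client configured in gateway-site.xml, and
// print the ACLs of a registry entry.
public class RemoteRegistryAclSketch {

  public static void printAcls(GatewayServices services) throws Exception {
    RemoteConfigurationRegistryClientService clientService =
        services.getService(GatewayServices.REMOTE_REGISTRY_CLIENT_SERVICE);

    RemoteConfigurationRegistryClient client = clientService.get("sandbox-zookeeper-client");
    if (client == null) {
      return; // no client by that name is configured in gateway-site.xml
    }

    List<RemoteConfigurationRegistryClient.EntryACL> acls =
        client.getACL("/knox/config/shared-providers");
    for (RemoteConfigurationRegistryClient.EntryACL acl : acls) {
      System.out.println(acl.getType() + ":" + acl.getId() + ":" + acl.getPermissions());
    }
  }
}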


[23/49] knox git commit: KNOX-1125 - KNOXCLI Additions to Support Management of Knox config in remote registry (Phil Zampino via Sandeep More)

Posted by mo...@apache.org.
KNOX-1125 - KNOXCLI Additions to Support Management of Knox config in remote registry (Phil Zampino via Sandeep More)


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/828ea38f
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/828ea38f
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/828ea38f

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 828ea38fcfd4a4edee2813ae9d357ee0f555afc8
Parents: a09e751
Author: Sandeep More <mo...@apache.org>
Authored: Mon Dec 4 13:44:04 2017 -0500
Committer: Sandeep More <mo...@apache.org>
Committed: Mon Dec 4 13:44:04 2017 -0500

----------------------------------------------------------------------
 .../DefaultRemoteConfigurationMonitor.java      |   2 +
 .../org/apache/hadoop/gateway/util/KnoxCLI.java | 391 ++++++++++++++++++-
 ...emoteConfigurationRegistryClientService.java | 243 ++++++++++++
 ...figurationRegistryClientServiceProvider.java |  32 ++
 .../apache/hadoop/gateway/util/KnoxCLITest.java | 361 ++++++++++++++++-
 ...teConfigurationRegistryClientServiceProvider |  19 +
 6 files changed, 1039 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/828ea38f/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
index 1dd71ac..03bbf16 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
@@ -101,6 +101,8 @@ class DefaultRemoteConfigurationMonitor implements RemoteConfigurationMonitor {
 
     @Override
     public void stop() throws Exception {
+        client.removeEntryListener(NODE_KNOX_PROVIDERS);
+        client.removeEntryListener(NODE_KNOX_DESCRIPTORS);
     }
 
 

http://git-wip-us.apache.org/repos/asf/knox/blob/828ea38f/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
index afc6ee0..5576df7 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.gateway.util;
 import java.io.BufferedReader;
 import java.io.Console;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -28,7 +27,6 @@ import java.io.PrintStream;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.security.cert.Certificate;
-import java.security.cert.CertificateEncodingException;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -51,6 +49,8 @@ import org.apache.hadoop.gateway.services.CLIGatewayServices;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.Service;
 import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 import org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.security.KeystoreService;
 import org.apache.hadoop.gateway.services.security.KeystoreServiceException;
@@ -82,6 +82,7 @@ import org.apache.shiro.util.ThreadContext;
 import org.eclipse.persistence.oxm.MediaType;
 import org.jboss.shrinkwrap.api.exporter.ExplodedExporter;
 import org.jboss.shrinkwrap.api.spec.EnterpriseArchive;
+
 /**
  *
  */
@@ -102,7 +103,13 @@ public class KnoxCLI extends Configured implements Tool {
       "   [" + ValidateTopologyCommand.USAGE + "]\n" +
       "   [" + LDAPAuthCommand.USAGE + "]\n" +
       "   [" + LDAPSysBindCommand.USAGE + "]\n" +
-      "   [" + ServiceTestCommand.USAGE + "]\n";
+      "   [" + ServiceTestCommand.USAGE + "]\n" +
+      "   [" + RemoteRegistryClientsListCommand.USAGE + "]\n" +
+      "   [" + RemoteRegistryUploadProviderConfigCommand.USAGE + "]\n" +
+      "   [" + RemoteRegistryUploadDescriptorCommand.USAGE + "]\n" +
+      "   [" + RemoteRegistryDeleteProviderConfigCommand.USAGE + "]\n" +
+      "   [" + RemoteRegistryDeleteDescriptorCommand.USAGE + "]\n" +
+      "   [" + RemoteRegistryGetACLCommand.USAGE + "]\n";
 
   /** allows stdout to be captured if necessary */
   public PrintStream out = System.out;
@@ -123,6 +130,9 @@ public class KnoxCLI extends Configured implements Tool {
   private String pass = null;
   private boolean groups = false;
 
+  private String remoteRegistryClient = null;
+  private String remoteRegistryEntryName = null;
+
   // For testing only
   private String master = null;
   private String type = null;
@@ -187,7 +197,12 @@ public class KnoxCLI extends Configured implements Tool {
    * % knoxcli user-auth-test [--cluster clustername] [--u username] [--p password]
    * % knoxcli system-user-auth-test [--cluster clustername] [--d]
    * % knoxcli service-test [--u user] [--p password] [--cluster clustername] [--hostname name] [--port port]
-   *
+   * % knoxcli list-registry-clients
+   * % knoxcli get-registry-acl entryName --registry-client name
+   * % knoxcli upload-provider-config filePath --registry-client name [--entry-name entryName]
+   * % knoxcli upload-descriptor filePath --registry-client name [--entry-name entryName]
+   * % knoxcli delete-provider-config providerConfig --registry-client name
+   * % knoxcli delete-descriptor descriptor --registry-client name
    * </pre>
    * @param args
    * @return
@@ -282,7 +297,7 @@ public class KnoxCLI extends Configured implements Tool {
         }
         this.cluster = args[++i];
       } else if (args[i].equals("service-test")) {
-        if( i + 1 >= args[i].length()) {
+        if( i + 1 >= args.length) {
           printKnoxShellUsage();
           return -1;
         } else {
@@ -348,6 +363,63 @@ public class KnoxCLI extends Configured implements Tool {
         }
       } else if (args[i].equals("--g")) {
         this.groups = true;
+      } else if (args[i].equals("list-registry-clients")) {
+        command = new RemoteRegistryClientsListCommand();
+      } else if (args[i].equals("--registry-client")) {
+        if (i + 1 >= args.length || args[i + 1].startsWith("-")) {
+          printKnoxShellUsage();
+          return -1;
+        }
+        this.remoteRegistryClient = args[++i];
+      } else if (args[i].equalsIgnoreCase("upload-provider-config")) {
+        String fileName;
+        if (i <= (args.length - 1)) {
+          fileName = args[++i];
+          command = new RemoteRegistryUploadProviderConfigCommand(fileName);
+        } else {
+          printKnoxShellUsage();
+          return -1;
+        }
+      } else if (args[i].equals("upload-descriptor")) {
+        String fileName;
+        if (i <= (args.length - 1)) {
+          fileName = args[++i];
+          command = new RemoteRegistryUploadDescriptorCommand(fileName);
+        } else {
+          printKnoxShellUsage();
+          return -1;
+        }
+      } else if (args[i].equals("--entry-name")) {
+        if (i <= (args.length - 1)) {
+          remoteRegistryEntryName = args[++i];
+        } else {
+          printKnoxShellUsage();
+          return -1;
+        }
+      } else if (args[i].equals("delete-descriptor")) {
+        if (i <= (args.length - 1)) {
+          String entry = args[++i];
+          command = new RemoteRegistryDeleteDescriptorCommand(entry);
+        } else {
+          printKnoxShellUsage();
+          return -1;
+        }
+      } else if (args[i].equals("delete-provider-config")) {
+        if (i <= (args.length - 1)) {
+          String entry = args[++i];
+          command = new RemoteRegistryDeleteProviderConfigCommand(entry);
+        } else {
+          printKnoxShellUsage();
+          return -1;
+        }
+      } else if (args[i].equalsIgnoreCase("get-registry-acl")) {
+        if (i <= (args.length - 1)) {
+          String entry = args[++i];
+          command = new RemoteRegistryGetACLCommand(entry);
+        } else {
+          printKnoxShellUsage();
+          return -1;
+        }
       } else {
         printKnoxShellUsage();
         //ToolRunner.printGenericCommandUsage(System.err);
@@ -406,6 +478,24 @@ public class KnoxCLI extends Configured implements Tool {
       out.println(ServiceTestCommand.USAGE + "\n\n" + ServiceTestCommand.DESC);
       out.println();
       out.println( div );
+      out.println(RemoteRegistryClientsListCommand.USAGE + "\n\n" + RemoteRegistryClientsListCommand.DESC);
+      out.println();
+      out.println( div );
+      out.println(RemoteRegistryGetACLCommand.USAGE + "\n\n" + RemoteRegistryGetACLCommand.DESC);
+      out.println();
+      out.println( div );
+      out.println(RemoteRegistryUploadProviderConfigCommand.USAGE + "\n\n" + RemoteRegistryUploadProviderConfigCommand.DESC);
+      out.println();
+      out.println( div );
+      out.println(RemoteRegistryUploadDescriptorCommand.USAGE + "\n\n" + RemoteRegistryUploadDescriptorCommand.DESC);
+      out.println();
+      out.println( div );
+      out.println(RemoteRegistryDeleteProviderConfigCommand.USAGE + "\n\n" + RemoteRegistryDeleteProviderConfigCommand.DESC);
+      out.println();
+      out.println( div );
+      out.println(RemoteRegistryDeleteDescriptorCommand.USAGE + "\n\n" + RemoteRegistryDeleteDescriptorCommand.DESC);
+      out.println();
+      out.println( div );
     }
   }
 
@@ -439,6 +529,11 @@ public class KnoxCLI extends Configured implements Tool {
       TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
       return ts;
     }
+
+    protected RemoteConfigurationRegistryClientService getRemoteConfigRegistryClientService() {
+      return services.getService(GatewayServices.REMOTE_REGISTRY_CLIENT_SERVICE);
+    }
+
   }
 
  private class AliasListCommand extends Command {
@@ -1598,9 +1693,10 @@ public class KnoxCLI extends Configured implements Tool {
   public class ServiceTestCommand extends Command {
     public static final String USAGE = "service-test [--u username] [--p password] [--cluster clustername] [--hostname name] " +
         "[--port port]";
-    public static final String DESC = "This command requires a running instance of Knox to be present on the same " +
-        "machine. It will execute a test to make sure all services are accessible through the gateway URLs. Errors are " +
-        "reported and suggestions to resolve any problems are returned. JSON formatted.";
+    public static final String DESC =
+                        "This command requires a running instance of Knox to be present on the same machine.\n" +
+                        "It will execute a test to make sure all services are accessible through the gateway URLs.\n" +
+                        "Errors are reported and suggestions to resolve any problems are returned. JSON formatted.\n";
 
     private boolean ssl = true;
     private int attempts = 0;
@@ -1753,6 +1849,285 @@ public class KnoxCLI extends Configured implements Tool {
 
   }
 
+  public class RemoteRegistryClientsListCommand extends Command {
+
+    static final String USAGE = "list-registry-clients";
+    static final String DESC = "Lists all of the remote configuration registry clients defined in gateway-site.xml.\n";
+
+    /* (non-Javadoc)
+     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
+     */
+    @Override
+    public void execute() throws Exception {
+      GatewayConfig config = getGatewayConfig();
+      List<String> remoteConfigRegistryClientNames = config.getRemoteRegistryConfigurationNames();
+      if (!remoteConfigRegistryClientNames.isEmpty()) {
+        out.println("Listing remote configuration registry clients:");
+        for (String name : remoteConfigRegistryClientNames) {
+          out.println(name);
+        }
+      }
+    }
+
+    /* (non-Javadoc)
+     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
+     */
+    @Override
+    public String getUsage() {
+      return USAGE + ":\n\n" + DESC;
+    }
+ }
+
+
+  /**
+   * Base class for remote config registry upload commands
+   */
+  public abstract class RemoteRegistryUploadCommand extends Command {
+    protected static final String ROOT_ENTRY = "/knox";
+    protected static final String CONFIG_ENTRY = ROOT_ENTRY + "/config";
+    protected static final String PROVIDER_CONFIG_ENTRY = CONFIG_ENTRY + "/shared-providers";
+    protected static final String DESCRIPTORS__ENTRY = CONFIG_ENTRY + "/descriptors";
+
+    private File sourceFile = null;
+    protected String filename = null;
+
+    protected RemoteRegistryUploadCommand(String sourceFileName) {
+      this.filename = sourceFileName;
+    }
+
+    private void upload(RemoteConfigurationRegistryClient client, String entryPath, File source) throws Exception {
+      String content = FileUtils.readFileToString(source);
+      if (client.entryExists(entryPath)) {
+        // If it exists, then we're going to set the data
+        client.setEntryData(entryPath, content);
+      } else {
+        // If it does not exist, then create it and set the data
+        client.createEntry(entryPath, content);
+      }
+    }
+
+    File getSourceFile() {
+      if (sourceFile == null) {
+        sourceFile = new File(filename);
+      }
+      return sourceFile;
+    }
+
+    String getEntryName(String prefixPath) {
+      String entryName = remoteRegistryEntryName;
+      if (entryName == null) {
+        File sourceFile = getSourceFile();
+        if (sourceFile.exists()) {
+          String path = sourceFile.getAbsolutePath();
+          entryName = path.substring(path.lastIndexOf(File.separator) + 1);
+        } else {
+          out.println("Could not locate source file: " + filename);
+        }
+      }
+      return prefixPath + "/" + entryName;
+    }
+
+    protected void execute(String entryName, File sourceFile) throws Exception {
+      if (remoteRegistryClient != null) {
+        RemoteConfigurationRegistryClientService cs = getRemoteConfigRegistryClientService();
+        RemoteConfigurationRegistryClient client = cs.get(remoteRegistryClient);
+        if (client != null) {
+          if (entryName != null) {
+            upload(client, entryName, sourceFile);
+          }
+        } else {
+          out.println("No remote configuration registry identified by '" + remoteRegistryClient + "' could be found.");
+        }
+      } else {
+        out.println("Missing required argument : --registry-client\n");
+      }
+    }
+
+  }
+
+
+  public class RemoteRegistryUploadProviderConfigCommand extends RemoteRegistryUploadCommand {
+
+    static final String USAGE = "upload-provider-config providerConfigFile --registry-client name [--entry-name entryName]";
+    static final String DESC = "Uploads a provider configuration to the specified remote registry client, optionally " +
+                               "renaming the entry.\nIf the entry name is not specified, the name of the uploaded " +
+                               "file is used.\n";
+
+    RemoteRegistryUploadProviderConfigCommand(String fileName) {
+      super(fileName);
+    }
+
+    /* (non-Javadoc)
+     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
+     */
+    @Override
+    public void execute() throws Exception {
+      super.execute(getEntryName(PROVIDER_CONFIG_ENTRY), getSourceFile());
+    }
+
+    /* (non-Javadoc)
+     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
+     */
+    @Override
+    public String getUsage() {
+      return USAGE + ":\n\n" + DESC;
+    }
+  }
+
+
+  public class RemoteRegistryUploadDescriptorCommand extends RemoteRegistryUploadCommand {
+
+    static final String USAGE = "upload-descriptor descriptorFile --registry-client name [--entry-name entryName]";
+    static final String DESC = "Uploads a simple descriptor using the specified remote registry client, optionally " +
+                               "renaming the entry.\nIf the entry name is not specified, the name of the uploaded " +
+                               "file is used.\n";
+
+    RemoteRegistryUploadDescriptorCommand(String fileName) {
+      super(fileName);
+    }
+
+    /* (non-Javadoc)
+     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
+     */
+    @Override
+    public void execute() throws Exception {
+      super.execute(getEntryName(DESCRIPTORS__ENTRY), getSourceFile());
+    }
+
+    /* (non-Javadoc)
+     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
+     */
+    @Override
+    public String getUsage() {
+      return USAGE + ":\n\n" + DESC;
+    }
+  }
+
+
+  public class RemoteRegistryGetACLCommand extends Command {
+
+    static final String USAGE = "get-registry-acl entry --registry-client name";
+    static final String DESC = "Presents the ACL settings for the specified remote registry entry.\n";
+
+    private String entry = null;
+
+    RemoteRegistryGetACLCommand(String entry) {
+      this.entry = entry;
+    }
+
+    /* (non-Javadoc)
+     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
+     */
+    @Override
+    public void execute() throws Exception {
+      if (remoteRegistryClient != null) {
+        RemoteConfigurationRegistryClientService cs = getRemoteConfigRegistryClientService();
+        RemoteConfigurationRegistryClient client = cs.get(remoteRegistryClient);
+        if (client != null) {
+          if (entry != null) {
+            List<RemoteConfigurationRegistryClient.EntryACL> acls = client.getACL(entry);
+            for (RemoteConfigurationRegistryClient.EntryACL acl : acls) {
+              out.println(acl.getType() + ":" + acl.getId() + ":" + acl.getPermissions());
+            }
+          }
+        } else {
+          out.println("No remote configuration registry identified by '" + remoteRegistryClient + "' could be found.");
+        }
+      } else {
+        out.println("Missing required argument : --registry-client\n");
+      }
+    }
+
+    /* (non-Javadoc)
+     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
+     */
+    @Override
+    public String getUsage() {
+      return USAGE + ":\n\n" + DESC;
+    }
+  }
+
+
+  /**
+   * Base class for remote config registry delete commands
+   */
+  public abstract class RemoteRegistryDeleteCommand extends Command {
+    protected static final String ROOT_ENTRY = "/knox";
+    protected static final String CONFIG_ENTRY = ROOT_ENTRY + "/config";
+    protected static final String PROVIDER_CONFIG_ENTRY = CONFIG_ENTRY + "/shared-providers";
+    protected static final String DESCRIPTORS__ENTRY = CONFIG_ENTRY + "/descriptors";
+
+    protected String entryName = null;
+
+    protected RemoteRegistryDeleteCommand(String entryName) {
+      this.entryName = entryName;
+    }
+
+    private void delete(RemoteConfigurationRegistryClient client, String entryPath) throws Exception {
+      if (client.entryExists(entryPath)) {
+        // If it exists, then delete it
+        client.deleteEntry(entryPath);
+      }
+    }
+
+    protected void execute(String entryName) throws Exception {
+      if (remoteRegistryClient != null) {
+        RemoteConfigurationRegistryClientService cs = getRemoteConfigRegistryClientService();
+        RemoteConfigurationRegistryClient client = cs.get(remoteRegistryClient);
+        if (client != null) {
+          if (entryName != null) {
+            delete(client, entryName);
+          }
+        } else {
+          out.println("No remote configuration registry identified by '" + remoteRegistryClient + "' could be found.");
+        }
+      } else {
+        out.println("Missing required argument : --registry-client\n");
+      }
+    }
+  }
+
+
+  public class RemoteRegistryDeleteProviderConfigCommand extends RemoteRegistryDeleteCommand {
+    static final String USAGE = "delete-provider-config providerConfig --registry-client name";
+    static final String DESC = "Deletes a shared provider configuration from the specified remote registry.\n";
+
+    public RemoteRegistryDeleteProviderConfigCommand(String entryName) {
+      super(entryName);
+    }
+
+    @Override
+    public void execute() throws Exception {
+      execute(PROVIDER_CONFIG_ENTRY + "/" + entryName);
+    }
+
+    @Override
+    public String getUsage() {
+      return USAGE + ":\n\n" + DESC;
+    }
+  }
+
+
+  public class RemoteRegistryDeleteDescriptorCommand extends RemoteRegistryDeleteCommand {
+    static final String USAGE = "delete-descriptor descriptor --registry-client name";
+    static final String DESC = "Deletes a simple descriptor from the specified remote registry.\n";
+
+    public RemoteRegistryDeleteDescriptorCommand(String entryName) {
+      super(entryName);
+    }
+
+    @Override
+    public void execute() throws Exception {
+      execute(DESCRIPTORS__ENTRY + "/" + entryName);
+    }
+
+    @Override
+    public String getUsage() {
+      return USAGE + ":\n\n" + DESC;
+    }
+  }
+
+
   private static Properties loadBuildProperties() {
     Properties properties = new Properties();
     InputStream inputStream = KnoxCLI.class.getClassLoader().getResourceAsStream( "build.properties" );

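The new registry commands above are driven through the same KnoxCLI entry point as the existing alias commands. Below is a minimal sketch of how they might be exercised programmatically, assuming the same GatewayConfigImpl setup and "master" secret used by the KnoxCLITest cases later in this commit; the client name "sandbox" and the /tmp/knox-registry address are illustrative only:

    KnoxCLI cli = new KnoxCLI();
    Configuration config = new GatewayConfigImpl();
    // Register a remote configuration registry client named "sandbox" (hypothetical name and address)
    config.set("gateway.remote.config.registry.sandbox", "type=LocalFileSystem;address=/tmp/knox-registry");
    cli.setConf(config);

    // List the configured registry clients
    cli.run(new String[]{"list-registry-clients", "--master", "master"});

    // Inspect the ACL on the shared-providers entry
    cli.run(new String[]{"get-registry-acl", "/knox/config/shared-providers",
                         "--registry-client", "sandbox", "--master", "master"});

    // Delete a previously uploaded provider configuration and descriptor
    cli.run(new String[]{"delete-provider-config", "my-provider-config.xml",
                         "--registry-client", "sandbox", "--master", "master"});
    cli.run(new String[]{"delete-descriptor", "my-topology.json",
                         "--registry-client", "sandbox", "--master", "master"});
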
http://git-wip-us.apache.org/repos/asf/knox/blob/828ea38f/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
new file mode 100644
index 0000000..161c201
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
@@ -0,0 +1,243 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.service.config.remote.config.RemoteConfigurationRegistriesAccessor;
+import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.hadoop.gateway.services.security.AliasService;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.PosixFilePermission;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+
+/**
+ * An implementation of RemoteConfigurationRegistryClientService intended to be used for testing without having to
+ * connect to an actual remote configuration registry.
+ */
+public class LocalFileSystemRemoteConfigurationRegistryClientService implements RemoteConfigurationRegistryClientService {
+
+    public static final String TYPE = "LocalFileSystem";
+
+    private Map<String, RemoteConfigurationRegistryClient> clients = new HashMap<>();
+
+
+    @Override
+    public void setAliasService(AliasService aliasService) {
+        // N/A
+    }
+
+    @Override
+    public RemoteConfigurationRegistryClient get(String name) {
+        return clients.get(name);
+    }
+
+    @Override
+    public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
+        List<RemoteConfigurationRegistryConfig> registryConfigurations =
+                                        RemoteConfigurationRegistriesAccessor.getRemoteRegistryConfigurations(config);
+        for (RemoteConfigurationRegistryConfig registryConfig : registryConfigurations) {
+            if (TYPE.equalsIgnoreCase(registryConfig.getRegistryType())) {
+                RemoteConfigurationRegistryClient registryClient = createClient(registryConfig);
+                clients.put(registryConfig.getName(), registryClient);
+            }
+        }
+    }
+
+    @Override
+    public void start() throws ServiceLifecycleException {
+
+    }
+
+    @Override
+    public void stop() throws ServiceLifecycleException {
+
+    }
+
+
+    private RemoteConfigurationRegistryClient createClient(RemoteConfigurationRegistryConfig config) {
+        String rootDir = config.getConnectionString();
+
+        return new RemoteConfigurationRegistryClient() {
+            private File root = new File(rootDir);
+
+            @Override
+            public String getAddress() {
+                return root.getAbsolutePath();
+            }
+
+            @Override
+            public boolean entryExists(String path) {
+                return (new File(root, path)).exists();
+            }
+
+            @Override
+            public List<EntryACL> getACL(String path) {
+                List<EntryACL> result = new ArrayList<>();
+
+                Path resolved = Paths.get(rootDir, path);
+                try {
+                    Map<String, List<String>> collected = new HashMap<>();
+
+                    Set<PosixFilePermission> perms = Files.getPosixFilePermissions(resolved);
+                    for (PosixFilePermission perm : perms) {
+                        String[] parsed = perm.toString().split("_");
+                        collected.computeIfAbsent(parsed[0].toLowerCase(), s -> new ArrayList<>()).add(parsed[1].toLowerCase());
+                    }
+
+                    for (String id : collected.keySet()) {
+                        EntryACL acl = new EntryACL() {
+                            @Override
+                            public String getId() {
+                                return id;
+                            }
+
+                            @Override
+                            public String getType() {
+                                return "fs";
+                            }
+
+                            @Override
+                            public Object getPermissions() {
+                                return collected.get(id).toString();
+                            }
+                        };
+                        result.add(acl);
+                    }
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+                return result;
+            }
+
+            @Override
+            public List<String> listChildEntries(String path) {
+                List<String> result = new ArrayList<>();
+
+                File entry = new File(root, path);
+                if (entry.exists() && entry.isDirectory()) {
+                    String[] list = entry.list();
+                    if (list != null) {
+                        result.addAll(Arrays.asList(list));
+                    }
+                }
+
+                return result;
+            }
+
+            @Override
+            public String getEntryData(String path) {
+                return getEntryData(path, "UTF-8");
+            }
+
+            @Override
+            public String getEntryData(String path, String encoding) {
+                String result = null;
+                File entry = new File(root, path);
+                if (entry.isFile() && entry.exists()) {
+                    try {
+                        result = FileUtils.readFileToString(entry, encoding);
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    }
+                }
+                return result;
+            }
+
+            @Override
+            public void createEntry(String path) {
+                createEntry(path, "");
+            }
+
+            @Override
+            public void createEntry(String path, String data) {
+                createEntry(path, data, "UTF-8");
+            }
+
+            @Override
+            public void createEntry(String path, String data, String encoding) {
+                File entry = new File(root, path);
+                if (!entry.exists()) {
+                    if (data != null) {
+                        try {
+                            FileUtils.writeStringToFile(entry, data, encoding);
+                        } catch (IOException e) {
+                            e.printStackTrace();
+                        }
+                    }
+                }
+            }
+
+            @Override
+            public int setEntryData(String path, String data) {
+                setEntryData(path, data, "UTF-8");
+                return 0;
+            }
+
+            @Override
+            public int setEntryData(String path, String data, String encoding) {
+                File entry = new File(root, path);
+                if (entry.exists()) {
+                    try {
+                        FileUtils.writeStringToFile(entry, data, encoding);
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    }
+                }
+                return 0;
+            }
+
+            @Override
+            public void deleteEntry(String path) {
+                File entry = new File(root, path);
+                if (entry.exists()) {
+                    entry.delete();
+                }
+            }
+
+            @Override
+            public void addChildEntryListener(String path, ChildEntryListener listener) throws Exception {
+                // N/A
+            }
+
+            @Override
+            public void addEntryListener(String path, EntryListener listener) throws Exception {
+                // N/A
+            }
+
+            @Override
+            public void removeEntryListener(String path) throws Exception {
+                // N/A
+            }
+        };
+    }
+
+}

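This test-only client maps registry entries directly onto files under the configured address, so an entry path such as /knox/config/shared-providers/<name> becomes a file at the same relative path beneath the registry root. A rough usage sketch follows, assuming a GatewayConfig that defines a LocalFileSystem client named "test_client" as the KnoxCLITest changes below do; the gatewayConfig reference is assumed to be available:

    RemoteConfigurationRegistryClientService service =
        new LocalFileSystemRemoteConfigurationRegistryClientService();
    service.init(gatewayConfig, Collections.emptyMap());
    service.start();

    RemoteConfigurationRegistryClient client = service.get("test_client");
    client.createEntry("/knox/config/shared-providers/my-provider-config.xml", "<gateway/>");
    String data = client.getEntryData("/knox/config/shared-providers/my-provider-config.xml");
    List<String> children = client.listChildEntries("/knox/config/shared-providers");
    client.deleteEntry("/knox/config/shared-providers/my-provider-config.xml");
    service.stop();
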
http://git-wip-us.apache.org/repos/asf/knox/blob/828ea38f/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java
new file mode 100644
index 0000000..42e79c1
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote;
+
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+public class LocalFileSystemRemoteConfigurationRegistryClientServiceProvider implements RemoteConfigurationRegistryClientServiceProvider {
+
+    @Override
+    public String getType() {
+        return LocalFileSystemRemoteConfigurationRegistryClientService.TYPE;
+    }
+
+    @Override
+    public RemoteConfigurationRegistryClientService newInstance() {
+        return new LocalFileSystemRemoteConfigurationRegistryClientService();
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/828ea38f/gateway-server/src/test/java/org/apache/hadoop/gateway/util/KnoxCLITest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/util/KnoxCLITest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/util/KnoxCLITest.java
index 838f114..2d4586f 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/util/KnoxCLITest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/util/KnoxCLITest.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegis
 import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 import org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.security.MasterService;
+import org.apache.hadoop.test.TestUtils;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -66,9 +67,11 @@ public class KnoxCLITest {
   @Test
   public void testRemoteConfigurationRegistryClientService() throws Exception {
     outContent.reset();
+
     KnoxCLI cli = new KnoxCLI();
     Configuration config = new GatewayConfigImpl();
-    config.set("gateway.remote.config.registry.test_client", "type=ZooKeeper;address=localhost:2181");
+    // Configure a client for the test local filesystem registry implementation
+    config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=/test");
     cli.setConf(config);
 
     // This is only to get the gateway services initialized
@@ -84,6 +87,345 @@ public class KnoxCLITest {
   }
 
   @Test
+  public void testListRemoteConfigurationRegistryClients() throws Exception {
+    outContent.reset();
+
+    KnoxCLI cli = new KnoxCLI();
+    String[] args = { "list-registry-clients", "--master","master" };
+
+    Configuration config = new GatewayConfigImpl();
+    cli.setConf(config);
+
+    // Test with no registry clients configured
+    int rc = cli.run(args);
+    assertEquals(0, rc);
+    assertTrue(outContent.toString(), outContent.toString().isEmpty());
+
+    // Test with a single client configured
+    // Configure a client for the test local filesystem registry implementation
+    config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=/test1");
+    cli.setConf(config);
+    outContent.reset();
+    rc = cli.run(args);
+    assertEquals(0, rc);
+    assertTrue(outContent.toString(), outContent.toString().contains("test_client"));
+
+    // Configure another client for the test local filesystem registry implementation
+    config.set("gateway.remote.config.registry.another_client", "type=LocalFileSystem;address=/test2");
+    cli.setConf(config);
+    outContent.reset();
+    rc = cli.run(args);
+    assertEquals(0, rc);
+    assertTrue(outContent.toString(), outContent.toString().contains("test_client"));
+    assertTrue(outContent.toString(), outContent.toString().contains("another_client"));
+  }
+
+  @Test
+  public void testRemoteConfigurationRegistryGetACLs() throws Exception {
+    outContent.reset();
+
+
+    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
+    try {
+      final File testRegistry = new File(testRoot, "registryRoot");
+
+      final String providerConfigName = "my-provider-config.xml";
+      final String providerConfigContent = "<gateway/>\n";
+      final File testProviderConfig = new File(testRoot, providerConfigName);
+      final String[] uploadArgs = {"upload-provider-config", testProviderConfig.getAbsolutePath(),
+                                   "--registry-client", "test_client",
+                                   "--master", "master"};
+      FileUtils.writeStringToFile(testProviderConfig, providerConfigContent);
+
+
+      final String[] args = {"get-registry-acl", "/knox/config/shared-providers",
+                             "--registry-client", "test_client",
+                             "--master", "master"};
+
+      KnoxCLI cli = new KnoxCLI();
+      Configuration config = new GatewayConfigImpl();
+      // Configure a client for the test local filesystem registry implementation
+      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
+      cli.setConf(config);
+
+      int rc = cli.run(uploadArgs);
+      assertEquals(0, rc);
+
+      // Run the test command
+      rc = cli.run(args);
+
+      // Validate the result
+      assertEquals(0, rc);
+      String result = outContent.toString();
+      assertEquals(result, 3, result.split("\n").length);
+    } finally {
+      FileUtils.forceDelete(testRoot);
+    }
+  }
+
+
+  @Test
+  public void testRemoteConfigurationRegistryUploadProviderConfig() throws Exception {
+    outContent.reset();
+
+    final String providerConfigName = "my-provider-config.xml";
+    final String providerConfigContent = "<gateway/>\n";
+
+    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
+    try {
+      final File testRegistry = new File(testRoot, "registryRoot");
+      final File testProviderConfig = new File(testRoot, providerConfigName);
+
+      final String[] args = {"upload-provider-config", testProviderConfig.getAbsolutePath(),
+                             "--registry-client", "test_client",
+                             "--master", "master"};
+
+      FileUtils.writeStringToFile(testProviderConfig, providerConfigContent);
+
+      KnoxCLI cli = new KnoxCLI();
+      Configuration config = new GatewayConfigImpl();
+      // Configure a client for the test local filesystem registry implementation
+      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
+      cli.setConf(config);
+
+      // Run the test command
+      int rc = cli.run(args);
+
+      // Validate the result
+      assertEquals(0, rc);
+      File registryFile = new File(testRegistry, "knox/config/shared-providers/" + providerConfigName);
+      assertTrue(registryFile.exists());
+      assertEquals(FileUtils.readFileToString(registryFile), providerConfigContent);
+    } finally {
+      FileUtils.forceDelete(testRoot);
+    }
+  }
+
+
+  @Test
+  public void testRemoteConfigurationRegistryUploadProviderConfigWithDestinationOverride() throws Exception {
+    outContent.reset();
+
+    final String providerConfigName = "my-provider-config.xml";
+    final String entryName = "my-providers.xml";
+    final String providerConfigContent = "<gateway/>\n";
+
+    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
+    try {
+      final File testRegistry = new File(testRoot, "registryRoot");
+      final File testProviderConfig = new File(testRoot, providerConfigName);
+
+      final String[] args = {"upload-provider-config", testProviderConfig.getAbsolutePath(),
+                             "--entry-name", entryName,
+                             "--registry-client", "test_client",
+                             "--master", "master"};
+
+      FileUtils.writeStringToFile(testProviderConfig, providerConfigContent);
+
+      KnoxCLI cli = new KnoxCLI();
+      Configuration config = new GatewayConfigImpl();
+      // Configure a client for the test local filesystem registry implementation
+      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
+      cli.setConf(config);
+
+      // Run the test command
+      int rc = cli.run(args);
+
+      // Validate the result
+      assertEquals(0, rc);
+      assertFalse((new File(testRegistry, "knox/config/shared-providers/" + providerConfigName)).exists());
+      File registryFile = new File(testRegistry, "knox/config/shared-providers/" + entryName);
+      assertTrue(registryFile.exists());
+      assertEquals(FileUtils.readFileToString(registryFile), providerConfigContent);
+    } finally {
+      FileUtils.forceDelete(testRoot);
+    }
+  }
+
+
+  @Test
+  public void testRemoteConfigurationRegistryUploadDescriptor() throws Exception {
+    outContent.reset();
+
+    final String descriptorName = "my-topology.json";
+    final String descriptorContent = testDescriptorContentJSON;
+
+    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
+    try {
+      final File testRegistry = new File(testRoot, "registryRoot");
+      final File testDescriptor = new File(testRoot, descriptorName);
+
+      final String[] args = {"upload-descriptor", testDescriptor.getAbsolutePath(),
+                             "--registry-client", "test_client",
+                             "--master", "master"};
+
+      FileUtils.writeStringToFile(testDescriptor, descriptorContent);
+
+      KnoxCLI cli = new KnoxCLI();
+      Configuration config = new GatewayConfigImpl();
+      // Configure a client for the test local filesystem registry implementation
+      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
+      cli.setConf(config);
+
+      // Run the test command
+      int rc = cli.run(args);
+
+      // Validate the result
+      assertEquals(0, rc);
+      File registryFile = new File(testRegistry, "knox/config/descriptors/" + descriptorName);
+      assertTrue(registryFile.exists());
+      assertEquals(FileUtils.readFileToString(registryFile), descriptorContent);
+    } finally {
+      FileUtils.forceDelete(testRoot);
+    }
+  }
+
+  @Test
+  public void testRemoteConfigurationRegistryUploadDescriptorWithDestinationOverride() throws Exception {
+    outContent.reset();
+
+    final String descriptorName = "my-topology.json";
+    final String entryName = "different-topology.json";
+    final String descriptorContent = testDescriptorContentJSON;
+
+    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
+    try {
+      final File testRegistry = new File(testRoot, "registryRoot");
+      final File testDescriptor = new File(testRoot, descriptorName);
+
+      final String[] args = {"upload-descriptor", testDescriptor.getAbsolutePath(),
+                             "--entry-name", entryName,
+                             "--registry-client", "test_client",
+                             "--master", "master"};
+
+      FileUtils.writeStringToFile(testDescriptor, descriptorContent);
+
+      KnoxCLI cli = new KnoxCLI();
+      Configuration config = new GatewayConfigImpl();
+      // Configure a client for the test local filesystem registry implementation
+      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
+      cli.setConf(config);
+
+      // Run the test command
+      int rc = cli.run(args);
+
+      // Validate the result
+      assertEquals(0, rc);
+      assertFalse((new File(testRegistry, "knox/config/descriptors/" + descriptorName)).exists());
+      File registryFile = new File(testRegistry, "knox/config/descriptors/" + entryName);
+      assertTrue(registryFile.exists());
+      assertEquals(FileUtils.readFileToString(registryFile), descriptorContent);
+    } finally {
+      FileUtils.forceDelete(testRoot);
+    }
+  }
+
+  @Test
+  public void testRemoteConfigurationRegistryDeleteProviderConfig() throws Exception {
+    outContent.reset();
+
+    // Create a provider config
+    final String providerConfigName = "my-provider-config.xml";
+    final String providerConfigContent = "<gateway/>\n";
+
+    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
+    try {
+      final File testRegistry = new File(testRoot, "registryRoot");
+      final File testProviderConfig = new File(testRoot, providerConfigName);
+
+      final String[] createArgs = {"upload-provider-config", testProviderConfig.getAbsolutePath(),
+                                   "--registry-client", "test_client",
+                                   "--master", "master"};
+
+      FileUtils.writeStringToFile(testProviderConfig, providerConfigContent);
+
+      KnoxCLI cli = new KnoxCLI();
+      Configuration config = new GatewayConfigImpl();
+      // Configure a client for the test local filesystem registry implementation
+      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
+      cli.setConf(config);
+
+      // Run the test command
+      int rc = cli.run(createArgs);
+
+      // Validate the result
+      assertEquals(0, rc);
+      File registryFile = new File(testRegistry, "knox/config/shared-providers/" + providerConfigName);
+      assertTrue(registryFile.exists());
+
+      outContent.reset();
+
+      // Delete the created provider config
+      final String[] deleteArgs = {"delete-provider-config", providerConfigName,
+                                   "--registry-client", "test_client",
+                                   "--master", "master"};
+      rc = cli.run(deleteArgs);
+      assertEquals(0, rc);
+      assertFalse(registryFile.exists());
+
+      // Try to delete a provider config that does not exist
+      rc = cli.run(new String[]{"delete-provider-config", "imaginary-providers.xml",
+                                "--registry-client", "test_client",
+                                "--master", "master"});
+      assertEquals(0, rc);
+    } finally {
+      FileUtils.forceDelete(testRoot);
+    }
+  }
+
+  @Test
+  public void testRemoteConfigurationRegistryDeleteDescriptor() throws Exception {
+    outContent.reset();
+
+    final String descriptorName = "my-topology.json";
+    final String descriptorContent = testDescriptorContentJSON;
+
+    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
+    try {
+      final File testRegistry = new File(testRoot, "registryRoot");
+      final File testDescriptor = new File(testRoot, descriptorName);
+
+      final String[] createArgs = {"upload-descriptor", testDescriptor.getAbsolutePath(),
+                             "--registry-client", "test_client",
+                             "--master", "master"};
+
+      FileUtils.writeStringToFile(testDescriptor, descriptorContent);
+
+      KnoxCLI cli = new KnoxCLI();
+      Configuration config = new GatewayConfigImpl();
+      // Configure a client for the test local filesystem registry implementation
+      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
+      cli.setConf(config);
+
+      // Run the test command
+      int rc = cli.run(createArgs);
+
+      // Validate the result
+      assertEquals(0, rc);
+      File registryFile = new File(testRegistry, "knox/config/descriptors/" + descriptorName);
+      assertTrue(registryFile.exists());
+
+      outContent.reset();
+
+      // Delete the created descriptor
+      final String[] deleteArgs = {"delete-descriptor", descriptorName,
+                                   "--registry-client", "test_client",
+                                   "--master", "master"};
+      rc = cli.run(deleteArgs);
+      assertEquals(0, rc);
+      assertFalse(registryFile.exists());
+
+      // Try to delete a descriptor that does not exist
+      rc = cli.run(new String[]{"delete-descriptor", "bogus.json",
+                                "--registry-client", "test_client",
+                                "--master", "master"});
+      assertEquals(0, rc);
+    } finally {
+      FileUtils.forceDelete(testRoot);
+    }
+  }
+
+  @Test
   public void testSuccessfulAliasLifecycle() throws Exception {
     outContent.reset();
     String[] args1 = {"create-alias", "alias1", "--value", "testvalue1", "--master", "master"};
@@ -670,4 +1012,21 @@ public class KnoxCLITest {
 
   }
 
+  private static final String testDescriptorContentJSON = "{\n" +
+                                                          "  \"discovery-address\":\"http://localhost:8080\",\n" +
+                                                          "  \"discovery-user\":\"maria_dev\",\n" +
+                                                          "  \"discovery-pwd-alias\":\"sandbox.discovery.password\",\n" +
+                                                          "  \"provider-config-ref\":\"my-provider-config\",\n" +
+                                                          "  \"cluster\":\"Sandbox\",\n" +
+                                                          "  \"services\":[\n" +
+                                                          "    {\"name\":\"NAMENODE\"},\n" +
+                                                          "    {\"name\":\"JOBTRACKER\"},\n" +
+                                                          "    {\"name\":\"WEBHDFS\"},\n" +
+                                                          "    {\"name\":\"WEBHCAT\"},\n" +
+                                                          "    {\"name\":\"OOZIE\"},\n" +
+                                                          "    {\"name\":\"WEBHBASE\"},\n" +
+                                                          "    {\"name\":\"HIVE\"},\n" +
+                                                          "    {\"name\":\"RESOURCEMANAGER\"}\n" +
+                                                          "  ]\n" +
+                                                          "}";
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/828ea38f/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
new file mode 100644
index 0000000..ffd9284
--- /dev/null
+++ b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.service.config.remote.LocalFileSystemRemoteConfigurationRegistryClientServiceProvider

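This registration makes the test provider discoverable at runtime. The actual lookup in Knox is performed by RemoteConfigurationRegistryClientServiceFactory (not shown in this hunk), but conceptually it amounts to a java.util.ServiceLoader scan along these lines; treat the snippet as an illustrative sketch rather than the factory's real code:

    ServiceLoader<RemoteConfigurationRegistryClientServiceProvider> loader =
        ServiceLoader.load(RemoteConfigurationRegistryClientServiceProvider.class);
    for (RemoteConfigurationRegistryClientServiceProvider provider : loader) {
        // Match the configured registry type, e.g. "LocalFileSystem" for the test implementation
        if (LocalFileSystemRemoteConfigurationRegistryClientService.TYPE.equalsIgnoreCase(provider.getType())) {
            RemoteConfigurationRegistryClientService service = provider.newInstance();
            // the gateway services layer would then init/start the service
        }
    }
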

[49/49] knox git commit: KNOX-998 - Merge from trunk 0.14.0 code

Posted by mo...@apache.org.
KNOX-998 - Merge from trunk 0.14.0 code


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/e766b3b7
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/e766b3b7
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/e766b3b7

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: e766b3b77bf2d3a0a00e4f8bf8ef261a5f8122fb
Parents: 22a7304
Author: Sandeep More <mo...@apache.org>
Authored: Thu Dec 14 16:11:49 2017 -0500
Committer: Sandeep More <mo...@apache.org>
Committed: Thu Dec 14 16:11:49 2017 -0500

----------------------------------------------------------------------
 .../discovery/ambari/AmbariClientCommon.java    | 102 ----
 ...bariClusterConfigurationMonitorProvider.java |  35 --
 .../ambari/AmbariConfigurationMonitor.java      | 525 ----------------
 .../topology/discovery/ambari/RESTInvoker.java  | 136 -----
 .../discovery/ambari/AmbariClientCommon.java    | 102 ++++
 ...bariClusterConfigurationMonitorProvider.java |  36 ++
 .../ambari/AmbariConfigurationMonitor.java      | 525 ++++++++++++++++
 .../topology/discovery/ambari/RESTInvoker.java  | 136 +++++
 ...iscovery.ClusterConfigurationMonitorProvider |  19 -
 ...iscovery.ClusterConfigurationMonitorProvider |  19 +
 .../ambari/AmbariConfigurationMonitorTest.java  | 319 ----------
 .../ambari/AmbariConfigurationMonitorTest.java  | 319 ++++++++++
 ...faultClusterConfigurationMonitorService.java |  81 ---
 .../DefaultConfigurationMonitorProvider.java    |  31 -
 .../DefaultRemoteConfigurationMonitor.java      | 228 -------
 .../RemoteConfigurationMonitorFactory.java      |  74 ---
 .../gateway/services/CLIGatewayServices.java    |   2 +-
 ...faultClusterConfigurationMonitorService.java |  81 +++
 .../DefaultConfigurationMonitorProvider.java    |  31 +
 .../DefaultRemoteConfigurationMonitor.java      | 228 +++++++
 .../RemoteConfigurationMonitorFactory.java      |  74 +++
 .../org/apache/knox/gateway/util/KnoxCLI.java   |  16 +-
 ...y.monitor.RemoteConfigurationMonitorProvider |  19 -
 ...y.monitor.RemoteConfigurationMonitorProvider |  19 +
 ...emoteConfigurationRegistryClientService.java | 263 --------
 ...figurationRegistryClientServiceProvider.java |  32 -
 .../ZooKeeperConfigurationMonitorTest.java      | 355 -----------
 ...emoteConfigurationRegistryClientService.java | 263 ++++++++
 ...figurationRegistryClientServiceProvider.java |  32 +
 .../ZooKeeperConfigurationMonitorTest.java      | 355 +++++++++++
 .../apache/knox/gateway/util/KnoxCLITest.java   |   2 +-
 ...teConfigurationRegistryClientServiceProvider |  19 -
 ...teConfigurationRegistryClientServiceProvider |  19 +
 .../services/ambariui/2.2.1/service.xml         |   0
 .../remote/RemoteConfigurationMessages.java     |  49 --
 ...nfigurationRegistryClientServiceFactory.java |  41 --
 ...figurationRegistryClientServiceProvider.java |  27 -
 .../RemoteConfigurationRegistryConfig.java      |  43 --
 .../DefaultRemoteConfigurationRegistries.java   | 104 ----
 .../config/RemoteConfigurationRegistries.java   |  33 -
 .../RemoteConfigurationRegistriesAccessor.java  |  60 --
 .../RemoteConfigurationRegistriesParser.java    |  48 --
 .../config/RemoteConfigurationRegistry.java     | 139 -----
 .../config/remote/zk/CuratorClientService.java  | 464 --------------
 .../RemoteConfigurationRegistryJAASConfig.java  | 179 ------
 .../remote/zk/ZooKeeperClientService.java       |  25 -
 .../zk/ZooKeeperClientServiceProvider.java      |  34 --
 .../remote/RemoteConfigurationMessages.java     |  49 ++
 ...nfigurationRegistryClientServiceFactory.java |  41 ++
 ...figurationRegistryClientServiceProvider.java |  27 +
 .../RemoteConfigurationRegistryConfig.java      |  43 ++
 .../DefaultRemoteConfigurationRegistries.java   | 104 ++++
 .../config/RemoteConfigurationRegistries.java   |  33 +
 .../RemoteConfigurationRegistriesAccessor.java  |  60 ++
 .../RemoteConfigurationRegistriesParser.java    |  48 ++
 .../config/RemoteConfigurationRegistry.java     | 139 +++++
 .../config/remote/zk/CuratorClientService.java  | 464 ++++++++++++++
 .../RemoteConfigurationRegistryJAASConfig.java  | 179 ++++++
 .../remote/zk/ZooKeeperClientService.java       |  25 +
 .../zk/ZooKeeperClientServiceProvider.java      |  34 ++
 ...teConfigurationRegistryClientServiceProvider |  19 -
 ...teConfigurationRegistryClientServiceProvider |  19 +
 ...efaultRemoteConfigurationRegistriesTest.java | 184 ------
 ...teConfigurationRegistryConfigParserTest.java | 108 ----
 .../util/RemoteRegistryConfigTestUtils.java     | 117 ----
 ...eConfigurationRegistryClientServiceTest.java | 424 -------------
 ...moteConfigurationRegistryJAASConfigTest.java | 255 --------
 ...efaultRemoteConfigurationRegistriesTest.java | 184 ++++++
 ...teConfigurationRegistryConfigParserTest.java | 115 ++++
 .../util/RemoteRegistryConfigTestUtils.java     | 117 ++++
 ...eConfigurationRegistryClientServiceTest.java | 424 +++++++++++++
 ...moteConfigurationRegistryJAASConfigTest.java | 255 ++++++++
 .../RemoteConfigurationRegistryClient.java      |  80 ---
 ...emoteConfigurationRegistryClientService.java |  28 -
 .../ClusterConfigurationMonitorService.java     |  43 --
 .../discovery/ClusterConfigurationMonitor.java  |  48 --
 .../ClusterConfigurationMonitorProvider.java    |  27 -
 .../monitor/RemoteConfigurationMonitor.java     |  24 -
 .../RemoteConfigurationMonitorProvider.java     |  34 --
 .../RemoteConfigurationRegistryClient.java      |  80 +++
 ...emoteConfigurationRegistryClientService.java |  28 +
 .../ClusterConfigurationMonitorService.java     |  43 ++
 .../discovery/ClusterConfigurationMonitor.java  |  48 ++
 .../ClusterConfigurationMonitorProvider.java    |  27 +
 .../monitor/RemoteConfigurationMonitor.java     |  24 +
 .../RemoteConfigurationMonitorProvider.java     |  34 ++
 .../SimpleDescriptorHandlerFuncTest.java        | 275 ---------
 .../monitor/RemoteConfigurationMonitorTest.java | 603 -------------------
 .../SimpleDescriptorHandlerFuncTest.java        | 275 +++++++++
 .../monitor/RemoteConfigurationMonitorTest.java | 603 +++++++++++++++++++
 ...eway.topology.discovery.ServiceDiscoveryType |  19 -
 ...eway.topology.discovery.ServiceDiscoveryType |  19 +
 92 files changed, 5790 insertions(+), 5782 deletions(-)
----------------------------------------------------------------------

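The bulk of this merge reflects the branch's package restructuring: the trunk sources under org.apache.hadoop.gateway are removed and re-added under the org.apache.knox.gateway hierarchy, which is why most files appear in the stat above as paired deletions and additions. For callers, the change is essentially limited to the package prefix, sketched here for illustration only:

    // before the restructuring (trunk package)
    import org.apache.hadoop.gateway.util.KnoxCLI;

    // after the merge onto the KNOX-998 branch
    import org.apache.knox.gateway.util.KnoxCLI;
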

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClientCommon.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClientCommon.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClientCommon.java
deleted file mode 100644
index a2bf4ea..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClientCommon.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import net.minidev.json.JSONArray;
-import net.minidev.json.JSONObject;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-
-import java.util.HashMap;
-import java.util.Map;
-
-class AmbariClientCommon {
-
-    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
-
-    static final String AMBARI_HOSTROLES_URI =
-                                    AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
-
-    static final String AMBARI_SERVICECONFIGS_URI =
-                                    AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
-
-    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
-    private RESTInvoker restClient;
-
-
-    AmbariClientCommon(AliasService aliasService) {
-        this(new RESTInvoker(aliasService));
-    }
-
-
-    AmbariClientCommon(RESTInvoker restInvoker) {
-        this.restClient = restInvoker;
-    }
-
-
-
-    Map<String, Map<String, AmbariCluster.ServiceConfiguration>> getActiveServiceConfigurations(String clusterName,
-                                                                                                ServiceDiscoveryConfig config) {
-        return getActiveServiceConfigurations(config.getAddress(),
-                                              clusterName,
-                                              config.getUser(),
-                                              config.getPasswordAlias());
-    }
-
-
-    Map<String, Map<String, AmbariCluster.ServiceConfiguration>> getActiveServiceConfigurations(String discoveryAddress,
-                                                                                                String clusterName,
-                                                                                                String discoveryUser,
-                                                                                                String discoveryPwdAlias) {
-        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations = new HashMap<>();
-
-        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
-
-        JSONObject serviceConfigsJSON = restClient.invoke(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
-        if (serviceConfigsJSON != null) {
-            // Process the service configurations
-            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
-            for (Object serviceConfig : serviceConfigs) {
-                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
-                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
-                for (Object configuration : configurations) {
-                    String configType = (String) ((JSONObject) configuration).get("type");
-                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
-
-                    Map<String, String> configProps = new HashMap<>();
-                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
-                    for (String propertyName : configProperties.keySet()) {
-                        configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
-                    }
-                    if (!serviceConfigurations.containsKey(serviceName)) {
-                        serviceConfigurations.put(serviceName, new HashMap<>());
-                    }
-                    serviceConfigurations.get(serviceName).put(configType,
-                                                               new AmbariCluster.ServiceConfiguration(configType,
-                                                                                                      configVersion,
-                                                                                                      configProps));
-                }
-            }
-        }
-
-        return serviceConfigurations;
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java
deleted file mode 100644
index 3b31124..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
-import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider;
-
-public class AmbariClusterConfigurationMonitorProvider implements ClusterConfigurationMonitorProvider {
-
-    @Override
-    public String getType() {
-        return AmbariConfigurationMonitor.getType();
-    }
-
-    @Override
-    public ClusterConfigurationMonitor newInstance(GatewayConfig config, AliasService aliasService) {
-        return new AmbariConfigurationMonitor(config, aliasService);
-    }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java
deleted file mode 100644
index e4b5e43..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java
+++ /dev/null
@@ -1,525 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-
-class AmbariConfigurationMonitor implements ClusterConfigurationMonitor {
-
-    private static final String TYPE = "Ambari";
-
-    private static final String CLUSTERS_DATA_DIR_NAME = "clusters";
-
-    private static final String PERSISTED_FILE_COMMENT = "Generated File. Do Not Edit!";
-
-    private static final String PROP_CLUSTER_PREFIX = "cluster.";
-    private static final String PROP_CLUSTER_SOURCE = PROP_CLUSTER_PREFIX + "source";
-    private static final String PROP_CLUSTER_NAME   = PROP_CLUSTER_PREFIX + "name";
-    private static final String PROP_CLUSTER_USER   = PROP_CLUSTER_PREFIX + "user";
-    private static final String PROP_CLUSTER_ALIAS  = PROP_CLUSTER_PREFIX + "pwd.alias";
-
-    static final String INTERVAL_PROPERTY_NAME = "org.apache.hadoop.gateway.topology.discovery.ambari.monitor.interval";
-
-
-    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
-    // Ambari address
-    //    clusterName -> ServiceDiscoveryConfig
-    //
-    Map<String, Map<String, ServiceDiscoveryConfig>> clusterMonitorConfigurations = new HashMap<>();
-
-    // Ambari address
-    //    clusterName
-    //        configType -> version
-    //
-    Map<String, Map<String, Map<String, String>>> ambariClusterConfigVersions = new HashMap<>();
-
-    ReadWriteLock configVersionsLock = new ReentrantReadWriteLock();
-
-    private List<ConfigurationChangeListener> changeListeners = new ArrayList<>();
-
-    private AmbariClientCommon ambariClient;
-
-    PollingConfigAnalyzer internalMonitor;
-
-    GatewayConfig gatewayConfig = null;
-
-    static String getType() {
-        return TYPE;
-    }
-
-    AmbariConfigurationMonitor(GatewayConfig config, AliasService aliasService) {
-        this.gatewayConfig   = config;
-        this.ambariClient    = new AmbariClientCommon(aliasService);
-        this.internalMonitor = new PollingConfigAnalyzer(this);
-
-        // Override the default polling interval if it has been configured
-        int interval = config.getClusterMonitorPollingInterval(getType());
-        if (interval > 0) {
-            setPollingInterval(interval);
-        }
-
-        init();
-    }
-
-    @Override
-    public void setPollingInterval(int interval) {
-        internalMonitor.setInterval(interval);
-    }
-
-    private void init() {
-        loadDiscoveryConfiguration();
-        loadClusterVersionData();
-    }
-
-    /**
-     * Load any previously-persisted service discovery configurations.
-     * This is necessary for checking previously-deployed topologies.
-     */
-    private void loadDiscoveryConfiguration() {
-        File persistenceDir = getPersistenceDir();
-        if (persistenceDir != null) {
-            Collection<File> persistedConfigs = FileUtils.listFiles(persistenceDir, new String[]{"conf"}, false);
-            for (File persisted : persistedConfigs) {
-                Properties props = new Properties();
-                try {
-                    props.load(new FileInputStream(persisted));
-
-                    addDiscoveryConfig(props.getProperty(PROP_CLUSTER_NAME), new ServiceDiscoveryConfig() {
-                                                            public String getAddress() {
-                                                                return props.getProperty(PROP_CLUSTER_SOURCE);
-                                                            }
-
-                                                            public String getUser() {
-                                                                return props.getProperty(PROP_CLUSTER_USER);
-                                                            }
-
-                                                            public String getPasswordAlias() {
-                                                                return props.getProperty(PROP_CLUSTER_ALIAS);
-                                                            }
-                                                        });
-                } catch (IOException e) {
-                    log.failedToLoadClusterMonitorServiceDiscoveryConfig(getType(), e);
-                }
-            }
-        }
-    }
-
-    /**
-     * Load any previously-persisted cluster configuration version records, so the monitor will check
-     * previously-deployed topologies against the current cluster configuration.
-     */
-    private void loadClusterVersionData() {
-        File persistenceDir = getPersistenceDir();
-        if (persistenceDir != null) {
-            Collection<File> persistedConfigs = FileUtils.listFiles(getPersistenceDir(), new String[]{"ver"}, false);
-            for (File persisted : persistedConfigs) {
-                Properties props = new Properties();
-                try {
-                    props.load(new FileInputStream(persisted));
-
-                    String source = props.getProperty(PROP_CLUSTER_SOURCE);
-                    String clusterName = props.getProperty(PROP_CLUSTER_NAME);
-
-                    Map<String, String> configVersions = new HashMap<>();
-                    for (String name : props.stringPropertyNames()) {
-                        if (!name.startsWith(PROP_CLUSTER_PREFIX)) { // Ignore implementation-specific properties
-                            configVersions.put(name, props.getProperty(name));
-                        }
-                    }
-
-                    // Map the config versions to the cluster name
-                    addClusterConfigVersions(source, clusterName, configVersions);
-
-                } catch (IOException e) {
-                    log.failedToLoadClusterMonitorConfigVersions(getType(), e);
-                }
-            }
-        }
-    }
-
-    private void persistDiscoveryConfiguration(String clusterName, ServiceDiscoveryConfig sdc) {
-        File persistenceDir = getPersistenceDir();
-        if (persistenceDir != null) {
-
-            Properties props = new Properties();
-            props.setProperty(PROP_CLUSTER_NAME, clusterName);
-            props.setProperty(PROP_CLUSTER_SOURCE, sdc.getAddress());
-
-            String username = sdc.getUser();
-            if (username != null) {
-                props.setProperty(PROP_CLUSTER_USER, username);
-            }
-            String pwdAlias = sdc.getPasswordAlias();
-            if (pwdAlias != null) {
-                props.setProperty(PROP_CLUSTER_ALIAS, pwdAlias);
-            }
-
-            persist(props, getDiscoveryConfigPersistenceFile(sdc.getAddress(), clusterName));
-        }
-    }
-
-    private void persistClusterVersionData(String address, String clusterName, Map<String, String> configVersions) {
-        File persistenceDir = getPersistenceDir();
-        if (persistenceDir != null) {
-            Properties props = new Properties();
-            props.setProperty(PROP_CLUSTER_NAME, clusterName);
-            props.setProperty(PROP_CLUSTER_SOURCE, address);
-            for (String name : configVersions.keySet()) {
-                props.setProperty(name, configVersions.get(name));
-            }
-
-            persist(props, getConfigVersionsPersistenceFile(address, clusterName));
-        }
-    }
-
-    private void persist(Properties props, File dest) {
-        try {
-            props.store(new FileOutputStream(dest), PERSISTED_FILE_COMMENT);
-        } catch (Exception e) {
-            log.failedToPersistClusterMonitorData(getType(), dest.getAbsolutePath(), e);
-        }
-    }
-
-    private File getPersistenceDir() {
-        File persistenceDir = null;
-
-        File dataDir = new File(gatewayConfig.getGatewayDataDir());
-        if (dataDir.exists()) {
-            File clustersDir = new File(dataDir, CLUSTERS_DATA_DIR_NAME);
-            if (!clustersDir.exists()) {
-                clustersDir.mkdirs();
-            }
-            persistenceDir = clustersDir;
-        }
-
-        return persistenceDir;
-    }
-
-    private File getDiscoveryConfigPersistenceFile(String address, String clusterName) {
-        return getPersistenceFile(address, clusterName, "conf");
-    }
-
-    private File getConfigVersionsPersistenceFile(String address, String clusterName) {
-        return getPersistenceFile(address, clusterName, "ver");
-    }
-
-    private File getPersistenceFile(String address, String clusterName, String ext) {
-        String fileName = address.replace(":", "_").replace("/", "_") + "-" + clusterName + "." + ext;
-        return new File(getPersistenceDir(), fileName);
-    }
-
-    /**
-     * Add cluster configuration details to the monitor's in-memory record.
-     *
-     * @param address        An Ambari instance address.
-     * @param clusterName    The name of a cluster associated with the Ambari instance.
-     * @param configVersions A Map of configuration types and their corresponding versions.
-     */
-    private void addClusterConfigVersions(String address, String clusterName, Map<String, String> configVersions) {
-        configVersionsLock.writeLock().lock();
-        try {
-            ambariClusterConfigVersions.computeIfAbsent(address, k -> new HashMap<>())
-                                       .put(clusterName, configVersions);
-        } finally {
-            configVersionsLock.writeLock().unlock();
-        }
-    }
-
-    public void start() {
-        (new Thread(internalMonitor, "AmbariConfigurationMonitor")).start();
-    }
-
-    public void stop() {
-        internalMonitor.stop();
-    }
-
-    @Override
-    public void addListener(ConfigurationChangeListener listener) {
-        changeListeners.add(listener);
-    }
-
-    /**
-     * Add discovery configuration details for the specified cluster, so the monitor knows how to connect to check for
-     * changes.
-     *
-     * @param clusterName The name of the cluster.
-     * @param config      The associated service discovery configuration.
-     */
-    void addDiscoveryConfig(String clusterName, ServiceDiscoveryConfig config) {
-        clusterMonitorConfigurations.computeIfAbsent(config.getAddress(), k -> new HashMap<>()).put(clusterName, config);
-    }
-
-
-    /**
-     * Get the service discovery configuration associated with the specified Ambari instance and cluster.
-     *
-     * @param address     An Ambari instance address.
-     * @param clusterName The name of a cluster associated with the Ambari instance.
-     *
-     * @return The associated ServiceDiscoveryConfig object.
-     */
-    ServiceDiscoveryConfig getDiscoveryConfig(String address, String clusterName) {
-        ServiceDiscoveryConfig config = null;
-        if (clusterMonitorConfigurations.containsKey(address)) {
-            config = clusterMonitorConfigurations.get(address).get(clusterName);
-        }
-        return config;
-    }
-
-
-    /**
-     * Add cluster configuration data to the monitor, which it will use when determining if configuration has changed.
-     *
-     * @param cluster         An AmbariCluster object.
-     * @param discoveryConfig The discovery configuration associated with the cluster.
-     */
-    void addClusterConfigVersions(AmbariCluster cluster, ServiceDiscoveryConfig discoveryConfig) {
-
-        String clusterName = cluster.getName();
-
-        // Register the cluster discovery configuration for the monitor connections
-        persistDiscoveryConfiguration(clusterName, discoveryConfig);
-        addDiscoveryConfig(clusterName, discoveryConfig);
-
-        // Build the set of configuration versions
-        Map<String, String> configVersions = new HashMap<>();
-        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigs = cluster.getServiceConfigurations();
-        for (String serviceName : serviceConfigs.keySet()) {
-            Map<String, AmbariCluster.ServiceConfiguration> configTypeVersionMap = serviceConfigs.get(serviceName);
-            for (AmbariCluster.ServiceConfiguration config : configTypeVersionMap.values()) {
-                String configType = config.getType();
-                String version = config.getVersion();
-                configVersions.put(configType, version);
-            }
-        }
-
-        persistClusterVersionData(discoveryConfig.getAddress(), clusterName, configVersions);
-        addClusterConfigVersions(discoveryConfig.getAddress(), clusterName, configVersions);
-    }
-
-
-    /**
-     * Remove the configuration record for the specified Ambari instance and cluster name.
-     *
-     * @param address     An Ambari instance address.
-     * @param clusterName The name of a cluster associated with the Ambari instance.
-     *
-     * @return The removed data; A Map of configuration types and their corresponding versions.
-     */
-    Map<String, String> removeClusterConfigVersions(String address, String clusterName) {
-        Map<String, String> result = new HashMap<>();
-
-        configVersionsLock.writeLock().lock();
-        try {
-            if (ambariClusterConfigVersions.containsKey(address)) {
-                result.putAll(ambariClusterConfigVersions.get(address).remove(clusterName));
-            }
-        } finally {
-            configVersionsLock.writeLock().unlock();
-        }
-
-        // Delete the associated persisted record
-        File persisted = getConfigVersionsPersistenceFile(address, clusterName);
-        if (persisted.exists()) {
-            persisted.delete();
-        }
-
-        return result;
-    }
-
-    /**
-     * Get the cluster configuration details for the specified cluster and Ambari instance.
-     *
-     * @param address     An Ambari instance address.
-     * @param clusterName The name of a cluster associated with the Ambari instance.
-     *
-     * @return A Map of configuration types and their corresponding versions.
-     */
-    Map<String, String> getClusterConfigVersions(String address, String clusterName) {
-        Map<String, String> result = new HashMap<>();
-
-        configVersionsLock.readLock().lock();
-        try {
-            if (ambariClusterConfigVersions.containsKey(address)) {
-                result.putAll(ambariClusterConfigVersions.get(address).get(clusterName));
-            }
-        } finally {
-            configVersionsLock.readLock().unlock();
-        }
-
-        return result;
-    }
-
-
-    /**
-     * Get all the clusters the monitor knows about.
-     *
-     * @return A Map of Ambari instance addresses to associated cluster names.
-     */
-    Map<String, List<String>> getClusterNames() {
-        Map<String, List<String>> result = new HashMap<>();
-
-        configVersionsLock.readLock().lock();
-        try {
-            for (String address : ambariClusterConfigVersions.keySet()) {
-                List<String> clusterNames = new ArrayList<>();
-                clusterNames.addAll(ambariClusterConfigVersions.get(address).keySet());
-                result.put(address, clusterNames);
-            }
-        } finally {
-            configVersionsLock.readLock().unlock();
-        }
-
-        return result;
-
-    }
-
-
-    /**
-     * Notify registered change listeners.
-     *
-     * @param source      The address of the Ambari instance from which the cluster details were determined.
-     * @param clusterName The name of the cluster whose configuration details have changed.
-     */
-    void notifyChangeListeners(String source, String clusterName) {
-        for (ConfigurationChangeListener listener : changeListeners) {
-            listener.onConfigurationChange(source, clusterName);
-        }
-    }
-
-
-    /**
-     * Request the current active configuration version info from Ambari.
-     *
-     * @param address     The Ambari instance address.
-     * @param clusterName The name of the cluster for which the details are desired.
-     *
-     * @return A Map of service configuration types and their corresponding versions.
-     */
-    Map<String, String> getUpdatedConfigVersions(String address, String clusterName) {
-        Map<String, String> configVersions = new HashMap<>();
-
-        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigs =
-                    ambariClient.getActiveServiceConfigurations(clusterName, getDiscoveryConfig(address, clusterName));
-
-        for (Map<String, AmbariCluster.ServiceConfiguration> serviceConfig : serviceConfigs.values()) {
-            for (AmbariCluster.ServiceConfiguration config : serviceConfig.values()) {
-                configVersions.put(config.getType(), config.getVersion());
-            }
-        }
-
-        return configVersions;
-    }
-
-
-    /**
-     * The thread that polls Ambari for configuration details for clusters associated with discovered topologies,
-     * compares them with the current recorded values, and notifies any listeners when differences are discovered.
-     */
-    static final class PollingConfigAnalyzer implements Runnable {
-
-        private static final int DEFAULT_POLLING_INTERVAL = 60;
-
-        // Polling interval in seconds
-        private int interval = DEFAULT_POLLING_INTERVAL;
-
-        private AmbariConfigurationMonitor delegate;
-
-        private boolean isActive = false;
-
-        PollingConfigAnalyzer(AmbariConfigurationMonitor delegate) {
-            this.delegate = delegate;
-            this.interval = Integer.getInteger(INTERVAL_PROPERTY_NAME, PollingConfigAnalyzer.DEFAULT_POLLING_INTERVAL);
-        }
-
-        void setInterval(int interval) {
-            this.interval = interval;
-        }
-
-
-        void stop() {
-            isActive = false;
-        }
-
-        @Override
-        public void run() {
-            isActive = true;
-
-            log.startedAmbariConfigMonitor(interval);
-
-            while (isActive) {
-                for (Map.Entry<String, List<String>> entry : delegate.getClusterNames().entrySet()) {
-                    String address = entry.getKey();
-                    for (String clusterName : entry.getValue()) {
-                        Map<String, String> configVersions = delegate.getClusterConfigVersions(address, clusterName);
-                        if (configVersions != null && !configVersions.isEmpty()) {
-                            Map<String, String> updatedVersions = delegate.getUpdatedConfigVersions(address, clusterName);
-                            if (updatedVersions != null && !updatedVersions.isEmpty()) {
-                                boolean configHasChanged = false;
-
-                                // If the config sets don't match in size, then something has changed
-                                if (updatedVersions.size() != configVersions.size()) {
-                                    configHasChanged = true;
-                                } else {
-                                    // Perform the comparison of all the config versions
-                                    for (Map.Entry<String, String> configVersion : configVersions.entrySet()) {
-                                        if (!updatedVersions.get(configVersion.getKey()).equals(configVersion.getValue())) {
-                                            configHasChanged = true;
-                                            break;
-                                        }
-                                    }
-                                }
-
-                                // If a change has occurred, notify the listeners
-                                if (configHasChanged) {
-                                    delegate.notifyChangeListeners(address, clusterName);
-                                }
-                            }
-                        }
-                    }
-                }
-
-                try {
-                    Thread.sleep(interval * 1000);
-                } catch (InterruptedException e) {
-                    // Ignore
-                }
-            }
-        }
-    }
-
-}
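
For reference, the monitor above persists its per-cluster state as Java properties files under <gateway-data-dir>/clusters: a "*.conf" file for discovery settings and a "*.ver" file mapping configuration types to versions. The sketch below is illustrative only; the property keys and file-name pattern follow the code above, while the address, cluster name and version values are invented.

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class PersistedVersionsSketch {
        public static void main(String[] args) throws IOException {
            // Mirrors persistClusterVersionData(): the cluster.* keys plus one
            // configType -> version entry per active service configuration type.
            Properties sample = new Properties();
            sample.setProperty("cluster.name", "mycluster");
            sample.setProperty("cluster.source", "http://ambari.example.com:8080");
            sample.setProperty("hive-site", "3");
            sample.setProperty("core-site", "7");

            // getPersistenceFile() replaces ':' and '/' in the address with '_',
            // so this record lands in http___ambari.example.com_8080-mycluster.ver
            try (FileOutputStream out = new FileOutputStream("http___ambari.example.com_8080-mycluster.ver")) {
                sample.store(out, "Generated File. Do Not Edit!");
            }
        }
    }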

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/RESTInvoker.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/RESTInvoker.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/RESTInvoker.java
deleted file mode 100644
index 6a6fad8..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/RESTInvoker.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import net.minidev.json.JSONObject;
-import net.minidev.json.JSONValue;
-import org.apache.hadoop.gateway.config.ConfigurationException;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.AliasServiceException;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpStatus;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.message.BasicHeader;
-import org.apache.http.util.EntityUtils;
-
-import java.io.IOException;
-
-class RESTInvoker {
-
-    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
-    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
-
-    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
-    private AliasService aliasService = null;
-
-    private CloseableHttpClient httpClient = org.apache.http.impl.client.HttpClients.createDefault();
-
-
-    RESTInvoker(AliasService aliasService) {
-        this.aliasService = aliasService;
-    }
-
-
-    JSONObject invoke(String url, String username, String passwordAlias) {
-        JSONObject result = null;
-
-        CloseableHttpResponse response = null;
-        try {
-            HttpGet request = new HttpGet(url);
-
-            // If no configured username, then use default username alias
-            String password = null;
-            if (username == null) {
-                if (aliasService != null) {
-                    try {
-                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
-                        if (defaultUser != null) {
-                            username = new String(defaultUser);
-                        }
-                    } catch (AliasServiceException e) {
-                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
-                    }
-                }
-
-                // If username is still null
-                if (username == null) {
-                    log.aliasServiceUserNotFound();
-                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
-                }
-            }
-
-            if (aliasService != null) {
-                // If no password alias is configured, then try the default alias
-                if (passwordAlias == null) {
-                    passwordAlias = DEFAULT_PWD_ALIAS;
-                }
-
-                try {
-                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
-                    if (pwd != null) {
-                        password = new String(pwd);
-                    }
-
-                } catch (AliasServiceException e) {
-                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
-                }
-            }
-
-            // If the password could not be determined
-            if (password == null) {
-                log.aliasServicePasswordNotFound();
-                throw new ConfigurationException("No password is configured for Ambari service discovery.");
-            }
-
-            // Add an auth header if credentials are available
-            String encodedCreds =
-                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
-            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
-
-            response = httpClient.execute(request);
-
-            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
-                HttpEntity entity = response.getEntity();
-                if (entity != null) {
-                    result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
-                    log.debugJSON(result.toJSONString());
-                } else {
-                    log.noJSON(url);
-                }
-            } else {
-                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
-            }
-
-        } catch (IOException e) {
-            log.restInvocationError(url, e);
-        } finally {
-            if(response != null) {
-                try {
-                    response.close();
-                } catch (IOException e) {
-                    // Ignore
-                }
-            }
-        }
-        return result;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariClientCommon.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariClientCommon.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariClientCommon.java
new file mode 100644
index 0000000..9e5dcb3
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariClientCommon.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.util.HashMap;
+import java.util.Map;
+
+class AmbariClientCommon {
+
+    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+    static final String AMBARI_HOSTROLES_URI =
+                                    AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+    static final String AMBARI_SERVICECONFIGS_URI =
+                                    AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
+    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    private RESTInvoker restClient;
+
+
+    AmbariClientCommon(AliasService aliasService) {
+        this(new RESTInvoker(aliasService));
+    }
+
+
+    AmbariClientCommon(RESTInvoker restInvoker) {
+        this.restClient = restInvoker;
+    }
+
+
+
+    Map<String, Map<String, AmbariCluster.ServiceConfiguration>> getActiveServiceConfigurations(String clusterName,
+                                                                                                ServiceDiscoveryConfig config) {
+        return getActiveServiceConfigurations(config.getAddress(),
+                                              clusterName,
+                                              config.getUser(),
+                                              config.getPasswordAlias());
+    }
+
+
+    Map<String, Map<String, AmbariCluster.ServiceConfiguration>> getActiveServiceConfigurations(String discoveryAddress,
+                                                                                                String clusterName,
+                                                                                                String discoveryUser,
+                                                                                                String discoveryPwdAlias) {
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+
+        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+
+        JSONObject serviceConfigsJSON = restClient.invoke(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+        if (serviceConfigsJSON != null) {
+            // Process the service configurations
+            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+            for (Object serviceConfig : serviceConfigs) {
+                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+                for (Object configuration : configurations) {
+                    String configType = (String) ((JSONObject) configuration).get("type");
+                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+                    Map<String, String> configProps = new HashMap<>();
+                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+                    for (String propertyName : configProperties.keySet()) {
+                        configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
+                    }
+                    if (!serviceConfigurations.containsKey(serviceName)) {
+                        serviceConfigurations.put(serviceName, new HashMap<>());
+                    }
+                    serviceConfigurations.get(serviceName).put(configType,
+                                                               new AmbariCluster.ServiceConfiguration(configType,
+                                                                                                      configVersion,
+                                                                                                      configProps));
+                }
+            }
+        }
+
+        return serviceConfigurations;
+    }
+
+
+}
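
A minimal usage sketch for AmbariClientCommon as added above, assuming the caller already holds the gateway's AliasService. The Ambari address and cluster name are placeholders, and the HIVE/hive-site lookup is only an example of navigating the returned map; since the class is package-private, the sketch is assumed to live in the same package.

    package org.apache.knox.gateway.topology.discovery.ambari;

    import java.util.Map;

    import org.apache.knox.gateway.services.security.AliasService;

    class AmbariClientCommonSketch {
        static void printHiveSiteVersion(AliasService aliasService) {
            AmbariClientCommon client = new AmbariClientCommon(aliasService);

            // Fetch the currently-active configuration versions for one cluster.
            // Passing null for the user and password alias defers to the default
            // ambari.discovery.* aliases handled by RESTInvoker.
            Map<String, Map<String, AmbariCluster.ServiceConfiguration>> configs =
                    client.getActiveServiceConfigurations("http://ambari.example.com:8080",
                                                          "mycluster",
                                                          null,
                                                          null);

            Map<String, AmbariCluster.ServiceConfiguration> hiveConfigs = configs.get("HIVE");
            if (hiveConfigs != null && hiveConfigs.containsKey("hive-site")) {
                System.out.println("hive-site version: " + hiveConfigs.get("hive-site").getVersion());
            }
        }
    }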

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java
new file mode 100644
index 0000000..95b0280
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitor;
+import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitorProvider;
+
+public class AmbariClusterConfigurationMonitorProvider implements
+    ClusterConfigurationMonitorProvider {
+
+    @Override
+    public String getType() {
+        return AmbariConfigurationMonitor.getType();
+    }
+
+    @Override
+    public ClusterConfigurationMonitor newInstance(GatewayConfig config, AliasService aliasService) {
+        return new AmbariConfigurationMonitor(config, aliasService);
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java
new file mode 100644
index 0000000..c3aa27a
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java
@@ -0,0 +1,525 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitor;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+
+class AmbariConfigurationMonitor implements ClusterConfigurationMonitor {
+
+    private static final String TYPE = "Ambari";
+
+    private static final String CLUSTERS_DATA_DIR_NAME = "clusters";
+
+    private static final String PERSISTED_FILE_COMMENT = "Generated File. Do Not Edit!";
+
+    private static final String PROP_CLUSTER_PREFIX = "cluster.";
+    private static final String PROP_CLUSTER_SOURCE = PROP_CLUSTER_PREFIX + "source";
+    private static final String PROP_CLUSTER_NAME   = PROP_CLUSTER_PREFIX + "name";
+    private static final String PROP_CLUSTER_USER   = PROP_CLUSTER_PREFIX + "user";
+    private static final String PROP_CLUSTER_ALIAS  = PROP_CLUSTER_PREFIX + "pwd.alias";
+
+    static final String INTERVAL_PROPERTY_NAME = "org.apache.knox.gateway.topology.discovery.ambari.monitor.interval";
+
+
+    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    // Ambari address
+    //    clusterName -> ServiceDiscoveryConfig
+    //
+    Map<String, Map<String, ServiceDiscoveryConfig>> clusterMonitorConfigurations = new HashMap<>();
+
+    // Ambari address
+    //    clusterName
+    //        configType -> version
+    //
+    Map<String, Map<String, Map<String, String>>> ambariClusterConfigVersions = new HashMap<>();
+
+    ReadWriteLock configVersionsLock = new ReentrantReadWriteLock();
+
+    private List<ConfigurationChangeListener> changeListeners = new ArrayList<>();
+
+    private AmbariClientCommon ambariClient;
+
+    PollingConfigAnalyzer internalMonitor;
+
+    GatewayConfig gatewayConfig = null;
+
+    static String getType() {
+        return TYPE;
+    }
+
+    AmbariConfigurationMonitor(GatewayConfig config, AliasService aliasService) {
+        this.gatewayConfig   = config;
+        this.ambariClient    = new AmbariClientCommon(aliasService);
+        this.internalMonitor = new PollingConfigAnalyzer(this);
+
+        // Override the default polling interval if it has been configured
+        int interval = config.getClusterMonitorPollingInterval(getType());
+        if (interval > 0) {
+            setPollingInterval(interval);
+        }
+
+        init();
+    }
+
+    @Override
+    public void setPollingInterval(int interval) {
+        internalMonitor.setInterval(interval);
+    }
+
+    private void init() {
+        loadDiscoveryConfiguration();
+        loadClusterVersionData();
+    }
+
+    /**
+     * Load any previously-persisted service discovery configurations.
+     * This is necessary for checking previously-deployed topologies.
+     */
+    private void loadDiscoveryConfiguration() {
+        File persistenceDir = getPersistenceDir();
+        if (persistenceDir != null) {
+            Collection<File> persistedConfigs = FileUtils.listFiles(persistenceDir, new String[]{"conf"}, false);
+            for (File persisted : persistedConfigs) {
+                Properties props = new Properties();
+                try {
+                    props.load(new FileInputStream(persisted));
+
+                    addDiscoveryConfig(props.getProperty(PROP_CLUSTER_NAME), new ServiceDiscoveryConfig() {
+                                                            public String getAddress() {
+                                                                return props.getProperty(PROP_CLUSTER_SOURCE);
+                                                            }
+
+                                                            public String getUser() {
+                                                                return props.getProperty(PROP_CLUSTER_USER);
+                                                            }
+
+                                                            public String getPasswordAlias() {
+                                                                return props.getProperty(PROP_CLUSTER_ALIAS);
+                                                            }
+                                                        });
+                } catch (IOException e) {
+                    log.failedToLoadClusterMonitorServiceDiscoveryConfig(getType(), e);
+                }
+            }
+        }
+    }
+
+    /**
+     * Load any previously-persisted cluster configuration version records, so the monitor will check
+     * previously-deployed topologies against the current cluster configuration.
+     */
+    private void loadClusterVersionData() {
+        File persistenceDir = getPersistenceDir();
+        if (persistenceDir != null) {
+            Collection<File> persistedConfigs = FileUtils.listFiles(getPersistenceDir(), new String[]{"ver"}, false);
+            for (File persisted : persistedConfigs) {
+                Properties props = new Properties();
+                try {
+                    props.load(new FileInputStream(persisted));
+
+                    String source = props.getProperty(PROP_CLUSTER_SOURCE);
+                    String clusterName = props.getProperty(PROP_CLUSTER_NAME);
+
+                    Map<String, String> configVersions = new HashMap<>();
+                    for (String name : props.stringPropertyNames()) {
+                        if (!name.startsWith(PROP_CLUSTER_PREFIX)) { // Ignore implementation-specific properties
+                            configVersions.put(name, props.getProperty(name));
+                        }
+                    }
+
+                    // Map the config versions to the cluster name
+                    addClusterConfigVersions(source, clusterName, configVersions);
+
+                } catch (IOException e) {
+                    log.failedToLoadClusterMonitorConfigVersions(getType(), e);
+                }
+            }
+        }
+    }
+
+    private void persistDiscoveryConfiguration(String clusterName, ServiceDiscoveryConfig sdc) {
+        File persistenceDir = getPersistenceDir();
+        if (persistenceDir != null) {
+
+            Properties props = new Properties();
+            props.setProperty(PROP_CLUSTER_NAME, clusterName);
+            props.setProperty(PROP_CLUSTER_SOURCE, sdc.getAddress());
+
+            String username = sdc.getUser();
+            if (username != null) {
+                props.setProperty(PROP_CLUSTER_USER, username);
+            }
+            String pwdAlias = sdc.getPasswordAlias();
+            if (pwdAlias != null) {
+                props.setProperty(PROP_CLUSTER_ALIAS, pwdAlias);
+            }
+
+            persist(props, getDiscoveryConfigPersistenceFile(sdc.getAddress(), clusterName));
+        }
+    }
+
+    private void persistClusterVersionData(String address, String clusterName, Map<String, String> configVersions) {
+        File persistenceDir = getPersistenceDir();
+        if (persistenceDir != null) {
+            Properties props = new Properties();
+            props.setProperty(PROP_CLUSTER_NAME, clusterName);
+            props.setProperty(PROP_CLUSTER_SOURCE, address);
+            for (String name : configVersions.keySet()) {
+                props.setProperty(name, configVersions.get(name));
+            }
+
+            persist(props, getConfigVersionsPersistenceFile(address, clusterName));
+        }
+    }
+
+    private void persist(Properties props, File dest) {
+        try {
+            props.store(new FileOutputStream(dest), PERSISTED_FILE_COMMENT);
+        } catch (Exception e) {
+            log.failedToPersistClusterMonitorData(getType(), dest.getAbsolutePath(), e);
+        }
+    }
+
+    private File getPersistenceDir() {
+        File persistenceDir = null;
+
+        File dataDir = new File(gatewayConfig.getGatewayDataDir());
+        if (dataDir.exists()) {
+            File clustersDir = new File(dataDir, CLUSTERS_DATA_DIR_NAME);
+            if (!clustersDir.exists()) {
+                clustersDir.mkdirs();
+            }
+            persistenceDir = clustersDir;
+        }
+
+        return persistenceDir;
+    }
+
+    private File getDiscoveryConfigPersistenceFile(String address, String clusterName) {
+        return getPersistenceFile(address, clusterName, "conf");
+    }
+
+    private File getConfigVersionsPersistenceFile(String address, String clusterName) {
+        return getPersistenceFile(address, clusterName, "ver");
+    }
+
+    private File getPersistenceFile(String address, String clusterName, String ext) {
+        String fileName = address.replace(":", "_").replace("/", "_") + "-" + clusterName + "." + ext;
+        return new File(getPersistenceDir(), fileName);
+    }
+
+    /**
+     * Add cluster configuration details to the monitor's in-memory record.
+     *
+     * @param address        An Ambari instance address.
+     * @param clusterName    The name of a cluster associated with the Ambari instance.
+     * @param configVersions A Map of configuration types and their corresponding versions.
+     */
+    private void addClusterConfigVersions(String address, String clusterName, Map<String, String> configVersions) {
+        configVersionsLock.writeLock().lock();
+        try {
+            ambariClusterConfigVersions.computeIfAbsent(address, k -> new HashMap<>())
+                                       .put(clusterName, configVersions);
+        } finally {
+            configVersionsLock.writeLock().unlock();
+        }
+    }
+
+    public void start() {
+        (new Thread(internalMonitor, "AmbariConfigurationMonitor")).start();
+    }
+
+    public void stop() {
+        internalMonitor.stop();
+    }
+
+    @Override
+    public void addListener(ConfigurationChangeListener listener) {
+        changeListeners.add(listener);
+    }
+
+    /**
+     * Add discovery configuration details for the specified cluster, so the monitor knows how to connect to check for
+     * changes.
+     *
+     * @param clusterName The name of the cluster.
+     * @param config      The associated service discovery configuration.
+     */
+    void addDiscoveryConfig(String clusterName, ServiceDiscoveryConfig config) {
+        clusterMonitorConfigurations.computeIfAbsent(config.getAddress(), k -> new HashMap<>()).put(clusterName, config);
+    }
+
+
+    /**
+     * Get the service discovery configuration associated with the specified Ambari instance and cluster.
+     *
+     * @param address     An Ambari instance address.
+     * @param clusterName The name of a cluster associated with the Ambari instance.
+     *
+     * @return The associated ServiceDiscoveryConfig object.
+     */
+    ServiceDiscoveryConfig getDiscoveryConfig(String address, String clusterName) {
+        ServiceDiscoveryConfig config = null;
+        if (clusterMonitorConfigurations.containsKey(address)) {
+            config = clusterMonitorConfigurations.get(address).get(clusterName);
+        }
+        return config;
+    }
+
+
+    /**
+     * Add cluster configuration data to the monitor, which it will use when determining if configuration has changed.
+     *
+     * @param cluster         An AmbariCluster object.
+     * @param discoveryConfig The discovery configuration associated with the cluster.
+     */
+    void addClusterConfigVersions(AmbariCluster cluster, ServiceDiscoveryConfig discoveryConfig) {
+
+        String clusterName = cluster.getName();
+
+        // Register the cluster discovery configuration for the monitor connections
+        persistDiscoveryConfiguration(clusterName, discoveryConfig);
+        addDiscoveryConfig(clusterName, discoveryConfig);
+
+        // Build the set of configuration versions
+        Map<String, String> configVersions = new HashMap<>();
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigs = cluster.getServiceConfigurations();
+        for (String serviceName : serviceConfigs.keySet()) {
+            Map<String, AmbariCluster.ServiceConfiguration> configTypeVersionMap = serviceConfigs.get(serviceName);
+            for (AmbariCluster.ServiceConfiguration config : configTypeVersionMap.values()) {
+                String configType = config.getType();
+                String version = config.getVersion();
+                configVersions.put(configType, version);
+            }
+        }
+
+        persistClusterVersionData(discoveryConfig.getAddress(), clusterName, configVersions);
+        addClusterConfigVersions(discoveryConfig.getAddress(), clusterName, configVersions);
+    }
+
+
+    /**
+     * Remove the configuration record for the specified Ambari instance and cluster name.
+     *
+     * @param address     An Ambari instance address.
+     * @param clusterName The name of a cluster associated with the Ambari instance.
+     *
+     * @return The removed data; A Map of configuration types and their corresponding versions.
+     */
+    Map<String, String> removeClusterConfigVersions(String address, String clusterName) {
+        Map<String, String> result = new HashMap<>();
+
+        configVersionsLock.writeLock().lock();
+        try {
+            if (ambariClusterConfigVersions.containsKey(address)) {
+                result.putAll(ambariClusterConfigVersions.get(address).remove(clusterName));
+            }
+        } finally {
+            configVersionsLock.writeLock().unlock();
+        }
+
+        // Delete the associated persisted record
+        File persisted = getConfigVersionsPersistenceFile(address, clusterName);
+        if (persisted.exists()) {
+            persisted.delete();
+        }
+
+        return result;
+    }
+
+    /**
+     * Get the cluster configuration details for the specified cluster and Ambari instance.
+     *
+     * @param address     An Ambari instance address.
+     * @param clusterName The name of a cluster associated with the Ambari instance.
+     *
+     * @return A Map of configuration types and their corresponding versions.
+     */
+    Map<String, String> getClusterConfigVersions(String address, String clusterName) {
+        Map<String, String> result = new HashMap<>();
+
+        configVersionsLock.readLock().lock();
+        try {
+            if (ambariClusterConfigVersions.containsKey(address)) {
+                result.putAll(ambariClusterConfigVersions.get(address).get(clusterName));
+            }
+        } finally {
+            configVersionsLock.readLock().unlock();
+        }
+
+        return result;
+    }
+
+
+    /**
+     * Get all the clusters the monitor knows about.
+     *
+     * @return A Map of Ambari instance addresses to associated cluster names.
+     */
+    Map<String, List<String>> getClusterNames() {
+        Map<String, List<String>> result = new HashMap<>();
+
+        configVersionsLock.readLock().lock();
+        try {
+            for (String address : ambariClusterConfigVersions.keySet()) {
+                List<String> clusterNames = new ArrayList<>();
+                clusterNames.addAll(ambariClusterConfigVersions.get(address).keySet());
+                result.put(address, clusterNames);
+            }
+        } finally {
+            configVersionsLock.readLock().unlock();
+        }
+
+        return result;
+
+    }
+
+
+    /**
+     * Notify registered change listeners.
+     *
+     * @param source      The address of the Ambari instance from which the cluster details were determined.
+     * @param clusterName The name of the cluster whose configuration details have changed.
+     */
+    void notifyChangeListeners(String source, String clusterName) {
+        for (ConfigurationChangeListener listener : changeListeners) {
+            listener.onConfigurationChange(source, clusterName);
+        }
+    }
+
+
+    /**
+     * Request the current active configuration version info from Ambari.
+     *
+     * @param address     The Ambari instance address.
+     * @param clusterName The name of the cluster for which the details are desired.
+     *
+     * @return A Map of service configuration types and their corresponding versions.
+     */
+    Map<String, String> getUpdatedConfigVersions(String address, String clusterName) {
+        Map<String, String> configVersions = new HashMap<>();
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigs =
+                    ambariClient.getActiveServiceConfigurations(clusterName, getDiscoveryConfig(address, clusterName));
+
+        for (Map<String, AmbariCluster.ServiceConfiguration> serviceConfig : serviceConfigs.values()) {
+            for (AmbariCluster.ServiceConfiguration config : serviceConfig.values()) {
+                configVersions.put(config.getType(), config.getVersion());
+            }
+        }
+
+        return configVersions;
+    }
+
+
+    /**
+     * The thread that polls Ambari for configuration details for clusters associated with discovered topologies,
+     * compares them with the current recorded values, and notifies any listeners when differences are discovered.
+     */
+    static final class PollingConfigAnalyzer implements Runnable {
+
+        private static final int DEFAULT_POLLING_INTERVAL = 60;
+
+        // Polling interval in seconds
+        private int interval = DEFAULT_POLLING_INTERVAL;
+
+        private AmbariConfigurationMonitor delegate;
+
+        private boolean isActive = false;
+
+        PollingConfigAnalyzer(AmbariConfigurationMonitor delegate) {
+            this.delegate = delegate;
+            this.interval = Integer.getInteger(INTERVAL_PROPERTY_NAME, PollingConfigAnalyzer.DEFAULT_POLLING_INTERVAL);
+        }
+
+        void setInterval(int interval) {
+            this.interval = interval;
+        }
+
+
+        void stop() {
+            isActive = false;
+        }
+
+        @Override
+        public void run() {
+            isActive = true;
+
+            log.startedAmbariConfigMonitor(interval);
+
+            while (isActive) {
+                for (Map.Entry<String, List<String>> entry : delegate.getClusterNames().entrySet()) {
+                    String address = entry.getKey();
+                    for (String clusterName : entry.getValue()) {
+                        Map<String, String> configVersions = delegate.getClusterConfigVersions(address, clusterName);
+                        if (configVersions != null && !configVersions.isEmpty()) {
+                            Map<String, String> updatedVersions = delegate.getUpdatedConfigVersions(address, clusterName);
+                            if (updatedVersions != null && !updatedVersions.isEmpty()) {
+                                boolean configHasChanged = false;
+
+                                // If the config sets don't match in size, then something has changed
+                                if (updatedVersions.size() != configVersions.size()) {
+                                    configHasChanged = true;
+                                } else {
+                                    // Perform the comparison of all the config versions
+                                    for (Map.Entry<String, String> configVersion : configVersions.entrySet()) {
+                                        if (!updatedVersions.get(configVersion.getKey()).equals(configVersion.getValue())) {
+                                            configHasChanged = true;
+                                            break;
+                                        }
+                                    }
+                                }
+
+                                // If a change has occurred, notify the listeners
+                                if (configHasChanged) {
+                                    delegate.notifyChangeListeners(address, clusterName);
+                                }
+                            }
+                        }
+                    }
+                }
+
+                try {
+                    Thread.sleep(interval * 1000);
+                } catch (InterruptedException e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+
+}
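
A minimal sketch of driving the relocated AmbariConfigurationMonitor directly, assuming the caller already has GatewayConfig and AliasService instances; in the gateway this construction normally goes through AmbariClusterConfigurationMonitorProvider. The listener body and 30-second interval are examples, and the nesting of ConfigurationChangeListener inside ClusterConfigurationMonitor is inferred from the unqualified references in the class above.

    package org.apache.knox.gateway.topology.discovery.ambari;

    import org.apache.knox.gateway.config.GatewayConfig;
    import org.apache.knox.gateway.services.security.AliasService;
    import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitor;

    class AmbariConfigurationMonitorSketch {
        static void monitor(GatewayConfig gatewayConfig, AliasService aliasService) {
            AmbariConfigurationMonitor monitor = new AmbariConfigurationMonitor(gatewayConfig, aliasService);

            // React whenever the polling thread detects changed configuration versions.
            monitor.addListener(new ClusterConfigurationMonitor.ConfigurationChangeListener() {
                @Override
                public void onConfigurationChange(String source, String clusterName) {
                    System.out.println("Ambari config changed for " + clusterName + " @ " + source);
                }
            });

            monitor.setPollingInterval(30); // seconds
            monitor.start();                // spawns the "AmbariConfigurationMonitor" polling thread
            // ... later, during shutdown:
            // monitor.stop();
        }
    }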

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/RESTInvoker.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/RESTInvoker.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/RESTInvoker.java
new file mode 100644
index 0000000..8830115
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/RESTInvoker.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.knox.gateway.config.ConfigurationException;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.AliasServiceException;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
+import java.io.IOException;
+
+class RESTInvoker {
+
+    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
+
+    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    private AliasService aliasService = null;
+
+    private CloseableHttpClient httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+
+
+    RESTInvoker(AliasService aliasService) {
+        this.aliasService = aliasService;
+    }
+
+
+    JSONObject invoke(String url, String username, String passwordAlias) {
+        JSONObject result = null;
+
+        CloseableHttpResponse response = null;
+        try {
+            HttpGet request = new HttpGet(url);
+
+            // If no configured username, then use default username alias
+            String password = null;
+            if (username == null) {
+                if (aliasService != null) {
+                    try {
+                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+                        if (defaultUser != null) {
+                            username = new String(defaultUser);
+                        }
+                    } catch (AliasServiceException e) {
+                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+                    }
+                }
+
+                // If username is still null
+                if (username == null) {
+                    log.aliasServiceUserNotFound();
+                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
+                }
+            }
+
+            if (aliasService != null) {
+                // If no password alias is configured, then try the default alias
+                if (passwordAlias == null) {
+                    passwordAlias = DEFAULT_PWD_ALIAS;
+                }
+
+                try {
+                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+                    if (pwd != null) {
+                        password = new String(pwd);
+                    }
+
+                } catch (AliasServiceException e) {
+                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+                }
+            }
+
+            // If the password could not be determined
+            if (password == null) {
+                log.aliasServicePasswordNotFound();
+                throw new ConfigurationException("No password is configured for Ambari service discovery.");
+            }
+
+            // Add an auth header if credentials are available
+            String encodedCreds =
+                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+            response = httpClient.execute(request);
+
+            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+                HttpEntity entity = response.getEntity();
+                if (entity != null) {
+                    result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
+                    log.debugJSON(result.toJSONString());
+                } else {
+                    log.noJSON(url);
+                }
+            } else {
+                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+            }
+
+        } catch (IOException e) {
+            log.restInvocationError(url, e);
+        } finally {
+            if(response != null) {
+                try {
+                    response.close();
+                } catch (IOException e) {
+                    // Ignore
+                }
+            }
+        }
+        return result;
+    }
+
+}
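
A small sketch of calling the relocated RESTInvoker. Passing null for the username and password alias exercises the fallback to the ambari.discovery.user and ambari.discovery.password aliases shown above; the URL is a placeholder, and the sketch is assumed to live in the same package because the class is package-private.

    package org.apache.knox.gateway.topology.discovery.ambari;

    import net.minidev.json.JSONObject;

    import org.apache.knox.gateway.services.security.AliasService;

    class RESTInvokerSketch {
        static void listClusters(AliasService aliasService) {
            RESTInvoker rest = new RESTInvoker(aliasService);

            // null username / null alias -> use the gateway-level discovery aliases.
            JSONObject clusters = rest.invoke("http://ambari.example.com:8080/api/v1/clusters", null, null);

            if (clusters != null) {
                System.out.println(clusters.toJSONString());
            }
        }
    }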

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider
deleted file mode 100644
index d9b2b05..0000000
--- a/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.topology.discovery.ambari.AmbariClusterConfigurationMonitorProvider
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitorProvider
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitorProvider b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitorProvider
new file mode 100644
index 0000000..280485f
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitorProvider
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.knox.gateway.topology.discovery.ambari.AmbariClusterConfigurationMonitorProvider
\ No newline at end of file
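
The two hunks above simply move the Java ServiceLoader registration from the org.apache.hadoop.gateway package to org.apache.knox.gateway: the file must be named after the provider interface and must list the fully qualified implementation class, so the package restructuring touches both the file path and its single content line. As a rough illustration of how such an entry is consumed (the wrapper class and main method below are illustrative only, not Knox code, and assume the provider interface and implementation are on the classpath):

    import java.util.ServiceLoader;
    import org.apache.knox.gateway.topology.discovery.ClusterConfigurationMonitorProvider;

    public class ProviderDiscoveryExample {
        public static void main(String[] args) {
            // ServiceLoader reads META-INF/services/<interface FQN> from the classpath
            // and instantiates every implementation class listed in it.
            ServiceLoader<ClusterConfigurationMonitorProvider> providers =
                    ServiceLoader.load(ClusterConfigurationMonitorProvider.class);
            for (ClusterConfigurationMonitorProvider provider : providers) {
                // With the file above on the classpath, this prints the Ambari
                // provider class name listed in that file.
                System.out.println(provider.getClass().getName());
            }
        }
    }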


[35/49] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/test/java/org/apache/knox/gateway/util/KnoxCLITest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/util/KnoxCLITest.java
index 4c4d419,0000000..902327c
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/util/KnoxCLITest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/util/KnoxCLITest.java
@@@ -1,649 -1,0 +1,1032 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.util;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.commons.io.FileUtils;
++import org.apache.knox.conf.Configuration;
 +import org.apache.knox.gateway.config.impl.GatewayConfigImpl;
 +import org.apache.knox.gateway.services.GatewayServices;
++import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient;
++import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.knox.gateway.services.security.MasterService;
++import org.apache.knox.test.TestUtils;
 +import org.junit.Before;
 +import org.junit.Test;
 +
 +import java.io.ByteArrayOutputStream;
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.io.PrintStream;
 +import java.net.URL;
 +import java.util.UUID;
 +
 +import static org.hamcrest.CoreMatchers.containsString;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.CoreMatchers.not;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.assertFalse;
 +import static org.junit.Assert.assertNotNull;
++import static org.junit.Assert.assertNull;
 +import static org.junit.Assert.assertThat;
 +import static org.junit.Assert.assertTrue;
 +
 +/**
 + * @author larry
 + *
 + */
 +public class KnoxCLITest {
 +  private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
 +  private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
 +
 +  @Before
 +  public void setup() throws Exception {
 +    System.setOut(new PrintStream(outContent));
 +    System.setErr(new PrintStream(errContent));
 +  }
 +
 +  @Test
-   public void testSuccessfulAlaisLifecycle() throws Exception {
++  public void testRemoteConfigurationRegistryClientService() throws Exception {
++    outContent.reset();
++
++    KnoxCLI cli = new KnoxCLI();
++    Configuration config = new GatewayConfigImpl();
++    // Configure a client for the test local filesystem registry implementation
++    config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=/test");
++    cli.setConf(config);
++
++    // This is only to get the gateway services initialized
++    cli.run(new String[]{"version"});
++
++    RemoteConfigurationRegistryClientService service =
++                                   cli.getGatewayServices().getService(GatewayServices.REMOTE_REGISTRY_CLIENT_SERVICE);
++    assertNotNull(service);
++    RemoteConfigurationRegistryClient client = service.get("test_client");
++    assertNotNull(client);
++
++    assertNull(service.get("bogus"));
++  }
++
++  @Test
++  public void testListRemoteConfigurationRegistryClients() throws Exception {
++    outContent.reset();
++
++    KnoxCLI cli = new KnoxCLI();
++    String[] args = { "list-registry-clients", "--master","master" };
++
++    Configuration config = new GatewayConfigImpl();
++    cli.setConf(config);
++
++    // Test with no registry clients configured
++    int rc = cli.run(args);
++    assertEquals(0, rc);
++    assertTrue(outContent.toString(), outContent.toString().isEmpty());
++
++    // Test with a single client configured
++    // Configure a client for the test local filesystem registry implementation
++    config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=/test1");
++    cli.setConf(config);
++    outContent.reset();
++    rc = cli.run(args);
++    assertEquals(0, rc);
++    assertTrue(outContent.toString(), outContent.toString().contains("test_client"));
++
++    // Configure another client for the test local filesystem registry implementation
++    config.set("gateway.remote.config.registry.another_client", "type=LocalFileSystem;address=/test2");
++    cli.setConf(config);
++    outContent.reset();
++    rc = cli.run(args);
++    assertEquals(0, rc);
++    assertTrue(outContent.toString(), outContent.toString().contains("test_client"));
++    assertTrue(outContent.toString(), outContent.toString().contains("another_client"));
++  }
++
++  @Test
++  public void testRemoteConfigurationRegistryGetACLs() throws Exception {
++    outContent.reset();
++
++
++    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
++    try {
++      final File testRegistry = new File(testRoot, "registryRoot");
++
++      final String providerConfigName = "my-provider-config.xml";
++      final String providerConfigContent = "<gateway/>\n";
++      final File testProviderConfig = new File(testRoot, providerConfigName);
++      final String[] uploadArgs = {"upload-provider-config", testProviderConfig.getAbsolutePath(),
++                                   "--registry-client", "test_client",
++                                   "--master", "master"};
++      FileUtils.writeStringToFile(testProviderConfig, providerConfigContent);
++
++
++      final String[] args = {"get-registry-acl", "/knox/config/shared-providers",
++                             "--registry-client", "test_client",
++                             "--master", "master"};
++
++      KnoxCLI cli = new KnoxCLI();
++      Configuration config = new GatewayConfigImpl();
++      // Configure a client for the test local filesystem registry implementation
++      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
++      cli.setConf(config);
++
++      int rc = cli.run(uploadArgs);
++      assertEquals(0, rc);
++
++      // Run the test command
++      rc = cli.run(args);
++
++      // Validate the result
++      assertEquals(0, rc);
++      String result = outContent.toString();
++      assertEquals(result, 3, result.split("\n").length);
++    } finally {
++      FileUtils.forceDelete(testRoot);
++    }
++  }
++
++
++  @Test
++  public void testRemoteConfigurationRegistryUploadProviderConfig() throws Exception {
++    outContent.reset();
++
++    final String providerConfigName = "my-provider-config.xml";
++    final String providerConfigContent = "<gateway/>\n";
++
++    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
++    try {
++      final File testRegistry = new File(testRoot, "registryRoot");
++      final File testProviderConfig = new File(testRoot, providerConfigName);
++
++      final String[] args = {"upload-provider-config", testProviderConfig.getAbsolutePath(),
++                             "--registry-client", "test_client",
++                             "--master", "master"};
++
++      FileUtils.writeStringToFile(testProviderConfig, providerConfigContent);
++
++      KnoxCLI cli = new KnoxCLI();
++      Configuration config = new GatewayConfigImpl();
++      // Configure a client for the test local filesystem registry implementation
++      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
++      cli.setConf(config);
++
++      // Run the test command
++      int rc = cli.run(args);
++
++      // Validate the result
++      assertEquals(0, rc);
++      File registryFile = new File(testRegistry, "knox/config/shared-providers/" + providerConfigName);
++      assertTrue(registryFile.exists());
++      assertEquals(FileUtils.readFileToString(registryFile), providerConfigContent);
++    } finally {
++      FileUtils.forceDelete(testRoot);
++    }
++  }
++
++
++  @Test
++  public void testRemoteConfigurationRegistryUploadProviderConfigWithDestinationOverride() throws Exception {
++    outContent.reset();
++
++    final String providerConfigName = "my-provider-config.xml";
++    final String entryName = "my-providers.xml";
++    final String providerConfigContent = "<gateway/>\n";
++
++    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
++    try {
++      final File testRegistry = new File(testRoot, "registryRoot");
++      final File testProviderConfig = new File(testRoot, providerConfigName);
++
++      final String[] args = {"upload-provider-config", testProviderConfig.getAbsolutePath(),
++                             "--entry-name", entryName,
++                             "--registry-client", "test_client",
++                             "--master", "master"};
++
++      FileUtils.writeStringToFile(testProviderConfig, providerConfigContent);
++
++      KnoxCLI cli = new KnoxCLI();
++      Configuration config = new GatewayConfigImpl();
++      // Configure a client for the test local filesystem registry implementation
++      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
++      cli.setConf(config);
++
++      // Run the test command
++      int rc = cli.run(args);
++
++      // Validate the result
++      assertEquals(0, rc);
++      assertFalse((new File(testRegistry, "knox/config/shared-providers/" + providerConfigName)).exists());
++      File registryFile = new File(testRegistry, "knox/config/shared-providers/" + entryName);
++      assertTrue(registryFile.exists());
++      assertEquals(FileUtils.readFileToString(registryFile), providerConfigContent);
++    } finally {
++      FileUtils.forceDelete(testRoot);
++    }
++  }
++
++
++  @Test
++  public void testRemoteConfigurationRegistryUploadDescriptor() throws Exception {
++    outContent.reset();
++
++    final String descriptorName = "my-topology.json";
++    final String descriptorContent = testDescriptorContentJSON;
++
++    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
++    try {
++      final File testRegistry = new File(testRoot, "registryRoot");
++      final File testDescriptor = new File(testRoot, descriptorName);
++
++      final String[] args = {"upload-descriptor", testDescriptor.getAbsolutePath(),
++                             "--registry-client", "test_client",
++                             "--master", "master"};
++
++      FileUtils.writeStringToFile(testDescriptor, descriptorContent);
++
++      KnoxCLI cli = new KnoxCLI();
++      Configuration config = new GatewayConfigImpl();
++      // Configure a client for the test local filesystem registry implementation
++      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
++      cli.setConf(config);
++
++      // Run the test command
++      int rc = cli.run(args);
++
++      // Validate the result
++      assertEquals(0, rc);
++      File registryFile = new File(testRegistry, "knox/config/descriptors/" + descriptorName);
++      assertTrue(registryFile.exists());
++      assertEquals(FileUtils.readFileToString(registryFile), descriptorContent);
++    } finally {
++      FileUtils.forceDelete(testRoot);
++    }
++  }
++
++  @Test
++  public void testRemoteConfigurationRegistryUploadDescriptorWithDestinationOverride() throws Exception {
++    outContent.reset();
++
++    final String descriptorName = "my-topology.json";
++    final String entryName = "different-topology.json";
++    final String descriptorContent = testDescriptorContentJSON;
++
++    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
++    try {
++      final File testRegistry = new File(testRoot, "registryRoot");
++      final File testDescriptor = new File(testRoot, descriptorName);
++
++      final String[] args = {"upload-descriptor", testDescriptor.getAbsolutePath(),
++                             "--entry-name", entryName,
++                             "--registry-client", "test_client",
++                             "--master", "master"};
++
++      FileUtils.writeStringToFile(testDescriptor, descriptorContent);
++
++      KnoxCLI cli = new KnoxCLI();
++      Configuration config = new GatewayConfigImpl();
++      // Configure a client for the test local filesystem registry implementation
++      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
++      cli.setConf(config);
++
++      // Run the test command
++      int rc = cli.run(args);
++
++      // Validate the result
++      assertEquals(0, rc);
++      assertFalse((new File(testRegistry, "knox/config/descriptors/" + descriptorName)).exists());
++      File registryFile = new File(testRegistry, "knox/config/descriptors/" + entryName);
++      assertTrue(registryFile.exists());
++      assertEquals(FileUtils.readFileToString(registryFile), descriptorContent);
++    } finally {
++      FileUtils.forceDelete(testRoot);
++    }
++  }
++
++  @Test
++  public void testRemoteConfigurationRegistryDeleteProviderConfig() throws Exception {
++    outContent.reset();
++
++    // Create a provider config
++    final String providerConfigName = "my-provider-config.xml";
++    final String providerConfigContent = "<gateway/>\n";
++
++    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
++    try {
++      final File testRegistry = new File(testRoot, "registryRoot");
++      final File testProviderConfig = new File(testRoot, providerConfigName);
++
++      final String[] createArgs = {"upload-provider-config", testProviderConfig.getAbsolutePath(),
++                                   "--registry-client", "test_client",
++                                   "--master", "master"};
++
++      FileUtils.writeStringToFile(testProviderConfig, providerConfigContent);
++
++      KnoxCLI cli = new KnoxCLI();
++      Configuration config = new GatewayConfigImpl();
++      // Configure a client for the test local filesystem registry implementation
++      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
++      cli.setConf(config);
++
++      // Run the test command
++      int rc = cli.run(createArgs);
++
++      // Validate the result
++      assertEquals(0, rc);
++      File registryFile = new File(testRegistry, "knox/config/shared-providers/" + providerConfigName);
++      assertTrue(registryFile.exists());
++
++      outContent.reset();
++
++      // Delete the created provider config
++      final String[] deleteArgs = {"delete-provider-config", providerConfigName,
++                                   "--registry-client", "test_client",
++                                   "--master", "master"};
++      rc = cli.run(deleteArgs);
++      assertEquals(0, rc);
++      assertFalse(registryFile.exists());
++
++      // Try to delete a provider config that does not exist
++      rc = cli.run(new String[]{"delete-provider-config", "imaginary-providers.xml",
++                                "--registry-client", "test_client",
++                                "--master", "master"});
++      assertEquals(0, rc);
++    } finally {
++      FileUtils.forceDelete(testRoot);
++    }
++  }
++
++  @Test
++  public void testRemoteConfigurationRegistryDeleteDescriptor() throws Exception {
++    outContent.reset();
++
++    final String descriptorName = "my-topology.json";
++    final String descriptorContent = testDescriptorContentJSON;
++
++    final File testRoot = TestUtils.createTempDir(this.getClass().getName());
++    try {
++      final File testRegistry = new File(testRoot, "registryRoot");
++      final File testDescriptor = new File(testRoot, descriptorName);
++
++      final String[] createArgs = {"upload-descriptor", testDescriptor.getAbsolutePath(),
++                             "--registry-client", "test_client",
++                             "--master", "master"};
++
++      FileUtils.writeStringToFile(testDescriptor, descriptorContent);
++
++      KnoxCLI cli = new KnoxCLI();
++      Configuration config = new GatewayConfigImpl();
++      // Configure a client for the test local filesystem registry implementation
++      config.set("gateway.remote.config.registry.test_client", "type=LocalFileSystem;address=" + testRegistry);
++      cli.setConf(config);
++
++      // Run the test command
++      int rc = cli.run(createArgs);
++
++      // Validate the result
++      assertEquals(0, rc);
++      File registryFile = new File(testRegistry, "knox/config/descriptors/" + descriptorName);
++      assertTrue(registryFile.exists());
++
++      outContent.reset();
++
++      // Delete the created provider config
++      final String[] deleteArgs = {"delete-descriptor", descriptorName,
++                                   "--registry-client", "test_client",
++                                   "--master", "master"};
++      rc = cli.run(deleteArgs);
++      assertEquals(0, rc);
++      assertFalse(registryFile.exists());
++
++      // Try to delete a descriptor that does not exist
++      rc = cli.run(new String[]{"delete-descriptor", "bogus.json",
++                                "--registry-client", "test_client",
++                                "--master", "master"});
++      assertEquals(0, rc);
++    } finally {
++      FileUtils.forceDelete(testRoot);
++    }
++  }
++
++  @Test
++  public void testSuccessfulAliasLifecycle() throws Exception {
 +    outContent.reset();
 +    String[] args1 = {"create-alias", "alias1", "--value", "testvalue1", "--master", "master"};
 +    int rc = 0;
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf(new GatewayConfigImpl());
 +    rc = cli.run(args1);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("alias1 has been successfully " +
 +        "created."));
 +
 +    outContent.reset();
 +    String[] args2 = {"list-alias", "--master", 
 +        "master"};
 +    rc = cli.run(args2);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("alias1"));
 +
 +    outContent.reset();
 +    String[] args4 = {"delete-alias", "alias1", "--master", 
 +      "master"};
 +    rc = cli.run(args4);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("alias1 has been successfully " +
 +        "deleted."));
 +
 +    outContent.reset();
 +    rc = cli.run(args2);
 +    assertEquals(0, rc);
 +    assertFalse(outContent.toString(), outContent.toString().contains("alias1"));
 +  }
 +  
 +  @Test
 +  public void testListAndDeleteOfAliasForInvalidClusterName() throws Exception {
 +    outContent.reset();
 +    String[] args1 =
 +        { "create-alias", "alias1", "--cluster", "cluster1", "--value", "testvalue1", "--master",
 +            "master" };
 +    int rc = 0;
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf(new GatewayConfigImpl());
 +    rc = cli.run(args1);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains(
 +      "alias1 has been successfully " + "created."));
 +
 +    outContent.reset();
 +    String[] args2 = { "list-alias", "--cluster", "Invalidcluster1", "--master", "master" };
 +    rc = cli.run(args2);
 +    assertEquals(0, rc);
 +    System.out.println(outContent.toString());
 +    assertTrue(outContent.toString(),
 +      outContent.toString().contains("Invalid cluster name provided: Invalidcluster1"));
 +
 +    outContent.reset();
 +    String[] args4 =
 +        { "delete-alias", "alias1", "--cluster", "Invalidcluster1", "--master", "master" };
 +    rc = cli.run(args4);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(),
 +      outContent.toString().contains("Invalid cluster name provided: Invalidcluster1"));
 +
 +  }
 +
 +  @Test
 +  public void testDeleteOfNonExistAliasFromUserDefinedCluster() throws Exception {
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf(new GatewayConfigImpl());
 +    try {
 +      int rc = 0;
 +      outContent.reset();
 +      String[] args1 =
 +          { "create-alias", "alias1", "--cluster", "cluster1", "--value", "testvalue1", "--master",
 +              "master" };
 +      cli.run(args1);
 +
 +      // Delete invalid alias from the cluster
 +      outContent.reset();
 +      String[] args2 = { "delete-alias", "alias2", "--cluster", "cluster1", "--master", "master" };
 +      rc = cli.run(args2);
 +      assertEquals(0, rc);
 +      assertTrue(outContent.toString().contains("No such alias exists in the cluster."));
 +    } finally {
 +      outContent.reset();
 +      String[] args1 = { "delete-alias", "alias1", "--cluster", "cluster1", "--master", "master" };
 +      cli.run(args1);
 +    }
 +  }
 +
 +  @Test
 +  public void testDeleteOfNonExistAliasFromDefaultCluster() throws Exception {
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf(new GatewayConfigImpl());
 +    try {
 +      int rc = 0;
 +      outContent.reset();
 +      String[] args1 = { "create-alias", "alias1", "--value", "testvalue1", "--master", "master" };
 +      cli.run(args1);
 +
 +      // Delete invalid alias from the cluster
 +      outContent.reset();
 +      String[] args2 = { "delete-alias", "alias2", "--master", "master" };
 +      rc = cli.run(args2);
 +      assertEquals(0, rc);
 +      assertTrue(outContent.toString().contains("No such alias exists in the cluster."));
 +    } finally {
 +      outContent.reset();
 +      String[] args1 = { "delete-alias", "alias1", "--master", "master" };
 +      cli.run(args1);
 +    }
 +  }
 +
 +  @Test
 +  public void testForInvalidArgument() throws Exception {
 +    outContent.reset();
 +    String[] args1 = { "--value", "testvalue1", "--master", "master" };
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf(new GatewayConfigImpl());
 +    int rc = cli.run(args1);
 +    assertEquals(-2, rc);
 +    assertTrue(outContent.toString().contains("ERROR: Invalid Command"));
 +  }
 +
 +  @Test
 +  public void testListAndDeleteOfAliasForValidClusterName() throws Exception {
 +    outContent.reset();
 +    String[] args1 =
 +        { "create-alias", "alias1", "--cluster", "cluster1", "--value", "testvalue1", "--master",
 +            "master" };
 +    int rc = 0;
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf(new GatewayConfigImpl());
 +    rc = cli.run(args1);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains(
 +      "alias1 has been successfully " + "created."));
 +
 +    outContent.reset();
 +    String[] args2 = { "list-alias", "--cluster", "cluster1", "--master", "master" };
 +    rc = cli.run(args2);
 +    assertEquals(0, rc);
 +    System.out.println(outContent.toString());
 +    assertTrue(outContent.toString(), outContent.toString().contains("alias1"));
 +
 +    outContent.reset();
 +    String[] args4 =
 +        { "delete-alias", "alias1", "--cluster", "cluster1", "--master", "master" };
 +    rc = cli.run(args4);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains(
 +      "alias1 has been successfully " + "deleted."));
 +
 +    outContent.reset();
 +    rc = cli.run(args2);
 +    assertEquals(0, rc);
 +    assertFalse(outContent.toString(), outContent.toString().contains("alias1"));
 +
 +  }
 +
 +  @Test
 +  public void testGatewayAndClusterStores() throws Exception {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    FileUtils.deleteQuietly( new File( config.getGatewaySecurityDir() ) );
 +
 +    outContent.reset();
 +    String[] gwCreateArgs = {"create-alias", "alias1", "--value", "testvalue1", "--master", "master"};
 +    int rc = 0;
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf( config );
 +    rc = cli.run(gwCreateArgs);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("alias1 has been successfully " +
 +        "created."));
 +
 +    AliasService as = cli.getGatewayServices().getService(GatewayServices.ALIAS_SERVICE);
 +
 +    outContent.reset();
 +    String[] clusterCreateArgs = {"create-alias", "alias2", "--value", "testvalue1", "--cluster", "test", 
 +        "--master", "master"};
 +    cli = new KnoxCLI();
 +    cli.setConf( config );
 +    rc = cli.run(clusterCreateArgs);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("alias2 has been successfully " +
 +        "created."));
 +
 +    outContent.reset();
 +    String[] args2 = {"list-alias", "--master", "master"};
 +    cli = new KnoxCLI();
 +    rc = cli.run(args2);
 +    assertEquals(0, rc);
 +    assertFalse(outContent.toString(), outContent.toString().contains("alias2"));
 +    assertTrue(outContent.toString(), outContent.toString().contains("alias1"));
 +
 +    char[] passwordChars = as.getPasswordFromAliasForCluster("test", "alias2");
 +    assertNotNull(passwordChars);
 +    assertTrue(new String(passwordChars), "testvalue1".equals(new String(passwordChars)));
 +
 +    outContent.reset();
 +    String[] args1 = {"list-alias", "--cluster", "test", "--master", "master"};
 +    cli = new KnoxCLI();
 +    rc = cli.run(args1);
 +    assertEquals(0, rc);
 +    assertFalse(outContent.toString(), outContent.toString().contains("alias1"));
 +    assertTrue(outContent.toString(), outContent.toString().contains("alias2"));
 +
 +    outContent.reset();
 +    String[] args4 = {"delete-alias", "alias1", "--master", "master"};
 +    cli = new KnoxCLI();
 +    rc = cli.run(args4);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("alias1 has been successfully " +
 +        "deleted."));
 +    
 +    outContent.reset();
 +    String[] args5 = {"delete-alias", "alias2", "--cluster", "test", "--master", "master"};
 +    cli = new KnoxCLI();
 +    rc = cli.run(args5);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("alias2 has been successfully " +
 +        "deleted."));
 +  }
 +
 +  private void createTestMaster() throws Exception {
 +    outContent.reset();
 +    String[] args = new String[]{ "create-master", "--master", "master", "--force" };
 +    KnoxCLI cli = new KnoxCLI();
 +    int rc = cli.run(args);
 +    assertThat( rc, is( 0 ) );
 +    MasterService ms = cli.getGatewayServices().getService("MasterService");
 +    String master = String.copyValueOf( ms.getMasterSecret() );
 +    assertThat( master, is( "master" ) );
 +    assertThat( outContent.toString(), containsString( "Master secret has been persisted to disk." ) );
 +  }
 +
 +  @Test
 +  public void testCreateSelfSignedCert() throws Exception {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    FileUtils.deleteQuietly( new File( config.getGatewaySecurityDir() ) );
 +    createTestMaster();
 +    outContent.reset();
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf( config );
 +    String[] gwCreateArgs = {"create-cert", "--hostname", "hostname1", "--master", "master"};
 +    int rc = 0;
 +    rc = cli.run(gwCreateArgs);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("gateway-identity has been successfully " +
 +        "created."));
 +  }
 +
 +  @Test
 +  public void testExportCert() throws Exception {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    FileUtils.deleteQuietly( new File( config.getGatewaySecurityDir() ) );
 +    createTestMaster();
 +    outContent.reset();
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf( config );
 +    String[] gwCreateArgs = {"create-cert", "--hostname", "hostname1", "--master", "master"};
 +    int rc = 0;
 +    rc = cli.run(gwCreateArgs);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("gateway-identity has been successfully " +
 +        "created."));
 +
 +    outContent.reset();
 +    String[] gwCreateArgs2 = {"export-cert", "--type", "PEM"};
 +    rc = 0;
 +    rc = cli.run(gwCreateArgs2);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("Certificate gateway-identity has been successfully exported to"));
 +    assertTrue(outContent.toString(), outContent.toString().contains("gateway-identity.pem"));
 +
 +    outContent.reset();
 +    String[] gwCreateArgs2_5 = {"export-cert"};
 +    rc = 0;
 +    rc = cli.run(gwCreateArgs2_5);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("Certificate gateway-identity has been successfully exported to"));
 +    assertTrue(outContent.toString(), outContent.toString().contains("gateway-identity.pem"));
 +
 +    outContent.reset();
 +    String[] gwCreateArgs3 = {"export-cert", "--type", "JKS"};
 +    rc = 0;
 +    rc = cli.run(gwCreateArgs3);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("Certificate gateway-identity has been successfully exported to"));
 +    assertTrue(outContent.toString(), outContent.toString().contains("gateway-client-trust.jks"));
 +
 +    outContent.reset();
 +    String[] gwCreateArgs4 = {"export-cert", "--type", "invalid"};
 +    rc = 0;
 +    rc = cli.run(gwCreateArgs4);
 +    assertEquals(0, rc);
 +    assertTrue(outContent.toString(), outContent.toString().contains("Invalid type for export file provided."));
 +  }
 +
 +  @Test
 +  public void testCreateMaster() throws Exception {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    FileUtils.deleteQuietly( new File( config.getGatewaySecurityDir() ) );
 +    outContent.reset();
 +    String[] args = {"create-master", "--master", "master"};
 +    int rc = 0;
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf( config );
 +    rc = cli.run(args);
 +    assertEquals(0, rc);
 +    MasterService ms = cli.getGatewayServices().getService("MasterService");
 +    // assertTrue(ms.getClass().getName(), ms.getClass().getName().equals("kjdfhgjkhfdgjkh"));
 +    assertTrue( new String( ms.getMasterSecret() ), "master".equals( new String( ms.getMasterSecret() ) ) );
 +    assertTrue(outContent.toString(), outContent.toString().contains("Master secret has been persisted to disk."));
 +  }
 +
 +  @Test
 +  public void testCreateMasterGenerate() throws Exception {
 +    String[] args = {"create-master", "--generate" };
 +    int rc = 0;
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    File masterFile = new File( config.getGatewaySecurityDir(), "master" );
 +
 +    // Need to delete the master file so that the change isn't ignored.
 +    if( masterFile.exists() ) {
 +      assertThat( "Failed to delete existing master file.", masterFile.delete(), is( true ) );
 +    }
 +    outContent.reset();
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf(config);
 +    rc = cli.run(args);
 +    assertThat( rc, is( 0 ) );
 +    MasterService ms = cli.getGatewayServices().getService("MasterService");
 +    String master = String.copyValueOf( ms.getMasterSecret() );
 +    assertThat( master.length(), is( 36 ) );
 +    assertThat( master.indexOf( '-' ), is( 8 ) );
 +    assertThat( master.indexOf( '-', 9 ), is( 13 ) );
 +    assertThat( master.indexOf( '-', 14 ), is( 18 ) );
 +    assertThat( master.indexOf( '-', 19 ), is( 23 ) );
 +    assertThat( UUID.fromString( master ), notNullValue() );
 +    assertThat( outContent.toString(), containsString( "Master secret has been persisted to disk." ) );
 +
 +    // Need to delete the master file so that the change isn't ignored.
 +    if( masterFile.exists() ) {
 +      assertThat( "Failed to delete existing master file.", masterFile.delete(), is( true ) );
 +    }
 +    outContent.reset();
 +    cli = new KnoxCLI();
 +    rc = cli.run(args);
 +    ms = cli.getGatewayServices().getService("MasterService");
 +    String master2 = String.copyValueOf( ms.getMasterSecret() );
 +    assertThat( master2.length(), is( 36 ) );
 +    assertThat( UUID.fromString( master2 ), notNullValue() );
 +    assertThat( master2, not( is( master ) ) );
 +    assertThat( rc, is( 0 ) );
 +    assertThat(outContent.toString(), containsString("Master secret has been persisted to disk."));
 +  }
 +
 +  @Test
 +  public void testCreateMasterForce() throws Exception {
 +    GatewayConfigImpl config = new GatewayConfigImpl();
 +    File masterFile = new File( config.getGatewaySecurityDir(), "master" );
 +
 +    // Need to delete the master file so that the change isn't ignored.
 +    if( masterFile.exists() ) {
 +      assertThat( "Failed to delete existing master file.", masterFile.delete(), is( true ) );
 +    }
 +
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf(config);
 +    MasterService ms;
 +    int rc = 0;
 +    outContent.reset();
 +
 +    String[] args = { "create-master", "--master", "test-master-1" };
 +
 +    rc = cli.run(args);
 +    assertThat( rc, is( 0 ) );
 +    ms = cli.getGatewayServices().getService("MasterService");
 +    String master = String.copyValueOf( ms.getMasterSecret() );
 +    assertThat( master, is( "test-master-1" ) );
 +    assertThat( outContent.toString(), containsString( "Master secret has been persisted to disk." ) );
 +
 +    outContent.reset();
 +    rc = cli.run(args);
 +    assertThat( rc, is(0 ) );
 +    assertThat( outContent.toString(), containsString( "Master secret is already present on disk." ) );
 +
 +    outContent.reset();
 +    args = new String[]{ "create-master", "--master", "test-master-2", "--force" };
 +    rc = cli.run(args);
 +    assertThat( rc, is( 0 ) );
 +    ms = cli.getGatewayServices().getService("MasterService");
 +    master = String.copyValueOf( ms.getMasterSecret() );
 +    assertThat( master, is( "test-master-2" ) );
 +    assertThat( outContent.toString(), containsString( "Master secret has been persisted to disk." ) );
 +  }
 +
 +  @Test
 +  public void testListTopology() throws Exception {
 +
 +    GatewayConfigMock config = new GatewayConfigMock();
 +    URL topoURL = ClassLoader.getSystemResource("conf-demo/conf/topologies/admin.xml");
 +    config.setConfDir( new File(topoURL.getFile()).getParentFile().getParent() );
 +    String args[] = {"list-topologies", "--master", "knox"};
 +
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf( config );
 +
 +    cli.run( args );
 +    assertThat(outContent.toString(), containsString("sandbox"));
 +    assertThat(outContent.toString(), containsString("admin"));
 +  }
 +
 +  private class GatewayConfigMock extends GatewayConfigImpl{
 +    private String confDir;
 +    public void setConfDir(String location) {
 +      confDir = location;
 +    }
 +
 +    @Override
 +    public String getGatewayConfDir(){
 +      return confDir;
 +    }
 +  }
 +
 +  private static XMLTag createBadTopology() {
 +    XMLTag xml = XMLDoc.newDocument(true)
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "123" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( "ldap://localhost:8443" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "vvv" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +        .addTag( "provider" )
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "test-service-role" )
 +        .gotoRoot();
 +    return xml;
 +  }
 +
 +  private static XMLTag createGoodTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( "ldap://localhost:8443").gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +        .addTag( "provider" )
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "test-service-role" )
 +        .gotoRoot();
 +    return xml;
 +  }
 +
 +  private File writeTestTopology( String name, XMLTag xml ) throws IOException {
 +    // Create the test topology.
 +
 +    GatewayConfigMock config = new GatewayConfigMock();
 +    URL topoURL = ClassLoader.getSystemResource("conf-demo/conf/topologies/admin.xml");
 +    config.setConfDir( new File(topoURL.getFile()).getParentFile().getParent() );
 +
 +    File tempFile = new File( config.getGatewayTopologyDir(), name + ".xml." + UUID.randomUUID() );
 +    FileOutputStream stream = new FileOutputStream( tempFile );
 +    xml.toStream( stream );
 +    stream.close();
 +    File descriptor = new File( config.getGatewayTopologyDir(), name + ".xml" );
 +    tempFile.renameTo( descriptor );
 +    return descriptor;
 +  }
 +
 +  @Test
 +  public void testValidateTopology() throws Exception {
 +
 +    GatewayConfigMock config = new GatewayConfigMock();
 +    URL topoURL = ClassLoader.getSystemResource("conf-demo/conf/topologies/admin.xml");
 +    config.setConfDir( new File(topoURL.getFile()).getParentFile().getParent() );
 +    String args[] = {"validate-topology", "--master", "knox", "--cluster", "sandbox"};
 +
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf( config );
 +    cli.run( args );
 +
 +    assertThat(outContent.toString(), containsString(config.getGatewayTopologyDir()));
 +    assertThat(outContent.toString(), containsString("sandbox"));
 +    assertThat(outContent.toString(), containsString("success"));
 +    outContent.reset();
 +
 +
 +    String args2[] = {"validate-topology", "--master", "knox", "--cluster", "NotATopology"};
 +    cli.run(args2);
 +
 +    assertThat(outContent.toString(), containsString("NotATopology"));
 +    assertThat(outContent.toString(), containsString("does not exist"));
 +    outContent.reset();
 +
 +    String args3[] = {"validate-topology", "--master", "knox", "--path", config.getGatewayTopologyDir() + "/admin.xml"};
 +    cli.run(args3);
 +
 +    assertThat(outContent.toString(), containsString("admin"));
 +    assertThat(outContent.toString(), containsString("success"));
 +    outContent.reset();
 +
 +    String args4[] = {"validate-topology", "--master", "knox", "--path", "not/a/path"};
 +    cli.run(args4);
 +    assertThat(outContent.toString(), containsString("does not exist"));
 +    assertThat(outContent.toString(), containsString("not/a/path"));
 +  }
 +
 +  @Test
 +  public void testValidateTopologyOutput() throws Exception {
 +
 +    File bad = writeTestTopology( "test-cluster-bad", createBadTopology() );
 +    File good = writeTestTopology( "test-cluster-good", createGoodTopology() );
 +
 +    GatewayConfigMock config = new GatewayConfigMock();
 +    URL topoURL = ClassLoader.getSystemResource("conf-demo/conf/topologies/admin.xml");
 +    config.setConfDir( new File(topoURL.getFile()).getParentFile().getParent() );
 +    String args[] = {"validate-topology", "--master", "knox", "--cluster", "test-cluster-bad"};
 +
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf( config );
 +    cli.run( args );
 +
 +    assertThat(outContent.toString(), containsString(config.getGatewayTopologyDir()));
 +    assertThat(outContent.toString(), containsString("test-cluster-bad"));
 +    assertThat(outContent.toString(), containsString("unsuccessful"));
 +    assertThat(outContent.toString(), containsString("Invalid content"));
 +    assertThat(outContent.toString(), containsString("Line"));
 +
 +
 +    outContent.reset();
 +
 +    String args2[] = {"validate-topology", "--master", "knox", "--cluster", "test-cluster-good"};
 +
 +    cli.run(args2);
 +
 +    assertThat(outContent.toString(), containsString(config.getGatewayTopologyDir()));
 +    assertThat(outContent.toString(), containsString("success"));
 +    assertThat(outContent.toString(), containsString("test-cluster-good"));
 +
 +
 +  }
 +
++  private static final String testDescriptorContentJSON = "{\n" +
++                                                          "  \"discovery-address\":\"http://localhost:8080\",\n" +
++                                                          "  \"discovery-user\":\"maria_dev\",\n" +
++                                                          "  \"discovery-pwd-alias\":\"sandbox.discovery.password\",\n" +
++                                                          "  \"provider-config-ref\":\"my-provider-config\",\n" +
++                                                          "  \"cluster\":\"Sandbox\",\n" +
++                                                          "  \"services\":[\n" +
++                                                          "    {\"name\":\"NAMENODE\"},\n" +
++                                                          "    {\"name\":\"JOBTRACKER\"},\n" +
++                                                          "    {\"name\":\"WEBHDFS\"},\n" +
++                                                          "    {\"name\":\"WEBHCAT\"},\n" +
++                                                          "    {\"name\":\"OOZIE\"},\n" +
++                                                          "    {\"name\":\"WEBHBASE\"},\n" +
++                                                          "    {\"name\":\"HIVE\"},\n" +
++                                                          "    {\"name\":\"RESOURCEMANAGER\"}\n" +
++                                                          "  ]\n" +
++                                                          "}";
 +}
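
The new remote-registry tests above all follow the same pattern: register a LocalFileSystem-backed registry client through a gateway.remote.config.registry.* property, point KnoxCLI at that configuration, and drive the upload/delete commands. A condensed, stand-alone sketch of that pattern follows; the client name, registry directory, and provider-config path are illustrative values rather than Knox defaults:

    import org.apache.knox.conf.Configuration;
    import org.apache.knox.gateway.config.impl.GatewayConfigImpl;
    import org.apache.knox.gateway.util.KnoxCLI;

    public class UploadProviderConfigSketch {
        public static void main(String[] args) throws Exception {
            // Register a remote configuration registry client backed by the local filesystem.
            Configuration config = new GatewayConfigImpl();
            config.set("gateway.remote.config.registry.sandbox_client",
                       "type=LocalFileSystem;address=/tmp/knox-registry");

            KnoxCLI cli = new KnoxCLI();
            cli.setConf(config);

            // Upload a provider configuration; it lands under knox/config/shared-providers
            // in the registry, as the tests above verify.
            int rc = cli.run(new String[]{"upload-provider-config", "/tmp/my-provider-config.xml",
                                          "--registry-client", "sandbox_client",
                                          "--master", "master"});
            System.out.println("upload-provider-config exit code: " + rc);
        }
    }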

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/BadUrlTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/websockets/BadUrlTest.java
index 3aceadd,0000000..2ca5ede
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/BadUrlTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/BadUrlTest.java
@@@ -1,309 -1,0 +1,320 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.websockets;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.net.URI;
 +import java.net.URL;
 +import java.util.ArrayList;
++import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.concurrent.TimeUnit;
 +
 +import javax.websocket.CloseReason;
 +import javax.websocket.ContainerProvider;
 +import javax.websocket.WebSocketContainer;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.config.impl.GatewayConfigImpl;
 +import org.apache.knox.gateway.deploy.DeploymentFactory;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.topology.TopologyService;
 +import org.apache.knox.gateway.topology.TopologyEvent;
 +import org.apache.knox.gateway.topology.TopologyListener;
 +import org.apache.knox.test.TestUtils;
 +import org.easymock.EasyMock;
 +import org.eclipse.jetty.server.Server;
 +import org.eclipse.jetty.server.ServerConnector;
 +import org.eclipse.jetty.server.handler.ContextHandler;
 +import org.eclipse.jetty.server.handler.HandlerCollection;
 +import org.hamcrest.CoreMatchers;
 +import org.junit.AfterClass;
 +import org.junit.Assert;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +
 +/**
 + * Test for bad URLs.
 + * <p>
 + * This test sets up a bad URL through the topology, so it exercises the bad-URL
 + * case and the plumbing around it.
 + * @since 0.10
 + */
 +public class BadUrlTest {
 +
 +  /**
 +   * Non-existent backend websocket server
 +   */
 +  private static String BACKEND = "http://localhost:9999";
 +
 +  /**
 +   * Mock Gateway server
 +   */
 +  private static Server gatewayServer;
 +
 +  /**
 +   * Mock gateway config
 +   */
 +  private static GatewayConfig gatewayConfig;
 +
 +  private static GatewayServices services;
 +
 +  /**
 +   * URI for gateway server
 +   */
 +  private static URI serverUri;
 +
 +  private static File topoDir;
 +
 +  public BadUrlTest() {
 +    super();
 +  }
 +
 +  @BeforeClass
 +  public static void startServers() throws Exception {
 +
 +    startGatewayServer();
 +
 +  }
 +
 +  @AfterClass
 +  public static void stopServers() {
 +    try {
 +      gatewayServer.stop();
 +    } catch (final Exception e) {
 +      e.printStackTrace(System.err);
 +    }
 +
 +    /* Cleanup the created files */
 +    FileUtils.deleteQuietly(topoDir);
 +
 +  }
 +
 +  /**
 +   * Test websocket proxying through gateway.
 +   *
 +   * @throws Exception
 +   */
 +
 +  @Test
 +  public void testBadUrl() throws Exception {
 +    WebSocketContainer container = ContainerProvider.getWebSocketContainer();
 +
 +    WebsocketClient client = new WebsocketClient();
 +
 +    container.connectToServer(client,
 +        new URI(serverUri.toString() + "gateway/websocket/ws"));
 +
 +    client.awaitClose(CloseReason.CloseCodes.UNEXPECTED_CONDITION.getCode(),
 +        1000, TimeUnit.MILLISECONDS);
 +
 +    Assert.assertThat(client.close.getCloseCode().getCode(),
 +        CoreMatchers.is(CloseReason.CloseCodes.UNEXPECTED_CONDITION.getCode()));
 +
 +  }
 +
 +
 +  /**
 +   * Start Gateway Server.
 +   *
 +   * @throws Exception
 +   */
 +  private static void startGatewayServer() throws Exception {
 +    gatewayServer = new Server();
 +    final ServerConnector connector = new ServerConnector(gatewayServer);
 +    gatewayServer.addConnector(connector);
 +
 +    /* workaround so we can add our handler later at runtime */
 +    HandlerCollection handlers = new HandlerCollection(true);
 +
 +    /* add some initial handlers */
 +    ContextHandler context = new ContextHandler();
 +    context.setContextPath("/");
 +    handlers.addHandler(context);
 +
 +    gatewayServer.setHandler(handlers);
 +
 +    // Start Server
 +    gatewayServer.start();
 +
 +    String host = connector.getHost();
 +    if (host == null) {
 +      host = "localhost";
 +    }
 +    int port = connector.getLocalPort();
 +    serverUri = new URI(String.format("ws://%s:%d/", host, port));
 +
 +    /* Setup websocket handler */
 +    setupGatewayConfig(BACKEND);
 +
 +    final GatewayWebsocketHandler gatewayWebsocketHandler = new GatewayWebsocketHandler(
 +        gatewayConfig, services);
 +    handlers.addHandler(gatewayWebsocketHandler);
 +    gatewayWebsocketHandler.start();
 +  }
 +
 +  /**
 +   * Initialize the configs and components required for this test.
 +   *
 +   * @param backend
 +   * @throws IOException
 +   */
 +  private static void setupGatewayConfig(final String backend)
 +      throws IOException {
 +    services = new DefaultGatewayServices();
 +
 +    topoDir = createDir();
 +    URL serviceUrl = ClassLoader.getSystemResource("websocket-services");
 +
 +    final File descriptor = new File(topoDir, "websocket.xml");
 +    final FileOutputStream stream = new FileOutputStream(descriptor);
 +    createKnoxTopology(backend).toStream(stream);
 +    stream.close();
 +
 +    final TestTopologyListener topoListener = new TestTopologyListener();
 +
 +    final Map<String, String> options = new HashMap<>();
 +    options.put("persist-master", "false");
 +    options.put("master", "password");
 +
 +    gatewayConfig = EasyMock.createNiceMock(GatewayConfig.class);
 +    EasyMock.expect(gatewayConfig.getGatewayTopologyDir())
 +        .andReturn(topoDir.toString()).anyTimes();
 +
++    EasyMock.expect(gatewayConfig.getGatewayProvidersConfigDir())
++            .andReturn(topoDir.getAbsolutePath() + "/shared-providers").anyTimes();
++
++    EasyMock.expect(gatewayConfig.getGatewayDescriptorsDir())
++            .andReturn(topoDir.getAbsolutePath() + "/descriptors").anyTimes();
++
 +    EasyMock.expect(gatewayConfig.getGatewayServicesDir())
 +        .andReturn(serviceUrl.getFile()).anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getEphemeralDHKeySize()).andReturn("2048")
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getGatewaySecurityDir())
 +        .andReturn(topoDir.toString()).anyTimes();
 +
 +    /* Websocket configs */
 +    EasyMock.expect(gatewayConfig.isWebsocketEnabled()).andReturn(true)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxTextMessageSize())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxBinaryMessageSize())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxTextMessageBufferSize())
 +        .andReturn(
 +            GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxBinaryMessageBufferSize())
 +        .andReturn(
 +            GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketInputBufferSize())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketAsyncWriteTimeout())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketIdleTimeout())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_IDLE_TIMEOUT).anyTimes();
 +
++    EasyMock.expect(gatewayConfig.getRemoteRegistryConfigurationNames())
++            .andReturn(Collections.emptyList())
++            .anyTimes();
++
 +    EasyMock.replay(gatewayConfig);
 +
 +    try {
 +      services.init(gatewayConfig, options);
 +    } catch (ServiceLifecycleException e) {
 +      e.printStackTrace();
 +    }
 +
 +    DeploymentFactory.setGatewayServices(services);
 +    final TopologyService monitor = services
 +        .getService(GatewayServices.TOPOLOGY_SERVICE);
 +    monitor.addTopologyChangeListener(topoListener);
 +    monitor.reloadTopologies();
 +
 +  }
 +
 +  private static File createDir() throws IOException {
 +    return TestUtils
 +        .createTempDir(WebsocketEchoTest.class.getSimpleName() + "-");
 +  }
 +
 +  /**
 +   * Intentionally add bad URL
 +   *
 +   * @param backend
 +   * @return
 +   */
 +  private static XMLTag createKnoxTopology(final String backend) {
 +    XMLTag xml = XMLDoc.newDocument(true).addRoot("topology").addTag("service")
 +        .addTag("role").addText("WEBSOCKET").addTag("url").addText(backend)
 +        .gotoParent().gotoRoot();
 +    // System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  private static class TestTopologyListener implements TopologyListener {
 +
 +    public ArrayList<List<TopologyEvent>> events = new ArrayList<List<TopologyEvent>>();
 +
 +    @Override
 +    public void handleTopologyEvent(List<TopologyEvent> events) {
 +      this.events.add(events);
 +
 +      synchronized (this) {
 +        for (TopologyEvent event : events) {
 +          if (!event.getType().equals(TopologyEvent.Type.DELETED)) {
 +
 +            /* for this test we only care about this part */
 +            DeploymentFactory.createDeployment(gatewayConfig,
 +                event.getTopology());
 +
 +          }
 +        }
 +
 +      }
 +
 +    }
 +
 +  }
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketEchoTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketEchoTest.java
index 268e14b,0000000..64ad87c
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketEchoTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketEchoTest.java
@@@ -1,388 -1,0 +1,399 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.websockets;
 +
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.MatcherAssert.assertThat;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.net.URI;
 +import java.net.URL;
 +import java.util.ArrayList;
++import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.concurrent.TimeUnit;
 +
 +import javax.websocket.ContainerProvider;
 +import javax.websocket.Session;
 +import javax.websocket.WebSocketContainer;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.config.impl.GatewayConfigImpl;
 +import org.apache.knox.gateway.deploy.DeploymentFactory;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.topology.TopologyService;
 +import org.apache.knox.gateway.topology.TopologyEvent;
 +import org.apache.knox.gateway.topology.TopologyListener;
 +import org.apache.knox.test.TestUtils;
 +import org.easymock.EasyMock;
 +import org.eclipse.jetty.server.Server;
 +import org.eclipse.jetty.server.ServerConnector;
 +import org.eclipse.jetty.server.handler.ContextHandler;
 +import org.eclipse.jetty.server.handler.HandlerCollection;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +
 +/**
 + * A basic test that attempts to proxy websocket connections through Knox
 + * gateway.
 + * <p>
 + * The way the test is set up is as follows: <br/>
 + * <ul>
 + * <li>A mock Websocket server is set up which simply echoes the messages sent by
 + * the client.
 + * <li>Knox Gateway is set up with websocket handler
 + * {@link GatewayWebsocketHandler} that can proxy the requests.
 + * <li>Appropriate Topology and service definition files are set up with the
 + * address of the Websocket server.
 + * <li>A mock client is set up to connect to the gateway.
 + * </ul>
 + * 
 + * The test confirms that the message is sent all the way to the backend
 + * Websocket server through Knox and back.
 + * 
 + * 
 + * @since 0.10
 + */
 +public class WebsocketEchoTest {
 +
 +  /**
 +   * Simulate backend websocket
 +   */
 +  private static Server backendServer;
 +  /**
 +   * URI for backend websocket server
 +   */
 +  private static URI backendServerUri;
 +
 +  /**
 +   * Mock Gateway server
 +   */
 +  private static Server gatewayServer;
 +
 +  /**
 +   * Mock gateway config
 +   */
 +  private static GatewayConfig gatewayConfig;
 +
 +  private static GatewayServices services;
 +
 +  /**
 +   * URI for gateway server
 +   */
 +  private static URI serverUri;
 +
 +  private static File topoDir;
 +
 +  public WebsocketEchoTest() {
 +    super();
 +  }
 +
 +  @BeforeClass
 +  public static void startServers() throws Exception {
 +
 +    startWebsocketServer();
 +    startGatewayServer();
 +
 +  }
 +
 +  @AfterClass
 +  public static void stopServers() {
 +    try {
 +      gatewayServer.stop();
 +      backendServer.stop();
 +    } catch (final Exception e) {
 +      e.printStackTrace(System.err);
 +    }
 +
 +    /* Cleanup the created files */
 +    FileUtils.deleteQuietly(topoDir);
 +
 +  }
 +
 +  /**
 +   * Test direct connection to websocket server without gateway
 +   * 
 +   * @throws Exception
 +   */
 +  @Test
 +  public void testDirectEcho() throws Exception {
 +
 +    WebSocketContainer container = ContainerProvider.getWebSocketContainer();
 +    WebsocketClient client = new WebsocketClient();
 +
 +    Session session = container.connectToServer(client, backendServerUri);
 +
 +    session.getBasicRemote().sendText("Echo");
 +    client.messageQueue.awaitMessages(1, 1000, TimeUnit.MILLISECONDS);
 +
 +  }
 +
 +  /**
 +   * Test websocket proxying through gateway.
 +   * 
 +   * @throws Exception
 +   */
 +  @Test
 +  public void testGatewayEcho() throws Exception {
 +    WebSocketContainer container = ContainerProvider.getWebSocketContainer();
 +
 +    WebsocketClient client = new WebsocketClient();
 +    Session session = container.connectToServer(client,
 +        new URI(serverUri.toString() + "gateway/websocket/ws"));
 +
 +    session.getBasicRemote().sendText("Echo");
 +    client.messageQueue.awaitMessages(1, 1000, TimeUnit.MILLISECONDS);
 +
 +    assertThat(client.messageQueue.get(0), is("Echo"));
 +
 +  }
 +
 +  /**
 +   * Test websocket rewrite rules proxying through gateway.
 +   *
 +   * @throws Exception
 +   */
 +  @Test
 +  public void testGatewayRewriteEcho() throws Exception {
 +    WebSocketContainer container = ContainerProvider.getWebSocketContainer();
 +
 +    WebsocketClient client = new WebsocketClient();
 +    Session session = container.connectToServer(client,
 +            new URI(serverUri.toString() + "gateway/websocket/123foo456bar/channels"));
 +
 +    session.getBasicRemote().sendText("Echo");
 +    client.messageQueue.awaitMessages(1, 1000, TimeUnit.MILLISECONDS);
 +
 +    assertThat(client.messageQueue.get(0), is("Echo"));
 +
 +  }
 +
 +  /**
 +   * Start Mock Websocket server that acts as backend.
 +   * 
 +   * @throws Exception
 +   */
 +  private static void startWebsocketServer() throws Exception {
 +
 +    backendServer = new Server();
 +    ServerConnector connector = new ServerConnector(backendServer);
 +    backendServer.addConnector(connector);
 +
 +    final WebsocketEchoHandler handler = new WebsocketEchoHandler();
 +
 +    ContextHandler context = new ContextHandler();
 +    context.setContextPath("/");
 +    context.setHandler(handler);
 +    backendServer.setHandler(context);
 +
 +    // Start Server
 +    backendServer.start();
 +
 +    String host = connector.getHost();
 +    if (host == null) {
 +      host = "localhost";
 +    }
 +    int port = connector.getLocalPort();
 +    backendServerUri = new URI(String.format("ws://%s:%d/ws", host, port));
 +
 +  }
 +
 +  /**
 +   * Start Gateway Server.
 +   * 
 +   * @throws Exception
 +   */
 +  private static void startGatewayServer() throws Exception {
 +    gatewayServer = new Server();
 +    final ServerConnector connector = new ServerConnector(gatewayServer);
 +    gatewayServer.addConnector(connector);
 +
 +    /* workaround so we can add our handler later at runtime */
 +    HandlerCollection handlers = new HandlerCollection(true);
 +
 +    /* add some initial handlers */
 +    ContextHandler context = new ContextHandler();
 +    context.setContextPath("/");
 +    handlers.addHandler(context);
 +
 +    gatewayServer.setHandler(handlers);
 +
 +    // Start Server
 +    gatewayServer.start();
 +
 +    String host = connector.getHost();
 +    if (host == null) {
 +      host = "localhost";
 +    }
 +    int port = connector.getLocalPort();
 +    serverUri = new URI(String.format("ws://%s:%d/", host, port));
 +
 +    /* Setup websocket handler */
 +    setupGatewayConfig(backendServerUri.toString());
 +
 +    final GatewayWebsocketHandler gatewayWebsocketHandler = new GatewayWebsocketHandler(
 +        gatewayConfig, services);
 +    handlers.addHandler(gatewayWebsocketHandler);
 +    gatewayWebsocketHandler.start();
 +  }
 +
 +  /**
 +   * Initialize the configs and components required for this test.
 +   * 
 +   * @param backend
 +   * @throws IOException
 +   */
 +  private static void setupGatewayConfig(final String backend)
 +      throws IOException {
 +    services = new DefaultGatewayServices();
 +
 +    topoDir = createDir();
 +    URL serviceUrl = ClassLoader.getSystemResource("websocket-services");
 +
 +    final File descriptor = new File(topoDir, "websocket.xml");
 +    final FileOutputStream stream = new FileOutputStream(descriptor);
 +    createKnoxTopology(backend).toStream(stream);
 +    stream.close();
 +
 +    final TestTopologyListener topoListener = new TestTopologyListener();
 +
 +    final Map<String, String> options = new HashMap<>();
 +    options.put("persist-master", "false");
 +    options.put("master", "password");
 +
 +    gatewayConfig = EasyMock.createNiceMock(GatewayConfig.class);
 +    EasyMock.expect(gatewayConfig.getGatewayTopologyDir())
 +        .andReturn(topoDir.toString()).anyTimes();
 +
++    EasyMock.expect(gatewayConfig.getGatewayProvidersConfigDir())
++            .andReturn(topoDir.getAbsolutePath() + "/shared-providers").anyTimes();
++
++    EasyMock.expect(gatewayConfig.getGatewayDescriptorsDir())
++            .andReturn(topoDir.getAbsolutePath() + "/descriptors").anyTimes();
++
 +    EasyMock.expect(gatewayConfig.getGatewayServicesDir())
 +        .andReturn(serviceUrl.getFile()).anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getEphemeralDHKeySize()).andReturn("2048")
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getGatewaySecurityDir())
 +        .andReturn(topoDir.toString()).anyTimes();
 +
 +    /* Websocket configs */
 +    EasyMock.expect(gatewayConfig.isWebsocketEnabled()).andReturn(true)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxTextMessageSize())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxBinaryMessageSize())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxTextMessageBufferSize())
 +        .andReturn(
 +            GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxBinaryMessageBufferSize())
 +        .andReturn(
 +            GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketInputBufferSize())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketAsyncWriteTimeout())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketIdleTimeout())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_IDLE_TIMEOUT).anyTimes();
 +
++    EasyMock.expect(gatewayConfig.getRemoteRegistryConfigurationNames())
++            .andReturn(Collections.emptyList())
++            .anyTimes();
++
 +    EasyMock.replay(gatewayConfig);
 +
 +    try {
 +      services.init(gatewayConfig, options);
 +    } catch (ServiceLifecycleException e) {
 +      e.printStackTrace();
 +    }
 +
 +    DeploymentFactory.setGatewayServices(services);
 +    final TopologyService monitor = services
 +        .getService(GatewayServices.TOPOLOGY_SERVICE);
 +    monitor.addTopologyChangeListener(topoListener);
 +    monitor.reloadTopologies();
 +
 +  }
 +
 +  private static File createDir() throws IOException {
 +    return TestUtils
 +        .createTempDir(WebsocketEchoTest.class.getSimpleName() + "-");
 +  }
 +
 +  private static XMLTag createKnoxTopology(final String backend) {
 +    XMLTag xml = XMLDoc.newDocument(true).addRoot("topology").addTag("service")
 +        .addTag("role").addText("WEBSOCKET").addTag("url").addText(backend)
 +        .gotoParent().gotoRoot();
 +    // System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  private static class TestTopologyListener implements TopologyListener {
 +
 +    public ArrayList<List<TopologyEvent>> events = new ArrayList<List<TopologyEvent>>();
 +
 +    @Override
 +    public void handleTopologyEvent(List<TopologyEvent> events) {
 +      this.events.add(events);
 +
 +      synchronized (this) {
 +        for (TopologyEvent event : events) {
 +          if (!event.getType().equals(TopologyEvent.Type.DELETED)) {
 +
 +            /* for this test we only care about this part */
 +            DeploymentFactory.createDeployment(gatewayConfig,
 +                event.getTopology());
 +
 +          }
 +        }
 +
 +      }
 +
 +    }
 +
 +  }
 +}
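
For reference, the websocket.xml descriptor that createKnoxTopology() writes to the temporary
topology directory in the test above is, in intent, equivalent to a hand-written topology along
the following lines (the backend host and port are assigned at runtime, so the URL below is a
placeholder; this is an illustrative sketch, not the exact serializer output):

    <topology>
        <service>
            <role>WEBSOCKET</role>
            <url>ws://localhost:PORT/ws</url>
        </service>
    </topology>

Because the descriptor file is named websocket.xml, the test client reaches the backend through
the gateway at ws://host:port/gateway/websocket/ws, which matches the pattern
/gateway/{topology-name}/{backend-path} used in the tests above.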

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketMultipleConnectionTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketMultipleConnectionTest.java
index 42bc9c3,0000000..5e5006c
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketMultipleConnectionTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketMultipleConnectionTest.java
@@@ -1,389 -1,0 +1,400 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.websockets;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.lang.management.ManagementFactory;
 +import java.lang.management.MemoryMXBean;
 +import java.net.URI;
 +import java.net.URL;
 +import java.util.ArrayList;
++import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.concurrent.CountDownLatch;
 +import java.util.concurrent.TimeUnit;
 +
 +import javax.websocket.ContainerProvider;
 +import javax.websocket.Endpoint;
 +import javax.websocket.EndpointConfig;
 +import javax.websocket.MessageHandler;
 +import javax.websocket.Session;
 +import javax.websocket.WebSocketContainer;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.config.impl.GatewayConfigImpl;
 +import org.apache.knox.gateway.deploy.DeploymentFactory;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.topology.TopologyService;
 +import org.apache.knox.gateway.topology.TopologyEvent;
 +import org.apache.knox.gateway.topology.TopologyListener;
 +import org.apache.knox.test.TestUtils;
 +import org.easymock.EasyMock;
 +import org.eclipse.jetty.server.Server;
 +import org.eclipse.jetty.server.ServerConnector;
 +import org.eclipse.jetty.server.handler.ContextHandler;
 +import org.eclipse.jetty.server.handler.HandlerCollection;
 +import org.eclipse.jetty.util.thread.QueuedThreadPool;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +
 +/**
 + * Test how Knox holds up under multiple concurrent connections.
 + *
 + */
 +public class WebsocketMultipleConnectionTest {
 +  /**
 +   * Simulate backend websocket
 +   */
 +  private static Server backendServer;
 +  /**
 +   * URI for backend websocket server
 +   */
 +  private static URI backendServerUri;
 +
 +  /**
 +   * Mock Gateway server
 +   */
 +  private static Server gatewayServer;
 +
 +  /**
 +   * Mock gateway config
 +   */
 +  private static GatewayConfig gatewayConfig;
 +
 +  private static GatewayServices services;
 +
 +  /**
 +   * URI for gateway server
 +   */
 +  private static URI serverUri;
 +
 +  private static File topoDir;
 +
 +  /**
 +   * Maximum number of open connections to test.
 +   */
 +  private static int MAX_CONNECTIONS = 100;
 +
 +  public WebsocketMultipleConnectionTest() {
 +    super();
 +  }
 +
 +  @BeforeClass
 +  public static void startServers() throws Exception {
 +
 +    startWebsocketServer();
 +    startGatewayServer();
 +
 +  }
 +
 +  @AfterClass
 +  public static void stopServers() {
 +    try {
 +      gatewayServer.stop();
 +      backendServer.stop();
 +    } catch (final Exception e) {
 +      e.printStackTrace(System.err);
 +    }
 +
 +    /* Cleanup the created files */
 +    FileUtils.deleteQuietly(topoDir);
 +
 +  }
 +
 +  /**
 +   * Test websocket proxying through gateway.
 +   * 
 +   * @throws Exception
 +   */
 +  @Test
 +  public void testMultipleConnections() throws Exception {
 +    WebSocketContainer container = ContainerProvider.getWebSocketContainer();
 +
 +    final CountDownLatch latch = new CountDownLatch(MAX_CONNECTIONS);
 +
 +    Session[] sessions = new Session[MAX_CONNECTIONS];
 +
 +    MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
 +
 +    System.gc();
 +    final long heapt1 = memoryMXBean.getHeapMemoryUsage().getUsed();
 +    final long nonHeapt1 = memoryMXBean.getNonHeapMemoryUsage().getUsed();
 +
 +    for (int i = 0; i < MAX_CONNECTIONS; i++) {
 +
 +      sessions[i] = container.connectToServer(new WebsocketClient() {
 +
 +        @Override
 +        public void onMessage(String message) {
 +          latch.countDown();
 +
 +        }
 +
 +      }, new URI(serverUri.toString() + "gateway/websocket/ws"));
 +
 +    }
 +
 +    for (int i = 0; i < MAX_CONNECTIONS; i++) {
 +      /* make sure the session is open and valid before trying to send */
 +      if(sessions[i].isOpen() && sessions[i].getBasicRemote() != null) {
 +        sessions[i].getBasicRemote().sendText("OK");
 +      }
 +    }
 +
 +    latch.await(5 * MAX_CONNECTIONS, TimeUnit.MILLISECONDS);
 +
 +    System.gc();
 +
 +    final long heapUsed = memoryMXBean.getHeapMemoryUsage().getUsed() - heapt1;
 +    final long nonHeapUsed = memoryMXBean.getNonHeapMemoryUsage().getUsed()
 +        - nonHeapt1;
 +
 +    System.out.println("heapUsed = " + heapUsed);
 +    System.out.println("nonHeapUsed = " + nonHeapUsed);
 +
 +    /* 90 KB per connection */
 +    /*
 +    long expected = 90 * 1024 * MAX_CONNECTIONS;
 +    assertThat("heap used", heapUsed, lessThan(expected));
 +    */
 +  }
 +
 +  /**
 +   * Start Mock Websocket server that acts as backend.
 +   * 
 +   * @throws Exception
 +   */
 +  private static void startWebsocketServer() throws Exception {
 +
 +    backendServer = new Server(new QueuedThreadPool(254));
 +    ServerConnector connector = new ServerConnector(backendServer);
 +    backendServer.addConnector(connector);
 +
 +    final WebsocketEchoHandler handler = new WebsocketEchoHandler();
 +
 +    ContextHandler context = new ContextHandler();
 +    context.setContextPath("/");
 +    context.setHandler(handler);
 +    backendServer.setHandler(context);
 +
 +    // Start Server
 +    backendServer.start();
 +
 +    String host = connector.getHost();
 +    if (host == null) {
 +      host = "localhost";
 +    }
 +    int port = connector.getLocalPort();
 +    backendServerUri = new URI(String.format("ws://%s:%d/ws", host, port));
 +
 +  }
 +
 +  /**
 +   * Start Gateway Server.
 +   * 
 +   * @throws Exception
 +   */
 +  private static void startGatewayServer() throws Exception {
 +    /* use default Max threads */
 +    gatewayServer = new Server(new QueuedThreadPool(254));
 +    final ServerConnector connector = new ServerConnector(gatewayServer);
 +    gatewayServer.addConnector(connector);
 +
 +    /* workaround so we can add our handler later at runtime */
 +    HandlerCollection handlers = new HandlerCollection(true);
 +
 +    /* add some initial handlers */
 +    ContextHandler context = new ContextHandler();
 +    context.setContextPath("/");
 +    handlers.addHandler(context);
 +
 +    gatewayServer.setHandler(handlers);
 +
 +    // Start Server
 +    gatewayServer.start();
 +
 +    String host = connector.getHost();
 +    if (host == null) {
 +      host = "localhost";
 +    }
 +    int port = connector.getLocalPort();
 +    serverUri = new URI(String.format("ws://%s:%d/", host, port));
 +
 +    /* Setup websocket handler */
 +    setupGatewayConfig(backendServerUri.toString());
 +
 +    final GatewayWebsocketHandler gatewayWebsocketHandler = new GatewayWebsocketHandler(
 +        gatewayConfig, services);
 +    handlers.addHandler(gatewayWebsocketHandler);
 +    gatewayWebsocketHandler.start();
 +  }
 +
 +  /**
 +   * Initialize the configs and components required for this test.
 +   * 
 +   * @param backend
 +   * @throws IOException
 +   */
 +  private static void setupGatewayConfig(final String backend)
 +      throws IOException {
 +    services = new DefaultGatewayServices();
 +
 +    topoDir = createDir();
 +    URL serviceUrl = ClassLoader.getSystemResource("websocket-services");
 +
 +    final File descriptor = new File(topoDir, "websocket.xml");
 +    final FileOutputStream stream = new FileOutputStream(descriptor);
 +    createKnoxTopology(backend).toStream(stream);
 +    stream.close();
 +
 +    final TestTopologyListener topoListener = new TestTopologyListener();
 +
 +    final Map<String, String> options = new HashMap<>();
 +    options.put("persist-master", "false");
 +    options.put("master", "password");
 +
 +    gatewayConfig = EasyMock.createNiceMock(GatewayConfig.class);
 +    EasyMock.expect(gatewayConfig.getGatewayTopologyDir())
 +        .andReturn(topoDir.toString()).anyTimes();
 +
++    EasyMock.expect(gatewayConfig.getGatewayProvidersConfigDir())
++            .andReturn(topoDir.getAbsolutePath() + "/shared-providers").anyTimes();
++
++    EasyMock.expect(gatewayConfig.getGatewayDescriptorsDir())
++            .andReturn(topoDir.getAbsolutePath() + "/descriptors").anyTimes();
++
 +    EasyMock.expect(gatewayConfig.getGatewayServicesDir())
 +        .andReturn(serviceUrl.getFile()).anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getEphemeralDHKeySize()).andReturn("2048")
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getGatewaySecurityDir())
 +        .andReturn(topoDir.toString()).anyTimes();
 +
 +    /* Websocket configs */
 +    EasyMock.expect(gatewayConfig.isWebsocketEnabled()).andReturn(true)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxTextMessageSize())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxBinaryMessageSize())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxTextMessageBufferSize())
 +        .andReturn(
 +            GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketMaxBinaryMessageBufferSize())
 +        .andReturn(
 +            GatewayConfigImpl.DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketInputBufferSize())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketAsyncWriteTimeout())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT)
 +        .anyTimes();
 +
 +    EasyMock.expect(gatewayConfig.getWebsocketIdleTimeout())
 +        .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_IDLE_TIMEOUT).anyTimes();
 +
++    EasyMock.expect(gatewayConfig.getRemoteRegistryConfigurationNames())
++            .andReturn(Collections.emptyList())
++            .anyTimes();
++
 +    EasyMock.replay(gatewayConfig);
 +
 +    try {
 +      services.init(gatewayConfig, options);
 +    } catch (ServiceLifecycleException e) {
 +      e.printStackTrace();
 +    }
 +
 +    DeploymentFactory.setGatewayServices(services);
 +    final TopologyService monitor = services
 +        .getService(GatewayServices.TOPOLOGY_SERVICE);
 +    monitor.addTopologyChangeListener(topoListener);
 +    monitor.reloadTopologies();
 +
 +  }
 +
 +  private static File createDir() throws IOException {
 +    return TestUtils
 +        .createTempDir(WebsocketMultipleConnectionTest.class.getSimpleName() + "-");
 +  }
 +
 +  private static XMLTag createKnoxTopology(final String backend) {
 +    XMLTag xml = XMLDoc.newDocument(true).addRoot("topology").addTag("service")
 +        .addTag("role").addText("WEBSOCKET").addTag("url").addText(backend)
 +        .gotoParent().gotoRoot();
 +    // System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  private static class TestTopologyListener implements TopologyListener {
 +
 +    public ArrayList<List<TopologyEvent>> events = new ArrayList<List<TopologyEvent>>();
 +
 +    @Override
 +    public void handleTopologyEvent(List<TopologyEvent> events) {
 +      this.events.add(events);
 +
 +      synchronized (this) {
 +        for (TopologyEvent event : events) {
 +          if (!event.getType().equals(TopologyEvent.Type.DELETED)) {
 +
 +            /* for this test we only care about this part */
 +            DeploymentFactory.createDeployment(gatewayConfig,
 +                event.getTopology());
 +
 +          }
 +        }
 +
 +      }
 +
 +    }
 +
 +  }
 +
 +  private static abstract class WebsocketClient extends Endpoint
 +      implements MessageHandler.Whole<String> {
 +    @Override
 +    public void onOpen(Session session, EndpointConfig config) {
 +      session.addMessageHandler(this);
 +    }
 +  }
 +
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
----------------------------------------------------------------------
diff --cc gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
index c6135ae,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
+++ b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml


[21/49] knox git commit: KNOX-1117 - HostMap Provider configuration comment in topologies included with Knox has typos (Phil Zampino via Sandeep More)

Posted by mo...@apache.org.
KNOX-1117 - HostMap Provider configuration comment in topologies included with Knox has typos (Phil Zampino via Sandeep More)

Signed-off-by: Sandeep More <mo...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/7b211463
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/7b211463
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/7b211463

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 7b2114638cabcdcbdb72534c14ae349dc9ef7a42
Parents: a8fbf80
Author: Phil Zampino <pz...@gmail.com>
Authored: Thu Nov 16 10:31:35 2017 -0500
Committer: Sandeep More <mo...@apache.org>
Committed: Fri Dec 1 11:49:42 2017 -0500

----------------------------------------------------------------------
 gateway-release/home/conf/topologies/admin.xml  | 21 ++++----------------
 .../home/conf/topologies/knoxsso.xml            |  5 ++++-
 .../home/conf/topologies/manager.xml            | 21 ++++----------------
 .../home/conf/topologies/sandbox.xml            | 21 ++++----------------
 4 files changed, 16 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/7b211463/gateway-release/home/conf/topologies/admin.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/topologies/admin.xml b/gateway-release/home/conf/topologies/admin.xml
index 2f3d9a9..8a0231e 100644
--- a/gateway-release/home/conf/topologies/admin.xml
+++ b/gateway-release/home/conf/topologies/admin.xml
@@ -78,27 +78,14 @@
             <enabled>true</enabled>
         </provider>
 
-        <!--
-        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
-        For example, a hadoop service running in AWS may return a response that includes URLs containing the
-        some AWS internal host name.  If the client needs to make a subsequent request to the host identified
-        in those URLs they need to be mapped to external host names that the client Knox can use to connect.
-
-        If the external hostname and internal host names are same turn of this provider by setting the value of
-        enabled parameter as false.
-
-        The name parameter specifies the external host names in a comma separated list.
-        The value parameter specifies corresponding internal host names in a comma separated list.
-
-        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out
-        of box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the
-        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.
-        -->
         <provider>
             <role>hostmap</role>
             <name>static</name>
             <enabled>true</enabled>
-            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+            <param>
+                <name>localhost</name>
+                <value>sandbox,sandbox.hortonworks.com</value>
+            </param>
         </provider>
 
     </gateway>

http://git-wip-us.apache.org/repos/asf/knox/blob/7b211463/gateway-release/home/conf/topologies/knoxsso.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/topologies/knoxsso.xml b/gateway-release/home/conf/topologies/knoxsso.xml
index c0b48ce..c0f4d63 100644
--- a/gateway-release/home/conf/topologies/knoxsso.xml
+++ b/gateway-release/home/conf/topologies/knoxsso.xml
@@ -89,7 +89,10 @@
             <role>hostmap</role>
             <name>static</name>
             <enabled>true</enabled>
-            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+            <param>
+                <name>localhost</name>
+                <value>sandbox,sandbox.hortonworks.com</value>
+            </param>
         </provider>
 
     </gateway>

http://git-wip-us.apache.org/repos/asf/knox/blob/7b211463/gateway-release/home/conf/topologies/manager.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/topologies/manager.xml b/gateway-release/home/conf/topologies/manager.xml
index 08416c3..736888b 100644
--- a/gateway-release/home/conf/topologies/manager.xml
+++ b/gateway-release/home/conf/topologies/manager.xml
@@ -89,27 +89,14 @@
             <enabled>true</enabled>
         </provider>
 
-        <!--
-        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
-        For example, a hadoop service running in AWS may return a response that includes URLs containing the
-        some AWS internal host name.  If the client needs to make a subsequent request to the host identified
-        in those URLs they need to be mapped to external host names that the client Knox can use to connect.
-
-        If the external hostname and internal host names are same turn of this provider by setting the value of
-        enabled parameter as false.
-
-        The name parameter specifies the external host names in a comma separated list.
-        The value parameter specifies corresponding internal host names in a comma separated list.
-
-        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out
-        of box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the
-        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.
-        -->
         <provider>
             <role>hostmap</role>
             <name>static</name>
             <enabled>true</enabled>
-            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+            <param>
+                <name>localhost</name>
+                <value>sandbox,sandbox.hortonworks.com</value>
+            </param>
         </provider>
 
     </gateway>

http://git-wip-us.apache.org/repos/asf/knox/blob/7b211463/gateway-release/home/conf/topologies/sandbox.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/topologies/sandbox.xml b/gateway-release/home/conf/topologies/sandbox.xml
index dabee2b..10493f0 100644
--- a/gateway-release/home/conf/topologies/sandbox.xml
+++ b/gateway-release/home/conf/topologies/sandbox.xml
@@ -68,27 +68,14 @@
             <enabled>true</enabled>
         </provider>
 
-        <!--
-        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
-        For example, a hadoop service running in AWS may return a response that includes URLs containing the
-        some AWS internal host name.  If the client needs to make a subsequent request to the host identified
-        in those URLs they need to be mapped to external host names that the client Knox can use to connect.
-
-        If the external hostname and internal host names are same turn of this provider by setting the value of
-        enabled parameter as false.
-
-        The name parameter specifies the external host names in a comma separated list.
-        The value parameter specifies corresponding internal host names in a comma separated list.
-
-        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out
-        of box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the
-        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.
-        -->
         <provider>
             <role>hostmap</role>
             <name>static</name>
             <enabled>true</enabled>
-            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+            <param>
+                <name>localhost</name>
+                <value>sandbox,sandbox.hortonworks.com</value>
+            </param>
         </provider>
 
     </gateway>
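
For context on the hostmap provider these topologies configure: as the comment removed above
described, the param name holds the externally accessible host name(s) and the param value holds
the corresponding internal host names, both as comma-separated lists. A hypothetical mapping for
a cloud-hosted cluster (host names invented purely for illustration) follows the same pattern:

    <provider>
        <role>hostmap</role>
        <name>static</name>
        <enabled>true</enabled>
        <param>
            <name>knox.example.com</name>
            <value>ip-10-0-0-5.ec2.internal,ip-10-0-0-6.ec2.internal</value>
        </param>
    </provider>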


[43/49] knox git commit: KNOX-998 - Merge from trunk 0.14.0 code

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
deleted file mode 100644
index dd75028..0000000
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
+++ /dev/null
@@ -1,603 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.monitor;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.curator.test.InstanceSpec;
-import org.apache.curator.test.TestingCluster;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.service.config.remote.zk.ZooKeeperClientService;
-import org.apache.hadoop.gateway.service.config.remote.zk.ZooKeeperClientServiceProvider;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.data.ACL;
-import org.apache.zookeeper.data.Id;
-import org.easymock.EasyMock;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Test the RemoteConfigurationMonitor functionality with SASL configured, and znode ACLs applied.
- *
- * The expected implementation is org.apache.hadoop.gateway.topology.monitor.zk.ZooKeeperConfigMonitor
- *
- * Digest-based SASL is used for this test, but since that is dictated solely by the JAAS config, Kerberos-based SASL
- * should work in exactly the same way, simply by modifying the SASL config.
- */
-public class RemoteConfigurationMonitorTest {
-
-    private static final String PATH_KNOX = "/knox";
-    private static final String PATH_KNOX_CONFIG = PATH_KNOX + "/config";
-    private static final String PATH_KNOX_PROVIDERS = PATH_KNOX_CONFIG + "/shared-providers";
-    private static final String PATH_KNOX_DESCRIPTORS = PATH_KNOX_CONFIG + "/descriptors";
-
-    private static final String PATH_AUTH_TEST = "/auth_test/child_node";
-
-
-    private static final String ALT_USERNAME = "notyou";
-    private static final String ZK_USERNAME = "testsasluser";
-    private static final String ZK_PASSWORD = "testsaslpwd";
-
-    private static final ACL ANY_AUTHENTICATED_USER_ALL = new ACL(ZooDefs.Perms.ALL, new Id("auth", ""));
-    private static final ACL SASL_TESTUSER_ALL = new ACL(ZooDefs.Perms.ALL, new Id("sasl", ZK_USERNAME));
-
-    private static File testTmp;
-    private static File providersDir;
-    private static File descriptorsDir;
-
-    private static TestingCluster zkCluster;
-
-    private static CuratorFramework client;
-
-    @BeforeClass
-    public static void setupSuite() throws Exception {
-        testTmp = TestUtils.createTempDir(RemoteConfigurationMonitorTest.class.getName());
-        File confDir = TestUtils.createTempDir(testTmp + "/conf");
-        providersDir = TestUtils.createTempDir(confDir + "/shared-providers");
-        descriptorsDir = TestUtils.createTempDir(confDir + "/descriptors");
-    }
-
-    @AfterClass
-    public static void tearDownSuite() throws Exception {
-        // Delete the working dir
-        testTmp.delete();
-    }
-
-    @Before
-    public void setupTest() throws Exception {
-        configureAndStartZKCluster();
-    }
-
-    @After
-    public void tearDownTest() throws Exception {
-        // Clean up the ZK nodes, and close the client
-        if (client != null) {
-            if (client.checkExists().forPath(PATH_KNOX) != null) {
-                client.delete().deletingChildrenIfNeeded().forPath(PATH_KNOX);
-            }
-            client.close();
-        }
-
-        // Shutdown the ZK cluster
-        zkCluster.close();
-    }
-
-    /**
-     * Create and persist a JAAS configuration file, defining the SASL config for both the ZooKeeper cluster instances
-     * and ZooKeeper clients.
-     *
-     * @param username The digest username
-     * @param password The digest password
-     *
-     * @return The JAAS configuration file
-     */
-    private static File setupDigestSaslConfig(String username, String password) throws Exception {
-        File saslConfigFile = new File(testTmp, "server-jaas.conf");
-        FileWriter fw = new FileWriter(saslConfigFile);
-        fw.write("Server {\n" +
-                "    org.apache.zookeeper.server.auth.DigestLoginModule required\n" +
-                "    user_" + username + " =\"" + password + "\";\n" +
-                "};\n" +
-                "Client {\n" +
-                "    org.apache.zookeeper.server.auth.DigestLoginModule required\n" +
-                "    username=\"" + username + "\"\n" +
-                "    password=\"" + password + "\";\n" +
-                "};\n");
-        fw.close();
-        return saslConfigFile;
-    }
-
-    /**
-     * Configure and start the ZooKeeper test cluster, and create the znodes monitored by the RemoteConfigurationMonitor.
-     */
-    private static void configureAndStartZKCluster() throws Exception {
-        // Configure security for the ZK cluster instances
-        Map<String, Object> customInstanceSpecProps = new HashMap<>();
-        customInstanceSpecProps.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
-        customInstanceSpecProps.put("requireClientAuthScheme", "sasl");
-
-        // Define the test cluster
-        List<InstanceSpec> instanceSpecs = new ArrayList<>();
-        for (int i = 0 ; i < 3 ; i++) {
-            InstanceSpec is = new InstanceSpec(null, -1, -1, -1, false, (i+1), -1, -1, customInstanceSpecProps);
-            instanceSpecs.add(is);
-        }
-        zkCluster = new TestingCluster(instanceSpecs);
-
-        // Configure auth for the ZooKeeper servers and the clients
-        File saslConfigFile = setupDigestSaslConfig(ZK_USERNAME, ZK_PASSWORD);
-
-        // This system property is used by the ZooKeeper cluster instances, the test driver client, and the
-        // RemoteConfigurationMonitor implementation for SASL authentication/authorization
-        System.setProperty("java.security.auth.login.config", saslConfigFile.getAbsolutePath());
-
-        // Start the cluster
-        zkCluster.start();
-
-        // Create the client for the test cluster
-        client = CuratorFrameworkFactory.builder()
-                                        .connectString(zkCluster.getConnectString())
-                                        .retryPolicy(new ExponentialBackoffRetry(100, 3))
-                                        .build();
-        assertNotNull(client);
-        client.start();
-
-        // Create test config nodes with an ACL for a sasl user that is NOT configured for the test client
-        List<ACL> acls = Arrays.asList(new ACL(ZooDefs.Perms.ALL, new Id("sasl", ALT_USERNAME)),
-                                       new ACL(ZooDefs.Perms.READ, ZooDefs.Ids.ANYONE_ID_UNSAFE));
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_AUTH_TEST);
-        assertNotNull("Failed to create node:" + PATH_AUTH_TEST,
-                      client.checkExists().forPath(PATH_AUTH_TEST));
-    }
-
-
-    private static void validateKnoxConfigNodeACLs(List<ACL> expectedACLS, List<ACL> actualACLs) throws Exception {
-        assertEquals(expectedACLS.size(), actualACLs.size());
-        int matchedCount = 0;
-        for (ACL expected : expectedACLS) {
-            for (ACL actual : actualACLs) {
-                Id expectedId = expected.getId();
-                Id actualId = actual.getId();
-                if (actualId.getScheme().equals(expectedId.getScheme()) && actualId.getId().equals(expectedId.getId())) {
-                    matchedCount++;
-                    assertEquals(expected.getPerms(), actual.getPerms());
-                    break;
-                }
-            }
-        }
-        assertEquals("ACL mismatch despite being same quantity.", expectedACLS.size(), matchedCount);
-    }
-
-
-    @Test
-    public void testZooKeeperConfigMonitorSASLNodesExistWithUnacceptableACL() throws Exception {
-        final String configMonitorName = "zkConfigClient";
-        final String alias = "zkPass";
-
-        // Setup the base GatewayConfig mock
-        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
-        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
-        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
-        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
-                .andReturn(Collections.singletonList(configMonitorName))
-                .anyTimes();
-        final String registryConfig =
-                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
-                        GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
-                        GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + ZK_USERNAME + ";" +
-                        GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=Digest;" +
-                        GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + alias;
-        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
-                .andReturn(registryConfig).anyTimes();
-        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
-        EasyMock.replay(gc);
-
-        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
-        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(alias))
-                .andReturn(ZK_PASSWORD.toCharArray())
-                .anyTimes();
-        EasyMock.replay(aliasService);
-
-        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
-        clientService.setAliasService(aliasService);
-        clientService.init(gc, Collections.emptyMap());
-        clientService.start();
-
-        RemoteConfigurationMonitorFactory.setClientService(clientService);
-
-        RemoteConfigurationMonitor cm = RemoteConfigurationMonitorFactory.get(gc);
-        assertNotNull("Failed to load RemoteConfigurationMonitor", cm);
-
-        final ACL ANY_AUTHENTICATED_USER_ALL = new ACL(ZooDefs.Perms.ALL, new Id("auth", ""));
-        List<ACL> acls = Arrays.asList(ANY_AUTHENTICATED_USER_ALL, new ACL(ZooDefs.Perms.WRITE, ZooDefs.Ids.ANYONE_ID_UNSAFE));
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX);
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_CONFIG);
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
-
-        // Make sure both ACLs were applied
-        List<ACL> preACLs = client.getACL().forPath(PATH_KNOX);
-        assertEquals(2, preACLs.size());
-
-        // Check that the config nodes really do exist (the monitor will NOT create them if they're present)
-        assertNotNull(client.checkExists().forPath(PATH_KNOX));
-        assertNotNull(client.checkExists().forPath(PATH_KNOX_CONFIG));
-        assertNotNull(client.checkExists().forPath(PATH_KNOX_PROVIDERS));
-        assertNotNull(client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
-
-        try {
-            cm.start();
-        } catch (Exception e) {
-            fail("Failed to start monitor: " + e.getMessage());
-        }
-
-        // Validate the expected ACLs on the Knox config znodes (make sure the monitor removed the world:anyone ACL)
-        List<ACL> expectedACLs = Collections.singletonList(SASL_TESTUSER_ALL);
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX));
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_CONFIG));
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_PROVIDERS));
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_DESCRIPTORS));
-    }
-
-
-    @Test
-    public void testZooKeeperConfigMonitorSASLNodesExistWithAcceptableACL() throws Exception {
-        final String configMonitorName = "zkConfigClient";
-        final String alias = "zkPass";
-
-        // Setup the base GatewayConfig mock
-        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
-        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
-        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
-        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
-                .andReturn(Collections.singletonList(configMonitorName))
-                .anyTimes();
-        final String registryConfig =
-                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
-                        GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
-                        GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + ZK_USERNAME + ";" +
-                        GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=Digest;" +
-                        GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + alias;
-        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
-                .andReturn(registryConfig).anyTimes();
-        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
-        EasyMock.replay(gc);
-
-        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
-        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(alias))
-                .andReturn(ZK_PASSWORD.toCharArray())
-                .anyTimes();
-        EasyMock.replay(aliasService);
-
-        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
-        clientService.setAliasService(aliasService);
-        clientService.init(gc, Collections.emptyMap());
-        clientService.start();
-
-        RemoteConfigurationMonitorFactory.setClientService(clientService);
-
-        RemoteConfigurationMonitor cm = RemoteConfigurationMonitorFactory.get(gc);
-        assertNotNull("Failed to load RemoteConfigurationMonitor", cm);
-
-        List<ACL> acls = Arrays.asList(ANY_AUTHENTICATED_USER_ALL);
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX);
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_CONFIG);
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
-
-        // Check that the config nodes really do exist (the monitor will NOT create them if they're present)
-        assertNotNull(client.checkExists().forPath(PATH_KNOX));
-        assertNotNull(client.checkExists().forPath(PATH_KNOX_CONFIG));
-        assertNotNull(client.checkExists().forPath(PATH_KNOX_PROVIDERS));
-        assertNotNull(client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
-
-        try {
-            cm.start();
-        } catch (Exception e) {
-            fail("Failed to start monitor: " + e.getMessage());
-        }
-
-        // Test auth violation
-        clientService.get(configMonitorName).createEntry("/auth_test/child_node/test1");
-        assertNull("Creation should have been prevented since write access is not granted to the test client.",
-                client.checkExists().forPath("/auth_test/child_node/test1"));
-        assertTrue("Creation should have been prevented since write access is not granted to the test client.",
-                client.getChildren().forPath("/auth_test/child_node").isEmpty());
-
-        // Validate the expected ACLs on the Knox config znodes (make sure the monitor didn't change them)
-        List<ACL> expectedACLs = Collections.singletonList(SASL_TESTUSER_ALL);
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX));
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_CONFIG));
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_PROVIDERS));
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_DESCRIPTORS));
-    }
-
-
-    @Test
-    public void testZooKeeperConfigMonitorSASLCreateNodes() throws Exception {
-        final String configMonitorName = "zkConfigClient";
-        final String alias = "zkPass";
-
-        // Setup the base GatewayConfig mock
-        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
-        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
-        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
-        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
-                .andReturn(Collections.singletonList(configMonitorName))
-                .anyTimes();
-        final String registryConfig =
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + ZK_USERNAME + ";" +
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=Digest;" +
-                            GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + alias;
-        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
-                .andReturn(registryConfig).anyTimes();
-        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
-        EasyMock.replay(gc);
-
-        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
-        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(alias))
-                .andReturn(ZK_PASSWORD.toCharArray())
-                .anyTimes();
-        EasyMock.replay(aliasService);
-
-        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
-        clientService.setAliasService(aliasService);
-        clientService.init(gc, Collections.emptyMap());
-        clientService.start();
-
-        RemoteConfigurationMonitorFactory.setClientService(clientService);
-
-        RemoteConfigurationMonitor cm = RemoteConfigurationMonitorFactory.get(gc);
-        assertNotNull("Failed to load RemoteConfigurationMonitor", cm);
-
-        // Check that the config nodes really don't yet exist (the monitor will create them if they're not present)
-        assertNull(client.checkExists().forPath(PATH_KNOX));
-        assertNull(client.checkExists().forPath(PATH_KNOX_CONFIG));
-        assertNull(client.checkExists().forPath(PATH_KNOX_PROVIDERS));
-        assertNull(client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
-
-        try {
-            cm.start();
-        } catch (Exception e) {
-            fail("Failed to start monitor: " + e.getMessage());
-        }
-
-        // Test auth violation
-        clientService.get(configMonitorName).createEntry("/auth_test/child_node/test1");
-        assertNull("Creation should have been prevented since write access is not granted to the test client.",
-                   client.checkExists().forPath("/auth_test/child_node/test1"));
-        assertTrue("Creation should have been prevented since write access is not granted to the test client.",
-                   client.getChildren().forPath("/auth_test/child_node").isEmpty());
-
-        // Validate the expected ACLs on the Knox config znodes (make sure the monitor created them correctly)
-        List<ACL> expectedACLs = Collections.singletonList(SASL_TESTUSER_ALL);
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX));
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_CONFIG));
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_PROVIDERS));
-        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_DESCRIPTORS));
-
-        // Test the Knox config nodes, for which authentication should be sufficient for access
-        try {
-            final String pc_one_znode = getProviderPath("providers-config1.xml");
-            final File pc_one         = new File(providersDir, "providers-config1.xml");
-            final String pc_two_znode = getProviderPath("providers-config2.xml");
-            final File pc_two         = new File(providersDir, "providers-config2.xml");
-
-            client.create().withMode(CreateMode.PERSISTENT).forPath(pc_one_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
-            Thread.sleep(100);
-            assertTrue(pc_one.exists());
-            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_one));
-
-            client.create().withMode(CreateMode.PERSISTENT).forPath(getProviderPath("providers-config2.xml"), TEST_PROVIDERS_CONFIG_2.getBytes());
-            Thread.sleep(100);
-            assertTrue(pc_two.exists());
-            assertEquals(TEST_PROVIDERS_CONFIG_2, FileUtils.readFileToString(pc_two));
-
-            client.setData().forPath(pc_two_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
-            Thread.sleep(100);
-            assertTrue(pc_two.exists());
-            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_two));
-
-            client.delete().forPath(pc_two_znode);
-            Thread.sleep(100);
-            assertFalse(pc_two.exists());
-
-            client.delete().forPath(pc_one_znode);
-            Thread.sleep(100);
-            assertFalse(pc_one.exists());
-
-            final String desc_one_znode   = getDescriptorPath("test1.json");
-            final String desc_two_znode   = getDescriptorPath("test2.json");
-            final String desc_three_znode = getDescriptorPath("test3.json");
-            final File desc_one           = new File(descriptorsDir, "test1.json");
-            final File desc_two           = new File(descriptorsDir, "test2.json");
-            final File desc_three         = new File(descriptorsDir, "test3.json");
-
-            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_one_znode, TEST_DESCRIPTOR_1.getBytes());
-            Thread.sleep(100);
-            assertTrue(desc_one.exists());
-            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_one));
-
-            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_two_znode, TEST_DESCRIPTOR_1.getBytes());
-            Thread.sleep(100);
-            assertTrue(desc_two.exists());
-            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_two));
-
-            client.setData().forPath(desc_two_znode, TEST_DESCRIPTOR_2.getBytes());
-            Thread.sleep(100);
-            assertTrue(desc_two.exists());
-            assertEquals(TEST_DESCRIPTOR_2, FileUtils.readFileToString(desc_two));
-
-            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_three_znode, TEST_DESCRIPTOR_1.getBytes());
-            Thread.sleep(100);
-            assertTrue(desc_three.exists());
-            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_three));
-
-            client.delete().forPath(desc_two_znode);
-            Thread.sleep(100);
-            assertFalse("Expected test2.json to have been deleted.", desc_two.exists());
-
-            client.delete().forPath(desc_three_znode);
-            Thread.sleep(100);
-            assertFalse(desc_three.exists());
-
-            client.delete().forPath(desc_one_znode);
-            Thread.sleep(100);
-            assertFalse(desc_one.exists());
-        } finally {
-            cm.stop();
-        }
-    }
-
-    private static String getDescriptorPath(String descriptorName) {
-        return PATH_KNOX_DESCRIPTORS + "/" + descriptorName;
-    }
-
-    private static String getProviderPath(String providerConfigName) {
-        return PATH_KNOX_PROVIDERS + "/" + providerConfigName;
-    }
-
-
-    private static final String TEST_PROVIDERS_CONFIG_1 =
-                    "<gateway>\n" +
-                    "    <provider>\n" +
-                    "        <role>identity-assertion</role>\n" +
-                    "        <name>Default</name>\n" +
-                    "        <enabled>true</enabled>\n" +
-                    "    </provider>\n" +
-                    "    <provider>\n" +
-                    "        <role>hostmap</role>\n" +
-                    "        <name>static</name>\n" +
-                    "        <enabled>true</enabled>\n" +
-                    "        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
-                    "    </provider>\n" +
-                    "</gateway>\n";
-
-    private static final String TEST_PROVIDERS_CONFIG_2 =
-                    "<gateway>\n" +
-                    "    <provider>\n" +
-                    "        <role>authentication</role>\n" +
-                    "        <name>ShiroProvider</name>\n" +
-                    "        <enabled>true</enabled>\n" +
-                    "        <param>\n" +
-                    "            <name>sessionTimeout</name>\n" +
-                    "            <value>30</value>\n" +
-                    "        </param>\n" +
-                    "        <param>\n" +
-                    "            <name>main.ldapRealm</name>\n" +
-                    "            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
-                    "        </param>\n" +
-                    "        <param>\n" +
-                    "            <name>main.ldapContextFactory</name>\n" +
-                    "            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
-                    "        </param>\n" +
-                    "        <param>\n" +
-                    "            <name>main.ldapRealm.contextFactory</name>\n" +
-                    "            <value>$ldapContextFactory</value>\n" +
-                    "        </param>\n" +
-                    "        <param>\n" +
-                    "            <name>main.ldapRealm.userDnTemplate</name>\n" +
-                    "            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
-                    "        </param>\n" +
-                    "        <param>\n" +
-                    "            <name>main.ldapRealm.contextFactory.url</name>\n" +
-                    "            <value>ldap://localhost:33389</value>\n" +
-                    "        </param>\n" +
-                    "        <param>\n" +
-                    "            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
-                    "            <value>simple</value>\n" +
-                    "        </param>\n" +
-                    "        <param>\n" +
-                    "            <name>urls./**</name>\n" +
-                    "            <value>authcBasic</value>\n" +
-                    "        </param>\n" +
-                    "    </provider>\n" +
-                    "</gateway>\n";
-
-    private static final String TEST_DESCRIPTOR_1 =
-                    "{\n" +
-                    "  \"discovery-type\":\"AMBARI\",\n" +
-                    "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
-                    "  \"discovery-user\":\"maria_dev\",\n" +
-                    "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
-                    "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
-                    "  \"cluster\":\"Sandbox\",\n" +
-                    "  \"services\":[\n" +
-                    "    {\"name\":\"NODEUI\"},\n" +
-                    "    {\"name\":\"YARNUI\"},\n" +
-                    "    {\"name\":\"HDFSUI\"},\n" +
-                    "    {\"name\":\"OOZIEUI\"},\n" +
-                    "    {\"name\":\"HBASEUI\"},\n" +
-                    "    {\"name\":\"NAMENODE\"},\n" +
-                    "    {\"name\":\"JOBTRACKER\"},\n" +
-                    "    {\"name\":\"WEBHDFS\"},\n" +
-                    "    {\"name\":\"WEBHCAT\"},\n" +
-                    "    {\"name\":\"OOZIE\"},\n" +
-                    "    {\"name\":\"WEBHBASE\"},\n" +
-                    "    {\"name\":\"RESOURCEMANAGER\"},\n" +
-                    "    {\"name\":\"AMBARI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]},\n" +
-                    "    {\"name\":\"AMBARIUI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]}\n" +
-                    "  ]\n" +
-                    "}\n";
-
-    private static final String TEST_DESCRIPTOR_2 =
-                    "{\n" +
-                    "  \"discovery-type\":\"AMBARI\",\n" +
-                    "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
-                    "  \"discovery-user\":\"maria_dev\",\n" +
-                    "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
-                    "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
-                    "  \"cluster\":\"Sandbox\",\n" +
-                    "  \"services\":[\n" +
-                    "    {\"name\":\"NAMENODE\"},\n" +
-                    "    {\"name\":\"JOBTRACKER\"},\n" +
-                    "    {\"name\":\"WEBHDFS\"},\n" +
-                    "    {\"name\":\"WEBHCAT\"},\n" +
-                    "    {\"name\":\"OOZIE\"},\n" +
-                    "    {\"name\":\"WEBHBASE\"},\n" +
-                    "    {\"name\":\"RESOURCEMANAGER\"}\n" +
-                    "  ]\n" +
-                    "}\n";
-
-}
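
The ACL handling exercised above can be illustrated outside the test harness. The following is a minimal Curator sketch, not taken from this commit: it assumes a reachable ZooKeeper ensemble (the connect string and SASL user name are placeholders) and a JAAS "Client" login section on the JVM, creates a Knox-style config znode whose only ACL grants full access to a single SASL identity, and reads the ACLs back the way validateKnoxConfigNodeACLs() does.

    import java.util.Collections;
    import java.util.List;

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.retry.ExponentialBackoffRetry;
    import org.apache.zookeeper.CreateMode;
    import org.apache.zookeeper.ZooDefs;
    import org.apache.zookeeper.data.ACL;
    import org.apache.zookeeper.data.Id;

    public class KnoxZNodeAclSketch {
        public static void main(String[] args) throws Exception {
            CuratorFramework client = CuratorFrameworkFactory.builder()
                    .connectString("localhost:2181")                  // placeholder ensemble address
                    .retryPolicy(new ExponentialBackoffRetry(100, 3))
                    .build();
            client.start();
            try {
                // Grant full access only to one SASL identity (placeholder user name).
                List<ACL> acls = Collections.singletonList(new ACL(ZooDefs.Perms.ALL, new Id("sasl", "testsasluser")));
                client.create().creatingParentsIfNeeded()
                      .withMode(CreateMode.PERSISTENT)
                      .withACL(acls)
                      .forPath("/knox/config");
                // Read the ACLs back; each entry exposes its scheme, id, and permission bits.
                for (ACL acl : client.getACL().forPath("/knox/config")) {
                    System.out.println(acl.getId().getScheme() + ":" + acl.getId().getId() + " perms=" + acl.getPerms());
                }
            } finally {
                client.close();
            }
        }
    }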

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-test/src/test/java/org/apache/knox/gateway/SimpleDescriptorHandlerFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/SimpleDescriptorHandlerFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/SimpleDescriptorHandlerFuncTest.java
new file mode 100644
index 0000000..5b29e19
--- /dev/null
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/SimpleDescriptorHandlerFuncTest.java
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.KeystoreService;
+import org.apache.knox.gateway.services.security.MasterService;
+import org.apache.knox.gateway.services.topology.TopologyService;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType;
+import org.apache.knox.gateway.topology.simple.SimpleDescriptor;
+import org.apache.knox.gateway.topology.simple.SimpleDescriptorHandler;
+import org.apache.knox.test.TestUtils;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.security.KeyStore;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class SimpleDescriptorHandlerFuncTest {
+
+
+  private static final String TEST_PROVIDER_CONFIG =
+      "    <gateway>\n" +
+          "        <provider>\n" +
+          "            <role>authentication</role>\n" +
+          "            <name>ShiroProvider</name>\n" +
+          "            <enabled>true</enabled>\n" +
+          "            <param>\n" +
+          "                <name>sessionTimeout</name>\n" +
+          "                <value>30</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapRealm</name>\n" +
+          "                <value>org.apache.knox.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapContextFactory</name>\n" +
+          "                <value>org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapRealm.contextFactory</name>\n" +
+          "                <value>$ldapContextFactory</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapRealm.userDnTemplate</name>\n" +
+          "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapRealm.contextFactory.url</name>\n" +
+          "                <value>ldap://localhost:33389</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+          "                <value>simple</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>urls./**</name>\n" +
+          "                <value>authcBasic</value>\n" +
+          "            </param>\n" +
+          "        </provider>\n" +
+          "\n" +
+          "        <provider>\n" +
+          "            <role>identity-assertion</role>\n" +
+          "            <name>Default</name>\n" +
+          "            <enabled>true</enabled>\n" +
+          "        </provider>\n" +
+          "\n" +
+          "        <provider>\n" +
+          "            <role>hostmap</role>\n" +
+          "            <name>static</name>\n" +
+          "            <enabled>true</enabled>\n" +
+          "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+          "        </provider>\n" +
+          "    </gateway>\n";
+
+
+  /**
+   * KNOX-1136
+   * <p>
+   * Test that a credential store is created, and an encryptQueryString alias is defined, with a password that is not
+   * random (but is derived from the master secret and the topology name).
+   * <p>
+   * N.B. This test depends on the NoOpServiceDiscovery extension being configured in META-INF/services
+   */
+  @Test
+  public void testSimpleDescriptorHandlerQueryStringCredentialAliasCreation() throws Exception {
+
+    final String testMasterSecret = "mysecret";
+    final String discoveryType = "NO_OP";
+    final String clusterName = "dummy";
+
+    final Map<String, List<String>> serviceURLs = new HashMap<>();
+    serviceURLs.put("RESOURCEMANAGER", Collections.singletonList("http://myhost:1234/resource"));
+
+    File testRootDir = TestUtils.createTempDir(getClass().getSimpleName());
+    File testConfDir = new File(testRootDir, "conf");
+    File testProvDir = new File(testConfDir, "shared-providers");
+    File testTopoDir = new File(testConfDir, "topologies");
+    File testDeployDir = new File(testConfDir, "deployments");
+
+    // Write the externalized provider config to a temp file
+    File providerConfig = new File(testProvDir, "ambari-cluster-policy.xml");
+    FileUtils.write(providerConfig, TEST_PROVIDER_CONFIG);
+
+    File topologyFile = null;
+    try {
+      File destDir = new File(System.getProperty("java.io.tmpdir")).getCanonicalFile();
+
+      // Mock out the simple descriptor
+      SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+      EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+      EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(null).anyTimes();
+      EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(discoveryType).anyTimes();
+      EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+      EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+      EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
+      List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+      for (String serviceName : serviceURLs.keySet()) {
+        SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+        EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+        EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+        EasyMock.expect(svc.getParams()).andReturn(Collections.emptyMap()).anyTimes();
+        EasyMock.replay(svc);
+        serviceMocks.add(svc);
+      }
+      EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+      EasyMock.replay(testDescriptor);
+
+      // Try setting up enough of the GatewayServer to support the test...
+      GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+      InetSocketAddress gatewayAddress = new InetSocketAddress(0);
+      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(testTopoDir.getAbsolutePath()).anyTimes();
+      EasyMock.expect(config.getGatewayDeploymentDir()).andReturn(testDeployDir.getAbsolutePath()).anyTimes();
+      EasyMock.expect(config.getGatewayAddress()).andReturn(gatewayAddress).anyTimes();
+      EasyMock.expect(config.getGatewayPortMappings()).andReturn(Collections.emptyMap()).anyTimes();
+      EasyMock.replay(config);
+
+      // Setup the Gateway Services
+      GatewayServices gatewayServices = EasyMock.createNiceMock(GatewayServices.class);
+
+      // Master Service
+      MasterService ms = EasyMock.createNiceMock(MasterService.class);
+      EasyMock.expect(ms.getMasterSecret()).andReturn(testMasterSecret.toCharArray()).anyTimes();
+      EasyMock.replay(ms);
+      EasyMock.expect(gatewayServices.getService("MasterService")).andReturn(ms).anyTimes();
+
+      // Keystore Service
+      KeystoreService ks = EasyMock.createNiceMock(KeystoreService.class);
+      EasyMock.expect(ks.isCredentialStoreForClusterAvailable(testDescriptor.getName())).andReturn(false).once();
+      ks.createCredentialStoreForCluster(testDescriptor.getName());
+      EasyMock.expectLastCall().once();
+      KeyStore credStore = EasyMock.createNiceMock(KeyStore.class);
+      EasyMock.expect(ks.getCredentialStoreForCluster(testDescriptor.getName())).andReturn(credStore).anyTimes();
+      EasyMock.replay(ks);
+      EasyMock.expect(gatewayServices.getService(GatewayServices.KEYSTORE_SERVICE)).andReturn(ks).anyTimes();
+
+      // Alias Service
+      AliasService as = EasyMock.createNiceMock(AliasService.class);
+      // Captures for validating the alias creation for a generated topology
+      Capture<String> capturedCluster = EasyMock.newCapture();
+      Capture<String> capturedAlias = EasyMock.newCapture();
+      Capture<String> capturedPwd = EasyMock.newCapture();
+      as.addAliasForCluster(capture(capturedCluster), capture(capturedAlias), capture(capturedPwd));
+      EasyMock.expectLastCall().anyTimes();
+      EasyMock.replay(as);
+      EasyMock.expect(gatewayServices.getService(GatewayServices.ALIAS_SERVICE)).andReturn(as).anyTimes();
+
+      // Topology Service
+      TopologyService ts = EasyMock.createNiceMock(TopologyService.class);
+      ts.addTopologyChangeListener(anyObject());
+      EasyMock.expectLastCall().anyTimes();
+      ts.reloadTopologies();
+      EasyMock.expectLastCall().anyTimes();
+      EasyMock.expect(ts.getTopologies()).andReturn(Collections.emptyList()).anyTimes();
+      EasyMock.replay(ts);
+      EasyMock.expect(gatewayServices.getService(GatewayServices.TOPOLOGY_SERVICE)).andReturn(ts).anyTimes();
+
+      EasyMock.replay(gatewayServices);
+
+      // Start a GatewayService with the GatewayServices mock
+      GatewayServer server = GatewayServer.startGateway(config, gatewayServices);
+
+      // Invoke the simple descriptor handler, which will also create the credential store
+      // (because it doesn't exist) and the encryptQueryString alias
+      Map<String, File> files = SimpleDescriptorHandler.handle(testDescriptor,
+                                                               providerConfig.getParentFile(),
+                                                               destDir);
+      topologyFile = files.get("topology");
+
+      // Validate the AliasService interaction
+      assertEquals("Unexpected cluster name for the alias (should be the topology name).",
+                   testDescriptor.getName(), capturedCluster.getValue());
+      assertEquals("Unexpected alias name.", "encryptQueryString", capturedAlias.getValue());
+      assertEquals("Unexpected alias value (should be master secret + topology name).",
+                   testMasterSecret + testDescriptor.getName(), capturedPwd.getValue());
+
+    } catch (Exception e) {
+      e.printStackTrace();
+      fail(e.getMessage());
+    } finally {
+      FileUtils.forceDelete(testRootDir);
+      if (topologyFile != null) {
+        topologyFile.delete();
+      }
+    }
+  }
+
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Test classes for effectively "skipping" service discovery for this test.
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////
+
+  public static final class NoOpServiceDiscoveryType implements ServiceDiscoveryType {
+    @Override
+    public String getType() {
+      return NoOpServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+      return new NoOpServiceDiscovery();
+    }
+  }
+
+  private static final class NoOpServiceDiscovery implements ServiceDiscovery {
+    static final String TYPE = "NO_OP";
+
+    @Override
+    public String getType() {
+      return TYPE;
+    }
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+      return Collections.emptyMap();
+    }
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+      return null;
+    }
+  }
+
+}
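
The assertion at the end of the test above is the point of KNOX-1136: the encryptQueryString alias value is deterministic rather than random, so any process that holds the master secret can recompute it for a given topology. A minimal sketch of that derivation (plain string concatenation, mirroring the test's expectation; the class and method names below are illustrative, not Knox APIs):

    public class DerivedAliasSketch {
        // Recompute the non-random encryptQueryString value for a topology.
        static String derivedEncryptQueryStringPassword(String masterSecret, String topologyName) {
            return masterSecret + topologyName; // matches the assertion in the test above
        }

        public static void main(String[] args) {
            System.out.println(derivedEncryptQueryStringPassword("mysecret", "mysimpledescriptor"));
        }
    }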

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-test/src/test/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
new file mode 100644
index 0000000..37668a8
--- /dev/null
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
@@ -0,0 +1,603 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.monitor;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.curator.test.InstanceSpec;
+import org.apache.curator.test.TestingCluster;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.service.config.remote.zk.ZooKeeperClientService;
+import org.apache.knox.gateway.service.config.remote.zk.ZooKeeperClientServiceProvider;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.test.TestUtils;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test the RemoteConfigurationMonitor functionality with SASL configured, and znode ACLs applied.
+ *
+ * The expected implementation is org.apache.knox.gateway.topology.monitor.zk.ZooKeeperConfigMonitor
+ *
+ * Digest-based SASL is used for this test, but since that is dictated solely by the JAAS config, Kerberos-based SASL
+ * should work in exactly the same way, simply by modifying the SASL config.
+ */
+public class RemoteConfigurationMonitorTest {
+
+    private static final String PATH_KNOX = "/knox";
+    private static final String PATH_KNOX_CONFIG = PATH_KNOX + "/config";
+    private static final String PATH_KNOX_PROVIDERS = PATH_KNOX_CONFIG + "/shared-providers";
+    private static final String PATH_KNOX_DESCRIPTORS = PATH_KNOX_CONFIG + "/descriptors";
+
+    private static final String PATH_AUTH_TEST = "/auth_test/child_node";
+
+
+    private static final String ALT_USERNAME = "notyou";
+    private static final String ZK_USERNAME = "testsasluser";
+    private static final String ZK_PASSWORD = "testsaslpwd";
+
+    private static final ACL ANY_AUTHENTICATED_USER_ALL = new ACL(ZooDefs.Perms.ALL, new Id("auth", ""));
+    private static final ACL SASL_TESTUSER_ALL = new ACL(ZooDefs.Perms.ALL, new Id("sasl", ZK_USERNAME));
+
+    private static File testTmp;
+    private static File providersDir;
+    private static File descriptorsDir;
+
+    private static TestingCluster zkCluster;
+
+    private static CuratorFramework client;
+
+    @BeforeClass
+    public static void setupSuite() throws Exception {
+        testTmp = TestUtils.createTempDir(RemoteConfigurationMonitorTest.class.getName());
+        File confDir = TestUtils.createTempDir(testTmp + "/conf");
+        providersDir = TestUtils.createTempDir(confDir + "/shared-providers");
+        descriptorsDir = TestUtils.createTempDir(confDir + "/descriptors");
+    }
+
+    @AfterClass
+    public static void tearDownSuite() throws Exception {
+        // Delete the working dir
+        testTmp.delete();
+    }
+
+    @Before
+    public void setupTest() throws Exception {
+        configureAndStartZKCluster();
+    }
+
+    @After
+    public void tearDownTest() throws Exception {
+        // Clean up the ZK nodes, and close the client
+        if (client != null) {
+            if (client.checkExists().forPath(PATH_KNOX) != null) {
+                client.delete().deletingChildrenIfNeeded().forPath(PATH_KNOX);
+            }
+            client.close();
+        }
+
+        // Shutdown the ZK cluster
+        zkCluster.close();
+    }
+
+    /**
+     * Create and persist a JAAS configuration file, defining the SASL config for both the ZooKeeper cluster instances
+     * and ZooKeeper clients.
+     *
+     * @param username The digest username
+     * @param password The digest password
+     *
+     * @return The JAAS configuration file
+     */
+    private static File setupDigestSaslConfig(String username, String password) throws Exception {
+        File saslConfigFile = new File(testTmp, "server-jaas.conf");
+        FileWriter fw = new FileWriter(saslConfigFile);
+        fw.write("Server {\n" +
+                "    org.apache.zookeeper.server.auth.DigestLoginModule required\n" +
+                "    user_" + username + " =\"" + password + "\";\n" +
+                "};\n" +
+                "Client {\n" +
+                "    org.apache.zookeeper.server.auth.DigestLoginModule required\n" +
+                "    username=\"" + username + "\"\n" +
+                "    password=\"" + password + "\";\n" +
+                "};\n");
+        fw.close();
+        return saslConfigFile;
+    }
+
+    /**
+     * Configure and start the ZooKeeper test cluster, and create the znodes monitored by the RemoteConfigurationMonitor.
+     */
+    private static void configureAndStartZKCluster() throws Exception {
+        // Configure security for the ZK cluster instances
+        Map<String, Object> customInstanceSpecProps = new HashMap<>();
+        customInstanceSpecProps.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
+        customInstanceSpecProps.put("requireClientAuthScheme", "sasl");
+
+        // Define the test cluster
+        List<InstanceSpec> instanceSpecs = new ArrayList<>();
+        for (int i = 0 ; i < 3 ; i++) {
+            InstanceSpec is = new InstanceSpec(null, -1, -1, -1, false, (i+1), -1, -1, customInstanceSpecProps);
+            instanceSpecs.add(is);
+        }
+        zkCluster = new TestingCluster(instanceSpecs);
+
+        // Configure auth for the ZooKeeper servers and the clients
+        File saslConfigFile = setupDigestSaslConfig(ZK_USERNAME, ZK_PASSWORD);
+
+        // This system property is used by the ZooKeeper cluster instances, the test driver client, and the
+        // RemoteConfigurationMonitor implementation for SASL authentication/authorization
+        System.setProperty("java.security.auth.login.config", saslConfigFile.getAbsolutePath());
+
+        // Start the cluster
+        zkCluster.start();
+
+        // Create the client for the test cluster
+        client = CuratorFrameworkFactory.builder()
+                                        .connectString(zkCluster.getConnectString())
+                                        .retryPolicy(new ExponentialBackoffRetry(100, 3))
+                                        .build();
+        assertNotNull(client);
+        client.start();
+
+        // Create test config nodes with an ACL for a sasl user that is NOT configured for the test client
+        List<ACL> acls = Arrays.asList(new ACL(ZooDefs.Perms.ALL, new Id("sasl", ALT_USERNAME)),
+                                       new ACL(ZooDefs.Perms.READ, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_AUTH_TEST);
+        assertNotNull("Failed to create node: " + PATH_AUTH_TEST,
+                      client.checkExists().forPath(PATH_AUTH_TEST));
+    }
+
+
+    private static void validateKnoxConfigNodeACLs(List<ACL> expectedACLS, List<ACL> actualACLs) throws Exception {
+        assertEquals(expectedACLS.size(), actualACLs.size());
+        int matchedCount = 0;
+        for (ACL expected : expectedACLS) {
+            for (ACL actual : actualACLs) {
+                Id expectedId = expected.getId();
+                Id actualId = actual.getId();
+                if (actualId.getScheme().equals(expectedId.getScheme()) && actualId.getId().equals(expectedId.getId())) {
+                    matchedCount++;
+                    assertEquals(expected.getPerms(), actual.getPerms());
+                    break;
+                }
+            }
+        }
+        assertEquals("ACL mismatch despite being the same quantity.", expectedACLS.size(), matchedCount);
+    }
+
+
+    @Test
+    public void testZooKeeperConfigMonitorSASLNodesExistWithUnacceptableACL() throws Exception {
+        final String configMonitorName = "zkConfigClient";
+        final String alias = "zkPass";
+
+        // Setup the base GatewayConfig mock
+        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
+        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
+                .andReturn(Collections.singletonList(configMonitorName))
+                .anyTimes();
+        final String registryConfig =
+                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + ZK_USERNAME + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=Digest;" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + alias;
+        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
+                .andReturn(registryConfig).anyTimes();
+        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
+        EasyMock.replay(gc);
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(alias))
+                .andReturn(ZK_PASSWORD.toCharArray())
+                .anyTimes();
+        EasyMock.replay(aliasService);
+
+        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
+        clientService.setAliasService(aliasService);
+        clientService.init(gc, Collections.emptyMap());
+        clientService.start();
+
+        RemoteConfigurationMonitorFactory.setClientService(clientService);
+
+        RemoteConfigurationMonitor cm = RemoteConfigurationMonitorFactory.get(gc);
+        assertNotNull("Failed to load RemoteConfigurationMonitor", cm);
+
+        final ACL ANY_AUTHENTICATED_USER_ALL = new ACL(ZooDefs.Perms.ALL, new Id("auth", ""));
+        List<ACL> acls = Arrays.asList(ANY_AUTHENTICATED_USER_ALL, new ACL(ZooDefs.Perms.WRITE, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_CONFIG);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
+
+        // Make sure both ACLs were applied
+        List<ACL> preACLs = client.getACL().forPath(PATH_KNOX);
+        assertEquals(2, preACLs.size());
+
+        // Check that the config nodes really do exist (the monitor will NOT create them if they're present)
+        assertNotNull(client.checkExists().forPath(PATH_KNOX));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_CONFIG));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_PROVIDERS));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
+
+        try {
+            cm.start();
+        } catch (Exception e) {
+            fail("Failed to start monitor: " + e.getMessage());
+        }
+
+        // Validate the expected ACLs on the Knox config znodes (make sure the monitor removed the world:anyone ACL)
+        List<ACL> expectedACLs = Collections.singletonList(SASL_TESTUSER_ALL);
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_CONFIG));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_PROVIDERS));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_DESCRIPTORS));
+    }
+
+
+    @Test
+    public void testZooKeeperConfigMonitorSASLNodesExistWithAcceptableACL() throws Exception {
+        final String configMonitorName = "zkConfigClient";
+        final String alias = "zkPass";
+
+        // Setup the base GatewayConfig mock
+        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
+        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
+                .andReturn(Collections.singletonList(configMonitorName))
+                .anyTimes();
+        final String registryConfig =
+                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + ZK_USERNAME + ";" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=Digest;" +
+                        GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + alias;
+        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
+                .andReturn(registryConfig).anyTimes();
+        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
+        EasyMock.replay(gc);
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(alias))
+                .andReturn(ZK_PASSWORD.toCharArray())
+                .anyTimes();
+        EasyMock.replay(aliasService);
+
+        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
+        clientService.setAliasService(aliasService);
+        clientService.init(gc, Collections.emptyMap());
+        clientService.start();
+
+        RemoteConfigurationMonitorFactory.setClientService(clientService);
+
+        RemoteConfigurationMonitor cm = RemoteConfigurationMonitorFactory.get(gc);
+        assertNotNull("Failed to load RemoteConfigurationMonitor", cm);
+
+        List<ACL> acls = Arrays.asList(ANY_AUTHENTICATED_USER_ALL);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_CONFIG);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
+
+        // Check that the config nodes really do exist (the monitor will NOT create them if they're present)
+        assertNotNull(client.checkExists().forPath(PATH_KNOX));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_CONFIG));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_PROVIDERS));
+        assertNotNull(client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
+
+        try {
+            cm.start();
+        } catch (Exception e) {
+            fail("Failed to start monitor: " + e.getMessage());
+        }
+
+        // Test auth violation
+        clientService.get(configMonitorName).createEntry("/auth_test/child_node/test1");
+        assertNull("Creation should have been prevented since write access is not granted to the test client.",
+                client.checkExists().forPath("/auth_test/child_node/test1"));
+        assertTrue("Creation should have been prevented since write access is not granted to the test client.",
+                client.getChildren().forPath("/auth_test/child_node").isEmpty());
+
+        // Validate the expected ACLs on the Knox config znodes (make sure the monitor didn't change them)
+        List<ACL> expectedACLs = Collections.singletonList(SASL_TESTUSER_ALL);
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_CONFIG));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_PROVIDERS));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_DESCRIPTORS));
+    }
+
+
+    @Test
+    public void testZooKeeperConfigMonitorSASLCreateNodes() throws Exception {
+        final String configMonitorName = "zkConfigClient";
+        final String alias = "zkPass";
+
+        // Setup the base GatewayConfig mock
+        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
+        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
+                .andReturn(Collections.singletonList(configMonitorName))
+                .anyTimes();
+        final String registryConfig =
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString() + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL + "=" + ZK_USERNAME + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE + "=Digest;" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS + "=" + alias;
+        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
+                .andReturn(registryConfig).anyTimes();
+        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
+        EasyMock.replay(gc);
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(alias))
+                .andReturn(ZK_PASSWORD.toCharArray())
+                .anyTimes();
+        EasyMock.replay(aliasService);
+
+        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
+        clientService.setAliasService(aliasService);
+        clientService.init(gc, Collections.emptyMap());
+        clientService.start();
+
+        RemoteConfigurationMonitorFactory.setClientService(clientService);
+
+        RemoteConfigurationMonitor cm = RemoteConfigurationMonitorFactory.get(gc);
+        assertNotNull("Failed to load RemoteConfigurationMonitor", cm);
+
+        // Check that the config nodes really don't yet exist (the monitor will create them if they're not present)
+        assertNull(client.checkExists().forPath(PATH_KNOX));
+        assertNull(client.checkExists().forPath(PATH_KNOX_CONFIG));
+        assertNull(client.checkExists().forPath(PATH_KNOX_PROVIDERS));
+        assertNull(client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
+
+        try {
+            cm.start();
+        } catch (Exception e) {
+            fail("Failed to start monitor: " + e.getMessage());
+        }
+
+        // Test auth violation
+        clientService.get(configMonitorName).createEntry("/auth_test/child_node/test1");
+        assertNull("Creation should have been prevented since write access is not granted to the test client.",
+                   client.checkExists().forPath("/auth_test/child_node/test1"));
+        assertTrue("Creation should have been prevented since write access is not granted to the test client.",
+                   client.getChildren().forPath("/auth_test/child_node").isEmpty());
+
+        // Validate the expected ACLs on the Knox config znodes (make sure the monitor created them correctly)
+        List<ACL> expectedACLs = Collections.singletonList(SASL_TESTUSER_ALL);
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_CONFIG));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_PROVIDERS));
+        validateKnoxConfigNodeACLs(expectedACLs, client.getACL().forPath(PATH_KNOX_DESCRIPTORS));
+
+        // Test the Knox config nodes, for which authentication should be sufficient for access
+        try {
+            final String pc_one_znode = getProviderPath("providers-config1.xml");
+            final File pc_one         = new File(providersDir, "providers-config1.xml");
+            final String pc_two_znode = getProviderPath("providers-config2.xml");
+            final File pc_two         = new File(providersDir, "providers-config2.xml");
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(pc_one_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_one.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_one));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(getProviderPath("providers-config2.xml"), TEST_PROVIDERS_CONFIG_2.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_two.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_2, FileUtils.readFileToString(pc_two));
+
+            client.setData().forPath(pc_two_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_two.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_two));
+
+            client.delete().forPath(pc_two_znode);
+            Thread.sleep(100);
+            assertFalse(pc_two.exists());
+
+            client.delete().forPath(pc_one_znode);
+            Thread.sleep(100);
+            assertFalse(pc_one.exists());
+
+            final String desc_one_znode   = getDescriptorPath("test1.json");
+            final String desc_two_znode   = getDescriptorPath("test2.json");
+            final String desc_three_znode = getDescriptorPath("test3.json");
+            final File desc_one           = new File(descriptorsDir, "test1.json");
+            final File desc_two           = new File(descriptorsDir, "test2.json");
+            final File desc_three         = new File(descriptorsDir, "test3.json");
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_one_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_one.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_one));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_two_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_two.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_two));
+
+            client.setData().forPath(desc_two_znode, TEST_DESCRIPTOR_2.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_two.exists());
+            assertEquals(TEST_DESCRIPTOR_2, FileUtils.readFileToString(desc_two));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_three_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_three.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_three));
+
+            client.delete().forPath(desc_two_znode);
+            Thread.sleep(100);
+            assertFalse("Expected test2.json to have been deleted.", desc_two.exists());
+
+            client.delete().forPath(desc_three_znode);
+            Thread.sleep(100);
+            assertFalse(desc_three.exists());
+
+            client.delete().forPath(desc_one_znode);
+            Thread.sleep(100);
+            assertFalse(desc_one.exists());
+        } finally {
+            cm.stop();
+        }
+    }
+
+    private static String getDescriptorPath(String descriptorName) {
+        return PATH_KNOX_DESCRIPTORS + "/" + descriptorName;
+    }
+
+    private static String getProviderPath(String providerConfigName) {
+        return PATH_KNOX_PROVIDERS + "/" + providerConfigName;
+    }
+
+
+    private static final String TEST_PROVIDERS_CONFIG_1 =
+                    "<gateway>\n" +
+                    "    <provider>\n" +
+                    "        <role>identity-assertion</role>\n" +
+                    "        <name>Default</name>\n" +
+                    "        <enabled>true</enabled>\n" +
+                    "    </provider>\n" +
+                    "    <provider>\n" +
+                    "        <role>hostmap</role>\n" +
+                    "        <name>static</name>\n" +
+                    "        <enabled>true</enabled>\n" +
+                    "        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+                    "    </provider>\n" +
+                    "</gateway>\n";
+
+    private static final String TEST_PROVIDERS_CONFIG_2 =
+                    "<gateway>\n" +
+                    "    <provider>\n" +
+                    "        <role>authentication</role>\n" +
+                    "        <name>ShiroProvider</name>\n" +
+                    "        <enabled>true</enabled>\n" +
+                    "        <param>\n" +
+                    "            <name>sessionTimeout</name>\n" +
+                    "            <value>30</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapRealm</name>\n" +
+                    "            <value>org.apache.knox.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapContextFactory</name>\n" +
+                    "            <value>org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapRealm.contextFactory</name>\n" +
+                    "            <value>$ldapContextFactory</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapRealm.userDnTemplate</name>\n" +
+                    "            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapRealm.contextFactory.url</name>\n" +
+                    "            <value>ldap://localhost:33389</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+                    "            <value>simple</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>urls./**</name>\n" +
+                    "            <value>authcBasic</value>\n" +
+                    "        </param>\n" +
+                    "    </provider>\n" +
+                    "</gateway>\n";
+
+    private static final String TEST_DESCRIPTOR_1 =
+                    "{\n" +
+                    "  \"discovery-type\":\"AMBARI\",\n" +
+                    "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
+                    "  \"discovery-user\":\"maria_dev\",\n" +
+                    "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
+                    "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
+                    "  \"cluster\":\"Sandbox\",\n" +
+                    "  \"services\":[\n" +
+                    "    {\"name\":\"NODEUI\"},\n" +
+                    "    {\"name\":\"YARNUI\"},\n" +
+                    "    {\"name\":\"HDFSUI\"},\n" +
+                    "    {\"name\":\"OOZIEUI\"},\n" +
+                    "    {\"name\":\"HBASEUI\"},\n" +
+                    "    {\"name\":\"NAMENODE\"},\n" +
+                    "    {\"name\":\"JOBTRACKER\"},\n" +
+                    "    {\"name\":\"WEBHDFS\"},\n" +
+                    "    {\"name\":\"WEBHCAT\"},\n" +
+                    "    {\"name\":\"OOZIE\"},\n" +
+                    "    {\"name\":\"WEBHBASE\"},\n" +
+                    "    {\"name\":\"RESOURCEMANAGER\"},\n" +
+                    "    {\"name\":\"AMBARI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]},\n" +
+                    "    {\"name\":\"AMBARIUI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]}\n" +
+                    "  ]\n" +
+                    "}\n";
+
+    private static final String TEST_DESCRIPTOR_2 =
+                    "{\n" +
+                    "  \"discovery-type\":\"AMBARI\",\n" +
+                    "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
+                    "  \"discovery-user\":\"maria_dev\",\n" +
+                    "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
+                    "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
+                    "  \"cluster\":\"Sandbox\",\n" +
+                    "  \"services\":[\n" +
+                    "    {\"name\":\"NAMENODE\"},\n" +
+                    "    {\"name\":\"JOBTRACKER\"},\n" +
+                    "    {\"name\":\"WEBHDFS\"},\n" +
+                    "    {\"name\":\"WEBHCAT\"},\n" +
+                    "    {\"name\":\"OOZIE\"},\n" +
+                    "    {\"name\":\"WEBHBASE\"},\n" +
+                    "    {\"name\":\"RESOURCEMANAGER\"}\n" +
+                    "  ]\n" +
+                    "}\n";
+
+}
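
The TEST_DESCRIPTOR constants above exercise the simple descriptor format: a JSON document naming the Ambari discovery endpoint and credentials alias, a provider configuration reference, the target cluster, and the services to expose. A minimal sketch of reading those fields follows; Jackson, the class name, and the trimmed JSON are assumptions for illustration, not the handler's actual code path:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class DescriptorFieldsSketch {
        public static void main(String[] args) throws Exception {
            // Same shape as TEST_DESCRIPTOR_2 above, trimmed to a couple of services.
            String json = "{"
                    + "\"provider-config-ref\":\"sandbox-providers.xml\","
                    + "\"cluster\":\"Sandbox\","
                    + "\"services\":[{\"name\":\"WEBHDFS\"},{\"name\":\"OOZIE\"}]"
                    + "}";
            JsonNode descriptor = new ObjectMapper().readTree(json);
            System.out.println("providers: " + descriptor.get("provider-config-ref").asText());
            for (JsonNode service : descriptor.get("services")) {   // iterate the services array
                System.out.println("service:   " + service.get("name").asText());
            }
        }
    }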

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-test/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-test/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
deleted file mode 100644
index 0c5fe09..0000000
--- a/gateway-test/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.SimpleDescriptorHandlerFuncTest$NoOpServiceDiscoveryType

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-test/src/test/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType b/gateway-test/src/test/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..8d72813
--- /dev/null
+++ b/gateway-test/src/test/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.knox.gateway.SimpleDescriptorHandlerFuncTest$NoOpServiceDiscoveryType
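
The file above is a standard JDK ServiceLoader provider-configuration file, so after the package move the NoOpServiceDiscoveryType used by SimpleDescriptorHandlerFuncTest remains discoverable at runtime. A minimal sketch of how such a registration is consumed, assuming only java.util.ServiceLoader and the org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType interface on the classpath:

    import java.util.ServiceLoader;

    import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType;

    public class DiscoveryTypeLookupSketch {
        public static void main(String[] args) {
            // ServiceLoader scans META-INF/services/<interface-FQCN> entries on the
            // classpath and instantiates each listed implementation via its no-arg constructor.
            ServiceLoader<ServiceDiscoveryType> loader = ServiceLoader.load(ServiceDiscoveryType.class);
            for (ServiceDiscoveryType discoveryType : loader) {
                System.out.println("found discovery type: " + discoveryType.getClass().getName());
            }
        }
    }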


[13/49] knox git commit: KNOX-1128 - Readonly protection for generated topologies in Knox Admin UI

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/scripts.2c89ed78f648df44c10f.bundle.js
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/scripts.2c89ed78f648df44c10f.bundle.js b/gateway-applications/src/main/resources/applications/admin-ui/app/scripts.2c89ed78f648df44c10f.bundle.js
new file mode 100644
index 0000000..7f7abe4
--- /dev/null
+++ b/gateway-applications/src/main/resources/applications/admin-ui/app/scripts.2c89ed78f648df44c10f.bundle.js
@@ -0,0 +1,12 @@
+(function(){var e=function(){return this}();if(!e&&"undefined"!=typeof window&&(e=window),"undefined"==typeof requirejs){var t=function(e,i,n){if("string"!=typeof e)return void(t.original?t.original.apply(this,arguments):(console.error("dropping module because define wasn't a string."),console.trace()));2==arguments.length&&(n=i),t.modules[e]||(t.payloads[e]=n,t.modules[e]=null)};t.modules={},t.payloads={};var i=function(e,t,i){if("string"==typeof t){var s=o(e,t);if(void 0!=s)return i&&i(),s}else if("[object Array]"===Object.prototype.toString.call(t)){for(var r=[],a=0,l=t.length;a<l;++a){var h=o(e,t[a]);if(void 0==h&&n.original)return;r.push(h)}return i&&i.apply(null,r)||!0}},n=function(e,t){var s=i("",e,t);return void 0==s&&n.original?n.original.apply(this,arguments):s},s=function(e,t){if(-1!==t.indexOf("!")){var i=t.split("!");return s(e,i[0])+"!"+s(e,i[1])}if("."==t.charAt(0)){var n=e.split("/").slice(0,-1).join("/");for(t=n+"/"+t;-1!==t.indexOf(".")&&o!=t;){var o=t;t=t.replace(
 /\/\.\//,"/").replace(/[^\/]+\/\.\.\//,"")}}return t},o=function(e,n){n=s(e,n);var o=t.modules[n];if(!o){if("function"==typeof(o=t.payloads[n])){var r={},a={id:n,uri:"",exports:r,packaged:!0};r=o(function(e,t){return i(n,e,t)},r,a)||a.exports,t.modules[n]=r,delete t.payloads[n]}o=t.modules[n]=r||o}return o};!function(i){var s=e;i&&(e[i]||(e[i]={}),s=e[i]),s.define&&s.define.packaged||(t.original=s.define,s.define=t,s.define.packaged=!0),s.require&&s.require.packaged||(n.original=s.require,s.require=n,s.require.packaged=!0)}("")}})(),define("ace/lib/regexp",["require","exports","module"],function(e,t,i){"use strict";function n(e){return(e.global?"g":"")+(e.ignoreCase?"i":"")+(e.multiline?"m":"")+(e.extended?"x":"")+(e.sticky?"y":"")}function s(e,t,i){if(Array.prototype.indexOf)return e.indexOf(t,i);for(var n=i||0;n<e.length;n++)if(e[n]===t)return n;return-1}var o={exec:RegExp.prototype.exec,test:RegExp.prototype.test,match:String.prototype.match,replace:String.prototype.replace,split
 :String.prototype.split},r=void 0===o.exec.call(/()??/,"")[1],a=function(){var e=/^/g;return o.test.call(e,""),!e.lastIndex}();a&&r||(RegExp.prototype.exec=function(e){var t,i,l=o.exec.apply(this,arguments);if("string"==typeof e&&l){if(!r&&l.length>1&&s(l,"")>-1&&(i=RegExp(this.source,o.replace.call(n(this),"g","")),o.replace.call(e.slice(l.index),i,function(){for(var e=1;e<arguments.length-2;e++)void 0===arguments[e]&&(l[e]=void 0)})),this._xregexp&&this._xregexp.captureNames)for(var h=1;h<l.length;h++)(t=this._xregexp.captureNames[h-1])&&(l[t]=l[h]);!a&&this.global&&!l[0].length&&this.lastIndex>l.index&&this.lastIndex--}return l},a||(RegExp.prototype.test=function(e){var t=o.exec.call(this,e);return t&&this.global&&!t[0].length&&this.lastIndex>t.index&&this.lastIndex--,!!t}))}),define("ace/lib/es5-shim",["require","exports","module"],function(e,t,i){function n(){}function s(e){try{return Object.defineProperty(e,"sentinel",{}),"sentinel"in e}catch(e){}}function o(e){return e=+e,e!=
 =e?e=0:0!==e&&e!==1/0&&e!==-1/0&&(e=(e>0||-1)*Math.floor(Math.abs(e))),e}Function.prototype.bind||(Function.prototype.bind=function(e){var t=this;if("function"!=typeof t)throw new TypeError("Function.prototype.bind called on incompatible "+t);var i=f.call(arguments,1),s=function(){if(this instanceof s){var n=t.apply(this,i.concat(f.call(arguments)));return Object(n)===n?n:this}return t.apply(e,i.concat(f.call(arguments)))};return t.prototype&&(n.prototype=t.prototype,s.prototype=new n,n.prototype=null),s});var r,a,l,h,c,u=Function.prototype.call,d=Array.prototype,g=Object.prototype,f=d.slice,m=u.bind(g.toString),p=u.bind(g.hasOwnProperty);if((c=p(g,"__defineGetter__"))&&(r=u.bind(g.__defineGetter__),a=u.bind(g.__defineSetter__),l=u.bind(g.__lookupGetter__),h=u.bind(g.__lookupSetter__)),2!=[1,2].splice(0).length)if(function(){function e(e){var t=new Array(e+2);return t[0]=t[1]=0,t}var t,i=[];if(i.splice.apply(i,e(20)),i.splice.apply(i,e(26)),t=i.length,i.splice(5,0,"XXX"),i.length,t+
 1==i.length)return!0}()){var A=Array.prototype.splice;Array.prototype.splice=function(e,t){return arguments.length?A.apply(this,[void 0===e?0:e,void 0===t?this.length-e:t].concat(f.call(arguments,2))):[]}}else Array.prototype.splice=function(e,t){var i=this.length;e>0?e>i&&(e=i):void 0==e?e=0:e<0&&(e=Math.max(i+e,0)),e+t<i||(t=i-e);var n=this.slice(e,e+t),s=f.call(arguments,2),o=s.length;if(e===i)o&&this.push.apply(this,s);else{var r=Math.min(t,i-e),a=e+r,l=a+o-r,h=i-a,c=i-r;if(l<a)for(var u=0;u<h;++u)this[l+u]=this[a+u];else if(l>a)for(u=h;u--;)this[l+u]=this[a+u];if(o&&e===c)this.length=c,this.push.apply(this,s);else for(this.length=c+o,u=0;u<o;++u)this[e+u]=s[u]}return n};Array.isArray||(Array.isArray=function(e){return"[object Array]"==m(e)});var C=Object("a"),v="a"!=C[0]||!(0 in C);if(Array.prototype.forEach||(Array.prototype.forEach=function(e){var t=L(this),i=v&&"[object String]"==m(this)?this.split(""):t,n=arguments[1],s=-1,o=i.length>>>0;if("[object Function]"!=m(e))throw n
 ew TypeError;for(;++s<o;)s in i&&e.call(n,i[s],s,t)}),Array.prototype.map||(Array.prototype.map=function(e){var t=L(this),i=v&&"[object String]"==m(this)?this.split(""):t,n=i.length>>>0,s=Array(n),o=arguments[1];if("[object Function]"!=m(e))throw new TypeError(e+" is not a function");for(var r=0;r<n;r++)r in i&&(s[r]=e.call(o,i[r],r,t));return s}),Array.prototype.filter||(Array.prototype.filter=function(e){var t,i=L(this),n=v&&"[object String]"==m(this)?this.split(""):i,s=n.length>>>0,o=[],r=arguments[1];if("[object Function]"!=m(e))throw new TypeError(e+" is not a function");for(var a=0;a<s;a++)a in n&&(t=n[a],e.call(r,t,a,i)&&o.push(t));return o}),Array.prototype.every||(Array.prototype.every=function(e){var t=L(this),i=v&&"[object String]"==m(this)?this.split(""):t,n=i.length>>>0,s=arguments[1];if("[object Function]"!=m(e))throw new TypeError(e+" is not a function");for(var o=0;o<n;o++)if(o in i&&!e.call(s,i[o],o,t))return!1;return!0}),Array.prototype.some||(Array.prototype.some=
 function(e){var t=L(this),i=v&&"[object String]"==m(this)?this.split(""):t,n=i.length>>>0,s=arguments[1];if("[object Function]"!=m(e))throw new TypeError(e+" is not a function");for(var o=0;o<n;o++)if(o in i&&e.call(s,i[o],o,t))return!0;return!1}),Array.prototype.reduce||(Array.prototype.reduce=function(e){var t=L(this),i=v&&"[object String]"==m(this)?this.split(""):t,n=i.length>>>0;if("[object Function]"!=m(e))throw new TypeError(e+" is not a function");if(!n&&1==arguments.length)throw new TypeError("reduce of empty array with no initial value");var s,o=0;if(arguments.length>=2)s=arguments[1];else for(;;){if(o in i){s=i[o++];break}if(++o>=n)throw new TypeError("reduce of empty array with no initial value")}for(;o<n;o++)o in i&&(s=e.call(void 0,s,i[o],o,t));return s}),Array.prototype.reduceRight||(Array.prototype.reduceRight=function(e){var t=L(this),i=v&&"[object String]"==m(this)?this.split(""):t,n=i.length>>>0;if("[object Function]"!=m(e))throw new TypeError(e+" is not a function
 ");if(!n&&1==arguments.length)throw new TypeError("reduceRight of empty array with no initial value");var s,o=n-1;if(arguments.length>=2)s=arguments[1];else for(;;){if(o in i){s=i[o--];break}if(--o<0)throw new TypeError("reduceRight of empty array with no initial value")}do{o in this&&(s=e.call(void 0,s,i[o],o,t))}while(o--);return s}),Array.prototype.indexOf&&-1==[0,1].indexOf(1,2)||(Array.prototype.indexOf=function(e){var t=v&&"[object String]"==m(this)?this.split(""):L(this),i=t.length>>>0;if(!i)return-1;var n=0;for(arguments.length>1&&(n=o(arguments[1])),n=n>=0?n:Math.max(0,i+n);n<i;n++)if(n in t&&t[n]===e)return n;return-1}),Array.prototype.lastIndexOf&&-1==[0,1].lastIndexOf(0,-3)||(Array.prototype.lastIndexOf=function(e){var t=v&&"[object String]"==m(this)?this.split(""):L(this),i=t.length>>>0;if(!i)return-1;var n=i-1;for(arguments.length>1&&(n=Math.min(n,o(arguments[1]))),n=n>=0?n:i-Math.abs(n);n>=0;n--)if(n in t&&e===t[n])return n;return-1}),Object.getPrototypeOf||(Object.ge
 tPrototypeOf=function(e){return e.__proto__||(e.constructor?e.constructor.prototype:g)}),!Object.getOwnPropertyDescriptor){Object.getOwnPropertyDescriptor=function(e,t){if("object"!=typeof e&&"function"!=typeof e||null===e)throw new TypeError("Object.getOwnPropertyDescriptor called on a non-object: "+e);if(p(e,t)){var i,n,s;if(i={enumerable:!0,configurable:!0},c){var o=e.__proto__;e.__proto__=g;var n=l(e,t),s=h(e,t);if(e.__proto__=o,n||s)return n&&(i.get=n),s&&(i.set=s),i}return i.value=e[t],i}}}if(Object.getOwnPropertyNames||(Object.getOwnPropertyNames=function(e){return Object.keys(e)}),!Object.create){var F;F=null===Object.prototype.__proto__?function(){return{__proto__:null}}:function(){var e={};for(var t in e)e[t]=null;return e.constructor=e.hasOwnProperty=e.propertyIsEnumerable=e.isPrototypeOf=e.toLocaleString=e.toString=e.valueOf=e.__proto__=null,e},Object.create=function(e,t){var i;if(null===e)i=F();else{if("object"!=typeof e)throw new TypeError("typeof prototype["+typeof e+
 "] != 'object'");var n=function(){};n.prototype=e,i=new n,i.__proto__=e}return void 0!==t&&Object.defineProperties(i,t),i}}if(Object.defineProperty){var w=s({}),E="undefined"==typeof document||s(document.createElement("div"));if(!w||!E)var b=Object.defineProperty}if(!Object.defineProperty||b){Object.defineProperty=function(e,t,i){if("object"!=typeof e&&"function"!=typeof e||null===e)throw new TypeError("Object.defineProperty called on non-object: "+e);if("object"!=typeof i&&"function"!=typeof i||null===i)throw new TypeError("Property description must be an object: "+i);if(b)try{return b.call(Object,e,t,i)}catch(e){}if(p(i,"value"))if(c&&(l(e,t)||h(e,t))){var n=e.__proto__;e.__proto__=g,delete e[t],e[t]=i.value,e.__proto__=n}else e[t]=i.value;else{if(!c)throw new TypeError("getters & setters can not be defined on this javascript engine");p(i,"get")&&r(e,t,i.get),p(i,"set")&&a(e,t,i.set)}return e}}Object.defineProperties||(Object.defineProperties=function(e,t){for(var i in t)p(t,i)&&O
 bject.defineProperty(e,i,t[i]);return e}),Object.seal||(Object.seal=function(e){return e}),Object.freeze||(Object.freeze=function(e){return e});try{Object.freeze(function(){})}catch(e){Object.freeze=function(e){return function(t){return"function"==typeof t?t:e(t)}}(Object.freeze)}if(Object.preventExtensions||(Object.preventExtensions=function(e){return e}),Object.isSealed||(Object.isSealed=function(e){return!1}),Object.isFrozen||(Object.isFrozen=function(e){return!1}),Object.isExtensible||(Object.isExtensible=function(e){if(Object(e)===e)throw new TypeError;for(var t="";p(e,t);)t+="?";e[t]=!0;var i=p(e,t);return delete e[t],i}),!Object.keys){var $=!0,y=["toString","toLocaleString","valueOf","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","constructor"],B=y.length;for(var D in{toString:null})$=!1;Object.keys=function(e){if("object"!=typeof e&&"function"!=typeof e||null===e)throw new TypeError("Object.keys called on a non-object");var t=[];for(var i in e)p(e,i)&&t.push(i);if($
 )for(var n=0,s=B;n<s;n++){var o=y[n];p(e,o)&&t.push(o)}return t}}Date.now||(Date.now=function(){return(new Date).getTime()});var S="\t\n\v\f\r   ᠎              \u2028\u2029\ufeff";if(!String.prototype.trim||S.trim()){S="["+S+"]";var k=new RegExp("^"+S+S+"*"),x=new RegExp(S+S+"*$");String.prototype.trim=function(){return String(this).replace(k,"").replace(x,"")}}var L=function(e){if(null==e)throw new TypeError("can't convert "+e+" to object");return Object(e)}}),define("ace/lib/fixoldbrowsers",["require","exports","module","ace/lib/regexp","ace/lib/es5-shim"],function(e,t,i){"use strict";e("./regexp"),e("./es5-shim")}),define("ace/lib/dom",["require","exports","module"],function(e,t,i){"use strict";if(t.getDocumentHead=function(e){return e||(e=document),e.head||e.getElementsByTagName("head")[0]||e.documentElement},t.createElement=function(e,t){return document.createElementNS?document.createElementNS(t||"http://www.w3.org/1999/xhtml",e):document.create
 Element(e)},t.hasCssClass=function(e,t){return-1!==(e.className+"").split(/\s+/g).indexOf(t)},t.addCssClass=function(e,i){t.hasCssClass(e,i)||(e.className+=" "+i)},t.removeCssClass=function(e,t){for(var i=e.className.split(/\s+/g);;){var n=i.indexOf(t);if(-1==n)break;i.splice(n,1)}e.className=i.join(" ")},t.toggleCssClass=function(e,t){for(var i=e.className.split(/\s+/g),n=!0;;){var s=i.indexOf(t);if(-1==s)break;n=!1,i.splice(s,1)}return n&&i.push(t),e.className=i.join(" "),n},t.setCssClass=function(e,i,n){n?t.addCssClass(e,i):t.removeCssClass(e,i)},t.hasCssString=function(e,t){var i,n=0;if(t=t||document,t.createStyleSheet&&(i=t.styleSheets)){for(;n<i.length;)if(i[n++].owningElement.id===e)return!0}else if(i=t.getElementsByTagName("style"))for(;n<i.length;)if(i[n++].id===e)return!0;return!1},t.importCssString=function(e,i,n){if(n=n||document,i&&t.hasCssString(i,n))return null;var s;i&&(e+="\n/*# sourceURL=ace/css/"+i+" */"),n.createStyleSheet?(s=n.createStyleSheet(),s.cssText=e,i&&(
 s.owningElement.id=i)):(s=t.createElement("style"),s.appendChild(n.createTextNode(e)),i&&(s.id=i),t.getDocumentHead(n).appendChild(s))},t.importCssStylsheet=function(e,i){if(i.createStyleSheet)i.createStyleSheet(e);else{var n=t.createElement("link");n.rel="stylesheet",n.href=e,t.getDocumentHead(i).appendChild(n)}},t.getInnerWidth=function(e){return parseInt(t.computedStyle(e,"paddingLeft"),10)+parseInt(t.computedStyle(e,"paddingRight"),10)+e.clientWidth},t.getInnerHeight=function(e){return parseInt(t.computedStyle(e,"paddingTop"),10)+parseInt(t.computedStyle(e,"paddingBottom"),10)+e.clientHeight},t.scrollbarWidth=function(e){var i=t.createElement("ace_inner");i.style.width="100%",i.style.minWidth="0px",i.style.height="200px",i.style.display="block";var n=t.createElement("ace_outer"),s=n.style;s.position="absolute",s.left="-10000px",s.overflow="hidden",s.width="200px",s.minWidth="0px",s.height="150px",s.display="block",n.appendChild(i);var o=e.documentElement;o.appendChild(n);var r=i
 .offsetWidth;s.overflow="scroll";var a=i.offsetWidth;return r==a&&(a=n.clientWidth),o.removeChild(n),r-a},"undefined"==typeof document)return void(t.importCssString=function(){});void 0!==window.pageYOffset?(t.getPageScrollTop=function(){return window.pageYOffset},t.getPageScrollLeft=function(){return window.pageXOffset}):(t.getPageScrollTop=function(){return document.body.scrollTop},t.getPageScrollLeft=function(){return document.body.scrollLeft}),window.getComputedStyle?t.computedStyle=function(e,t){return t?(window.getComputedStyle(e,"")||{})[t]||"":window.getComputedStyle(e,"")||{}}:t.computedStyle=function(e,t){return t?e.currentStyle[t]:e.currentStyle},t.setInnerHtml=function(e,t){var i=e.cloneNode(!1);return i.innerHTML=t,e.parentNode.replaceChild(i,e),i},"textContent"in document.documentElement?(t.setInnerText=function(e,t){e.textContent=t},t.getInnerText=function(e){return e.textContent}):(t.setInnerText=function(e,t){e.innerText=t},t.getInnerText=function(e){return e.innerT
 ext}),t.getParentWindow=function(e){return e.defaultView||e.parentWindow}}),define("ace/lib/oop",["require","exports","module"],function(e,t,i){"use strict";t.inherits=function(e,t){e.super_=t,e.prototype=Object.create(t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}})},t.mixin=function(e,t){for(var i in t)e[i]=t[i];return e},t.implement=function(e,i){t.mixin(e,i)}}),define("ace/lib/keys",["require","exports","module","ace/lib/fixoldbrowsers","ace/lib/oop"],function(e,t,i){"use strict";e("./fixoldbrowsers");var n=e("./oop"),s=function(){var e,t,i={MODIFIER_KEYS:{16:"Shift",17:"Ctrl",18:"Alt",224:"Meta"},KEY_MODS:{ctrl:1,alt:2,option:2,shift:4,super:8,meta:8,command:8,cmd:8},FUNCTION_KEYS:{8:"Backspace",9:"Tab",13:"Return",19:"Pause",27:"Esc",32:"Space",33:"PageUp",34:"PageDown",35:"End",36:"Home",37:"Left",38:"Up",39:"Right",40:"Down",44:"Print",45:"Insert",46:"Delete",96:"Numpad0",97:"Numpad1",98:"Numpad2",99:"Numpad3",100:"Numpad4",101:"Numpad5",102:"Nu
 mpad6",103:"Numpad7",104:"Numpad8",105:"Numpad9","-13":"NumpadEnter",112:"F1",113:"F2",114:"F3",115:"F4",116:"F5",117:"F6",118:"F7",119:"F8",120:"F9",121:"F10",122:"F11",123:"F12",144:"Numlock",145:"Scrolllock"},PRINTABLE_KEYS:{32:" ",48:"0",49:"1",50:"2",51:"3",52:"4",53:"5",54:"6",55:"7",56:"8",57:"9",59:";",61:"=",65:"a",66:"b",67:"c",68:"d",69:"e",70:"f",71:"g",72:"h",73:"i",74:"j",75:"k",76:"l",77:"m",78:"n",79:"o",80:"p",81:"q",82:"r",83:"s",84:"t",85:"u",86:"v",87:"w",88:"x",89:"y",90:"z",107:"+",109:"-",110:".",186:";",187:"=",188:",",189:"-",190:".",191:"/",192:"`",219:"[",220:"\\",221:"]",222:"'",111:"/",106:"*"}};for(t in i.FUNCTION_KEYS)e=i.FUNCTION_KEYS[t].toLowerCase(),i[e]=parseInt(t,10);for(t in i.PRINTABLE_KEYS)e=i.PRINTABLE_KEYS[t].toLowerCase(),i[e]=parseInt(t,10);return n.mixin(i,i.MODIFIER_KEYS),n.mixin(i,i.PRINTABLE_KEYS),n.mixin(i,i.FUNCTION_KEYS),i.enter=i.return,i.escape=i.esc,i.del=i.delete,i[173]="-",function(){for(var e=["cmd","ctrl","alt","shift"],t=Math
 .pow(2,e.length);t--;)i.KEY_MODS[t]=e.filter(function(e){return t&i.KEY_MODS[e]}).join("-")+"-"}(),i.KEY_MODS[0]="",i.KEY_MODS[-1]="input-",i}();n.mixin(t,s),t.keyCodeToString=function(e){var t=s[e];return"string"!=typeof t&&(t=String.fromCharCode(e)),t.toLowerCase()}}),define("ace/lib/useragent",["require","exports","module"],function(e,t,i){"use strict";if(t.OS={LINUX:"LINUX",MAC:"MAC",WINDOWS:"WINDOWS"},t.getOS=function(){return t.isMac?t.OS.MAC:t.isLinux?t.OS.LINUX:t.OS.WINDOWS},"object"==typeof navigator){var n=(navigator.platform.match(/mac|win|linux/i)||["other"])[0].toLowerCase(),s=navigator.userAgent;t.isWin="win"==n,t.isMac="mac"==n,t.isLinux="linux"==n,t.isIE="Microsoft Internet Explorer"==navigator.appName||navigator.appName.indexOf("MSAppHost")>=0?parseFloat((s.match(/(?:MSIE |Trident\/[0-9]+[\.0-9]+;.*rv:)([0-9]+[\.0-9]+)/)||[])[1]):parseFloat((s.match(/(?:Trident\/[0-9]+[\.0-9]+;.*rv:)([0-9]+[\.0-9]+)/)||[])[1]),t.isOldIE=t.isIE&&t.isIE<9,t.isGecko=t.isMozilla=(window
 .Controllers||window.controllers)&&"Gecko"===window.navigator.product,t.isOldGecko=t.isGecko&&parseInt((s.match(/rv:(\d+)/)||[])[1],10)<4,t.isOpera=window.opera&&"[object Opera]"==Object.prototype.toString.call(window.opera),t.isWebKit=parseFloat(s.split("WebKit/")[1])||void 0,t.isChrome=parseFloat(s.split(" Chrome/")[1])||void 0,t.isAIR=s.indexOf("AdobeAIR")>=0,t.isIPad=s.indexOf("iPad")>=0,t.isChromeOS=s.indexOf(" CrOS ")>=0,t.isIOS=/iPad|iPhone|iPod/.test(s)&&!window.MSStream,t.isIOS&&(t.isMac=!0)}}),define("ace/lib/event",["require","exports","module","ace/lib/keys","ace/lib/useragent"],function(e,t,i){"use strict";function n(e,t,i){var n=h(t);if(!r.isMac&&a){if(t.getModifierState&&(t.getModifierState("OS")||t.getModifierState("Win"))&&(n|=8),a.altGr){if(3==(3&n))return;a.altGr=0}if(18===i||17===i){var s="location"in t?t.location:t.keyLocation;if(17===i&&1===s)1==a[i]&&(l=t.timeStamp);else if(18===i&&3===n&&2===s){var c=t.timeStamp-l;c<50&&(a.altGr=!0)}}}if(i in o.MODIFIER_KEYS&
 &(i=-1),8&n&&i>=91&&i<=93&&(i=-1),!n&&13===i){var s="location"in t?t.location:t.keyLocation;if(3===s&&(e(t,n,-i),t.defaultPrevented))return}if(r.isChromeOS&&8&n){if(e(t,n,i),t.defaultPrevented)return;n&=-9}return!!(n||i in o.FUNCTION_KEYS||i in o.PRINTABLE_KEYS)&&e(t,n,i)}function s(){a=Object.create(null)}var o=e("./keys"),r=e("./useragent"),a=null,l=0;t.addListener=function(e,t,i){if(e.addEventListener)return e.addEventListener(t,i,!1);if(e.attachEvent){var n=function(){i.call(e,window.event)};i._wrapper=n,e.attachEvent("on"+t,n)}},t.removeListener=function(e,t,i){if(e.removeEventListener)return e.removeEventListener(t,i,!1);e.detachEvent&&e.detachEvent("on"+t,i._wrapper||i)},t.stopEvent=function(e){return t.stopPropagation(e),t.preventDefault(e),!1},t.stopPropagation=function(e){e.stopPropagation?e.stopPropagation():e.cancelBubble=!0},t.preventDefault=function(e){e.preventDefault?e.preventDefault():e.returnValue=!1},t.getButton=function(e){return"dblclick"==e.type?0:"contextmenu"
 ==e.type||r.isMac&&e.ctrlKey&&!e.altKey&&!e.shiftKey?2:e.preventDefault?e.button:{1:0,2:2,4:1}[e.button]},t.capture=function(e,i,n){function s(e){i&&i(e),n&&n(e),t.removeListener(document,"mousemove",i,!0),t.removeListener(document,"mouseup",s,!0),t.removeListener(document,"dragstart",s,!0)}return t.addListener(document,"mousemove",i,!0),t.addListener(document,"mouseup",s,!0),t.addListener(document,"dragstart",s,!0),s},t.addTouchMoveListener=function(e,i){var n,s;t.addListener(e,"touchstart",function(e){var t=e.touches,i=t[0];n=i.clientX,s=i.clientY}),t.addListener(e,"touchmove",function(e){var t=e.touches;if(!(t.length>1)){var o=t[0];e.wheelX=n-o.clientX,e.wheelY=s-o.clientY,n=o.clientX,s=o.clientY,i(e)}})},t.addMouseWheelListener=function(e,i){"onmousewheel"in e?t.addListener(e,"mousewheel",function(e){void 0!==e.wheelDeltaX?(e.wheelX=-e.wheelDeltaX/8,e.wheelY=-e.wheelDeltaY/8):(e.wheelX=0,e.wheelY=-e.wheelDelta/8),i(e)}):"onwheel"in e?t.addListener(e,"wheel",function(e){switch(e.
 deltaMode){case e.DOM_DELTA_PIXEL:e.wheelX=.35*e.deltaX||0,e.wheelY=.35*e.deltaY||0;break;case e.DOM_DELTA_LINE:case e.DOM_DELTA_PAGE:e.wheelX=5*(e.deltaX||0),e.wheelY=5*(e.deltaY||0)}i(e)}):t.addListener(e,"DOMMouseScroll",function(e){e.axis&&e.axis==e.HORIZONTAL_AXIS?(e.wheelX=5*(e.detail||0),e.wheelY=0):(e.wheelX=0,e.wheelY=5*(e.detail||0)),i(e)})},t.addMultiMouseDownListener=function(e,i,n,s){function o(e){if(0!==t.getButton(e)?u=0:e.detail>1?++u>4&&(u=1):u=1,r.isIE){var o=Math.abs(e.clientX-l)>5||Math.abs(e.clientY-h)>5;c&&!o||(u=1),c&&clearTimeout(c),c=setTimeout(function(){c=null},i[u-1]||600),1==u&&(l=e.clientX,h=e.clientY)}if(e._clicks=u,n[s]("mousedown",e),u>4)u=0;else if(u>1)return n[s](d[u],e)}function a(e){u=2,c&&clearTimeout(c),c=setTimeout(function(){c=null},i[u-1]||600),n[s]("mousedown",e),n[s](d[u],e)}var l,h,c,u=0,d={2:"dblclick",3:"tripleclick",4:"quadclick"};Array.isArray(e)||(e=[e]),e.forEach(function(e){t.addListener(e,"mousedown",o),r.isOldIE&&t.addListener(e,
 "dblclick",a)})};var h=!r.isMac||!r.isOpera||"KeyboardEvent"in window?function(e){return 0|(e.ctrlKey?1:0)|(e.altKey?2:0)|(e.shiftKey?4:0)|(e.metaKey?8:0)}:function(e){return 0|(e.metaKey?1:0)|(e.altKey?2:0)|(e.shiftKey?4:0)|(e.ctrlKey?8:0)};if(t.getModifierString=function(e){return o.KEY_MODS[h(e)]},t.addCommandKeyListener=function(e,i){var o=t.addListener;if(r.isOldGecko||r.isOpera&&!("KeyboardEvent"in window)){var l=null;o(e,"keydown",function(e){l=e.keyCode}),o(e,"keypress",function(e){return n(i,e,l)})}else{var h=null;o(e,"keydown",function(e){a[e.keyCode]=(a[e.keyCode]||0)+1;var t=n(i,e,e.keyCode);return h=e.defaultPrevented,t}),o(e,"keypress",function(e){h&&(e.ctrlKey||e.altKey||e.shiftKey||e.metaKey)&&(t.stopEvent(e),h=null)}),o(e,"keyup",function(e){a[e.keyCode]=null}),a||(s(),o(window,"focus",s))}},"object"==typeof window&&window.postMessage&&!r.isOldIE){t.nextTick=function(e,i){i=i||window;var n="zero-timeout-message-1";t.addListener(i,"message",function s(o){o.data==n&&(
 t.stopPropagation(o),t.removeListener(i,"message",s),e())}),i.postMessage(n,"*")}}t.nextFrame="object"==typeof window&&(window.requestAnimationFrame||window.mozRequestAnimationFrame||window.webkitRequestAnimationFrame||window.msRequestAnimationFrame||window.oRequestAnimationFrame),t.nextFrame?t.nextFrame=t.nextFrame.bind(window):t.nextFrame=function(e){setTimeout(e,17)}}),define("ace/lib/lang",["require","exports","module"],function(e,t,i){"use strict";t.last=function(e){return e[e.length-1]},t.stringReverse=function(e){return e.split("").reverse().join("")},t.stringRepeat=function(e,t){for(var i="";t>0;)1&t&&(i+=e),(t>>=1)&&(e+=e);return i};var n=/^\s\s*/,s=/\s\s*$/;t.stringTrimLeft=function(e){return e.replace(n,"")},t.stringTrimRight=function(e){return e.replace(s,"")},t.copyObject=function(e){var t={};for(var i in e)t[i]=e[i];return t},t.copyArray=function(e){for(var t=[],i=0,n=e.length;i<n;i++)e[i]&&"object"==typeof e[i]?t[i]=this.copyObject(e[i]):t[i]=e[i];return t},t.deepCopy
 =function e(t){if("object"!=typeof t||!t)return t;var i;if(Array.isArray(t)){i=[];for(var n=0;n<t.length;n++)i[n]=e(t[n]);return i}if("[object Object]"!==Object.prototype.toString.call(t))return t;i={};for(var n in t)i[n]=e(t[n]);return i},t.arrayToMap=function(e){for(var t={},i=0;i<e.length;i++)t[e[i]]=1;return t},t.createMap=function(e){var t=Object.create(null);for(var i in e)t[i]=e[i];return t},t.arrayRemove=function(e,t){for(var i=0;i<=e.length;i++)t===e[i]&&e.splice(i,1)},t.escapeRegExp=function(e){return e.replace(/([.*+?^${}()|[\]\/\\])/g,"\\$1")},t.escapeHTML=function(e){return e.replace(/&/g,"&#38;").replace(/"/g,"&#34;").replace(/'/g,"&#39;").replace(/</g,"&#60;")},t.getMatchOffsets=function(e,t){var i=[];return e.replace(t,function(e){i.push({offset:arguments[arguments.length-2],length:e.length})}),i},t.deferredCall=function(e){var t=null,i=function(){t=null,e()},n=function(e){return n.cancel(),t=setTimeout(i,e||0),n};return n.schedule=n,n.call=function(){return this.can
 cel(),e(),n},n.cancel=function(){return clearTimeout(t),t=null,n},n.isPending=function(){return t},n},t.delayedCall=function(e,t){var i=null,n=function(){i=null,e()},s=function(e){null==i&&(i=setTimeout(n,e||t))};return s.delay=function(e){i&&clearTimeout(i),i=setTimeout(n,e||t)},s.schedule=s,s.call=function(){this.cancel(),e()},s.cancel=function(){i&&clearTimeout(i),i=null},s.isPending=function(){return i},s}}),define("ace/keyboard/textinput_ios",["require","exports","module","ace/lib/event","ace/lib/useragent","ace/lib/dom","ace/lib/lang","ace/lib/keys"],function(e,t,i){"use strict";var n=e("../lib/event"),s=e("../lib/useragent"),o=e("../lib/dom"),r=e("../lib/lang"),a=e("../lib/keys"),l=a.KEY_MODS,h=s.isChrome<18,c=s.isIE,u=function(e,t){function i(e){if(!C){if(C=!0,B)t=0,i=e?0:g.value.length-1;else var t=4,i=5;try{g.setSelectionRange(t,i)}catch(e){}C=!1}}function u(){C||(g.value=f,s.isWebKit&&b.schedule())}function d(){clearTimeout(H),H=setTimeout(function(){v&&(g.style.cssText=v
 ,v=""),null==t.renderer.$keepTextAreaAtCursor&&(t.renderer.$keepTextAreaAtCursor=!0,t.renderer.$moveTextAreaToCursor())},0)}var g=o.createElement("textarea");g.className=s.isIOS?"ace_text-input ace_text-input-ios":"ace_text-input",s.isTouchPad&&g.setAttribute("x-palm-disable-auto-cap",!0),g.setAttribute("wrap","off"),g.setAttribute("autocorrect","off"),g.setAttribute("autocapitalize","off"),g.setAttribute("spellcheck",!1),g.style.opacity="0",e.insertBefore(g,e.firstChild);var f="\n aaaa a\n",m=!1,p=!1,A=!1,C=!1,v="",F=!0;try{var w=document.activeElement===g}catch(e){}n.addListener(g,"blur",function(e){t.onBlur(e),w=!1}),n.addListener(g,"focus",function(e){w=!0,t.onFocus(e),i()}),this.focus=function(){if(v)return g.focus();g.style.position="fixed",g.focus()},this.blur=function(){g.blur()},this.isFocused=function(){return w};var E=r.delayedCall(function(){w&&i(F)}),b=r.delayedCall(function(){C||(g.value=f,w&&i())});s.isWebKit||t.addEventListener("changeSelection",function(){t.selectio
 n.isEmpty()!=F&&(F=!F,E.schedule())}),u(),w&&t.onFocus();var $=function(e){return 0===e.selectionStart&&e.selectionEnd===e.value.length},y=function(e){$(g)?(t.selectAll(),i()):B&&i(t.selection.isEmpty())},B=null;this.setInputHandler=function(e){B=e},this.getInputHandler=function(){return B};var D=!1,S=function(e){4===g.selectionStart&&5===g.selectionEnd||(B&&(e=B(e),B=null),A?(i(),e&&t.onPaste(e),A=!1):e==f.substr(0)&&4===g.selectionStart?D?t.execCommand("del",{source:"ace"}):t.execCommand("backspace",{source:"ace"}):m||(e.substring(0,9)==f&&e.length>f.length?e=e.substr(9):e.substr(0,4)==f.substr(0,4)?e=e.substr(4,e.length-f.length+1):e.charAt(e.length-1)==f.charAt(0)&&(e=e.slice(0,-1)),e!=f.charAt(0)&&e.charAt(e.length-1)==f.charAt(0)&&(e=e.slice(0,-1)),e&&t.onTextInput(e)),m&&(m=!1),D&&(D=!1))},k=function(e){if(!C){var t=g.value;S(t),u()}},x=function(e,t,i){var n=e.clipboardData||window.clipboardData;if(n&&!h){var s=c||i?"Text":"text/plain";try{return t?!1!==n.setData(s,t):n.getDa
 ta(s)}catch(e){if(!i)return x(e,t,!0)}}},L=function(e,o){var r=t.getCopyText();if(!r)return n.preventDefault(e);x(e,r)?(s.isIOS&&(p=o,g.value="\n aa"+r+"a a\n",g.setSelectionRange(4,4+r.length),m={value:r}),o?t.onCut():t.onCopy(),s.isIOS||n.preventDefault(e)):(m=!0,g.value=r,g.select(),setTimeout(function(){m=!1,u(),i(),o?t.onCut():t.onCopy()}))},R=function(e){L(e,!0)},M=function(e){L(e,!1)},T=function(e){var o=x(e);"string"==typeof o?(o&&t.onPaste(o,e),s.isIE&&setTimeout(i),n.preventDefault(e)):(g.value="",A=!0)};n.addCommandKeyListener(g,t.onCommandKey.bind(t)),n.addListener(g,"select",y),n.addListener(g,"input",k),n.addListener(g,"cut",R),n.addListener(g,"copy",M),n.addListener(g,"paste",T);var _=function(e){C||!t.onCompositionStart||t.$readOnly||(C={},C.canUndo=t.session.$undoManager,t.onCompositionStart(),setTimeout(O,0),t.on("mousedown",I),C.canUndo&&!t.selection.isEmpty()&&(t.insert(""),t.session.markUndoGroup(),t.selection.clearSelection()),t.session.markUndoGroup())},O=func
 tion(){if(C&&t.onCompositionUpdate&&!t.$readOnly){var e=g.value.replace(/\x01/g,"");if(C.lastValue!==e&&(t.onCompositionUpdate(e),C.lastValue&&t.undo(),C.canUndo&&(C.lastValue=e),C.lastValue)){var i=t.selection.getRange();t.insert(C.lastValue),t.session.markUndoGroup(),C.range=t.selection.getRange(),t.selection.setRange(i),t.selection.clearSelection()}}},I=function(e){if(t.onCompositionEnd&&!t.$readOnly){var i=C;C=!1;var n=setTimeout(function(){n=null;var e=g.value.replace(/\x01/g,"");C||(e==i.lastValue?u():!i.lastValue&&e&&(u(),S(e)))});B=function(e){return n&&clearTimeout(n),e=e.replace(/\x01/g,""),e==i.lastValue?"":(i.lastValue&&n&&t.undo(),e)},t.onCompositionEnd(),t.removeListener("mousedown",I),"compositionend"==e.type&&i.range&&t.selection.setRange(i.range);(!!s.isChrome&&s.isChrome>=53||!!s.isWebKit&&s.isWebKit>=603)&&k()}},W=r.delayedCall(O,50);n.addListener(g,"compositionstart",_),s.isGecko?n.addListener(g,"text",function(){W.schedule()}):(n.addListener(g,"keyup",function()
 {W.schedule()}),n.addListener(g,"keydown",function(){W.schedule()})),n.addListener(g,"compositionend",I),this.getElement=function(){return g},this.setReadOnly=function(e){g.readOnly=e},this.onContextMenu=function(e){D=!0,i(t.selection.isEmpty()),t._emit("nativecontextmenu",{target:t,domEvent:e}),this.moveToMouse(e,!0)},this.moveToMouse=function(e,i){v||(v=g.style.cssText),g.style.cssText=(i?"z-index:100000;":"")+"height:"+g.style.height+";"+(s.isIE?"opacity:0.1;":"");var r=t.container.getBoundingClientRect(),a=o.computedStyle(t.container),l=r.top+(parseInt(a.borderTopWidth)||0),h=r.left+(parseInt(r.borderLeftWidth)||0),c=r.bottom-l-g.clientHeight-2,u=function(e){g.style.left=e.clientX-h-2+"px",g.style.top=Math.min(e.clientY-l-2,c)+"px"};u(e),"mousedown"==e.type&&(t.renderer.$keepTextAreaAtCursor&&(t.renderer.$keepTextAreaAtCursor=null),clearTimeout(H),s.isWin&&n.capture(t.container,u,d))},this.onContextMenuClose=d;var H,P=function(e){t.textInput.onContextMenu(e),d()};if(n.addListene
 r(g,"mouseup",P),n.addListener(g,"mousedown",function(e){e.preventDefault(),d()}),n.addListener(t.renderer.scroller,"contextmenu",P),n.addListener(g,"contextmenu",P),s.isIOS){var N=null,z=!1;e.addEventListener("keydown",function(e){N&&clearTimeout(N),z=!0}),e.addEventListener("keyup",function(e){N=setTimeout(function(){z=!1},100)});var V=function(e){if(document.activeElement===g&&!z){if(p)return setTimeout(function(){p=!1},100);var i=g.selectionStart,n=g.selectionEnd;if(g.setSelectionRange(4,5),i==n)switch(i){case 0:t.onCommandKey(null,0,a.up);break;case 1:t.onCommandKey(null,0,a.home);break;case 2:t.onCommandKey(null,l.option,a.left);break;case 4:t.onCommandKey(null,0,a.left);break;case 5:t.onCommandKey(null,0,a.right);break;case 7:t.onCommandKey(null,l.option,a.right);break;case 8:t.onCommandKey(null,0,a.end);break;case 9:t.onCommandKey(null,0,a.down)}else{switch(n){case 6:t.onCommandKey(null,l.shift,a.right);break;case 7:t.onCommandKey(null,l.shift|l.option,a.right);break;case 8:
 t.onCommandKey(null,l.shift,a.end);break;case 9:t.onCommandKey(null,l.shift,a.down)}switch(i){case 0:
+t.onCommandKey(null,l.shift,a.up);break;case 1:t.onCommandKey(null,l.shift,a.home);break;case 2:t.onCommandKey(null,l.shift|l.option,a.left);break;case 3:t.onCommandKey(null,l.shift,a.left)}}}};document.addEventListener("selectionchange",V),t.on("destroy",function(){document.removeEventListener("selectionchange",V)})}};t.TextInput=u}),define("ace/keyboard/textinput",["require","exports","module","ace/lib/event","ace/lib/useragent","ace/lib/dom","ace/lib/lang","ace/keyboard/textinput_ios"],function(e,t,i){"use strict";var n=e("../lib/event"),s=e("../lib/useragent"),o=e("../lib/dom"),r=e("../lib/lang"),a=s.isChrome<18,l=s.isIE,h=e("./textinput_ios").TextInput,c=function(e,t){function i(e){if(!p){if(p=!0,$)var t=0,i=e?0:d.value.length-1;else var t=e?2:1,i=2;try{d.setSelectionRange(t,i)}catch(e){}p=!1}}function c(){p||(d.value=g,s.isWebKit&&w.schedule())}function u(){clearTimeout(I),I=setTimeout(function(){A&&(d.style.cssText=A,A=""),null==t.renderer.$keepTextAreaAtCursor&&(t.renderer.$
 keepTextAreaAtCursor=!0,t.renderer.$moveTextAreaToCursor())},0)}if(s.isIOS)return h.call(this,e,t);var d=o.createElement("textarea");d.className="ace_text-input",d.setAttribute("wrap","off"),d.setAttribute("autocorrect","off"),d.setAttribute("autocapitalize","off"),d.setAttribute("spellcheck",!1),d.style.opacity="0",e.insertBefore(d,e.firstChild);var g="\u2028\u2028",f=!1,m=!1,p=!1,A="",C=!0;try{var v=document.activeElement===d}catch(e){}n.addListener(d,"blur",function(e){t.onBlur(e),v=!1}),n.addListener(d,"focus",function(e){v=!0,t.onFocus(e),i()}),this.focus=function(){if(A)return d.focus();var e=d.style.top;d.style.position="fixed",d.style.top="0px",d.focus(),setTimeout(function(){d.style.position="","0px"==d.style.top&&(d.style.top=e)},0)},this.blur=function(){d.blur()},this.isFocused=function(){return v};var F=r.delayedCall(function(){v&&i(C)}),w=r.delayedCall(function(){p||(d.value=g,v&&i())});s.isWebKit||t.addEventListener("changeSelection",function(){t.selection.isEmpty()!=C
 &&(C=!C,F.schedule())}),c(),v&&t.onFocus();var E=function(e){return 0===e.selectionStart&&e.selectionEnd===e.value.length},b=function(e){f?f=!1:E(d)?(t.selectAll(),i()):$&&i(t.selection.isEmpty())},$=null;this.setInputHandler=function(e){$=e},this.getInputHandler=function(){return $};var y=!1,B=function(e){$&&(e=$(e),$=null),m?(i(),e&&t.onPaste(e),m=!1):e==g.charAt(0)?y?t.execCommand("del",{source:"ace"}):t.execCommand("backspace",{source:"ace"}):(e.substring(0,2)==g?e=e.substr(2):e.charAt(0)==g.charAt(0)?e=e.substr(1):e.charAt(e.length-1)==g.charAt(0)&&(e=e.slice(0,-1)),e.charAt(e.length-1)==g.charAt(0)&&(e=e.slice(0,-1)),e&&t.onTextInput(e)),y&&(y=!1)},D=function(e){if(!p){var t=d.value;B(t),c()}},S=function(e,t,i){var n=e.clipboardData||window.clipboardData;if(n&&!a){var s=l||i?"Text":"text/plain";try{return t?!1!==n.setData(s,t):n.getData(s)}catch(e){if(!i)return S(e,t,!0)}}},k=function(e,s){var o=t.getCopyText();if(!o)return n.preventDefault(e);S(e,o)?(s?t.onCut():t.onCopy(),n.
 preventDefault(e)):(f=!0,d.value=o,d.select(),setTimeout(function(){f=!1,c(),i(),s?t.onCut():t.onCopy()}))},x=function(e){k(e,!0)},L=function(e){k(e,!1)},R=function(e){var o=S(e);"string"==typeof o?(o&&t.onPaste(o,e),s.isIE&&setTimeout(i),n.preventDefault(e)):(d.value="",m=!0)};n.addCommandKeyListener(d,t.onCommandKey.bind(t)),n.addListener(d,"select",b),n.addListener(d,"input",D),n.addListener(d,"cut",x),n.addListener(d,"copy",L),n.addListener(d,"paste",R),(!("oncut"in d)||!("oncopy"in d)||!("onpaste"in d))&&n.addListener(e,"keydown",function(e){if((!s.isMac||e.metaKey)&&e.ctrlKey)switch(e.keyCode){case 67:L(e);break;case 86:R(e);break;case 88:x(e)}});var M=function(e){p||!t.onCompositionStart||t.$readOnly||(p={},p.canUndo=t.session.$undoManager,t.onCompositionStart(),setTimeout(T,0),t.on("mousedown",_),p.canUndo&&!t.selection.isEmpty()&&(t.insert(""),t.session.markUndoGroup(),t.selection.clearSelection()),t.session.markUndoGroup())},T=function(){if(p&&t.onCompositionUpdate&&!t.$re
 adOnly){var e=d.value.replace(/\u2028/g,"");if(p.lastValue!==e&&(t.onCompositionUpdate(e),p.lastValue&&t.undo(),p.canUndo&&(p.lastValue=e),p.lastValue)){var i=t.selection.getRange();t.insert(p.lastValue),t.session.markUndoGroup(),p.range=t.selection.getRange(),t.selection.setRange(i),t.selection.clearSelection()}}},_=function(e){if(t.onCompositionEnd&&!t.$readOnly){var i=p;p=!1;var n=setTimeout(function(){n=null;var e=d.value.replace(/\u2028/g,"");p||(e==i.lastValue?c():!i.lastValue&&e&&(c(),B(e)))});$=function(e){return n&&clearTimeout(n),e=e.replace(/\u2028/g,""),e==i.lastValue?"":(i.lastValue&&n&&t.undo(),e)},t.onCompositionEnd(),t.removeListener("mousedown",_),"compositionend"==e.type&&i.range&&t.selection.setRange(i.range);(!!s.isChrome&&s.isChrome>=53||!!s.isWebKit&&s.isWebKit>=603)&&D()}},O=r.delayedCall(T,50);n.addListener(d,"compositionstart",M),s.isGecko?n.addListener(d,"text",function(){O.schedule()}):(n.addListener(d,"keyup",function(){O.schedule()}),n.addListener(d,"key
 down",function(){O.schedule()})),n.addListener(d,"compositionend",_),this.getElement=function(){return d},this.setReadOnly=function(e){d.readOnly=e},this.onContextMenu=function(e){y=!0,i(t.selection.isEmpty()),t._emit("nativecontextmenu",{target:t,domEvent:e}),this.moveToMouse(e,!0)},this.moveToMouse=function(e,i){A||(A=d.style.cssText),d.style.cssText=(i?"z-index:100000;":"")+"height:"+d.style.height+";"+(s.isIE?"opacity:0.1;":"");var r=t.container.getBoundingClientRect(),a=o.computedStyle(t.container),l=r.top+(parseInt(a.borderTopWidth)||0),h=r.left+(parseInt(r.borderLeftWidth)||0),c=r.bottom-l-d.clientHeight-2,g=function(e){d.style.left=e.clientX-h-2+"px",d.style.top=Math.min(e.clientY-l-2,c)+"px"};g(e),"mousedown"==e.type&&(t.renderer.$keepTextAreaAtCursor&&(t.renderer.$keepTextAreaAtCursor=null),clearTimeout(I),s.isWin&&n.capture(t.container,g,u))},this.onContextMenuClose=u;var I,W=function(e){t.textInput.onContextMenu(e),u()};n.addListener(d,"mouseup",W),n.addListener(d,"mouse
 down",function(e){e.preventDefault(),u()}),n.addListener(t.renderer.scroller,"contextmenu",W),n.addListener(d,"contextmenu",W)};t.TextInput=c}),define("ace/mouse/default_handlers",["require","exports","module","ace/lib/dom","ace/lib/event","ace/lib/useragent"],function(e,t,i){"use strict";function n(e){e.$clickSelection=null;var t=e.editor;t.setDefaultHandler("mousedown",this.onMouseDown.bind(e)),t.setDefaultHandler("dblclick",this.onDoubleClick.bind(e)),t.setDefaultHandler("tripleclick",this.onTripleClick.bind(e)),t.setDefaultHandler("quadclick",this.onQuadClick.bind(e)),t.setDefaultHandler("mousewheel",this.onMouseWheel.bind(e)),t.setDefaultHandler("touchmove",this.onTouchMove.bind(e)),["select","startSelect","selectEnd","selectAllEnd","selectByWordsEnd","selectByLinesEnd","dragWait","dragWaitEnd","focusWait"].forEach(function(t){e[t]=this[t]},this),e.selectByLines=this.extendSelectionBy.bind(e,"getLineRange"),e.selectByWords=this.extendSelectionBy.bind(e,"getWordRange")}function 
 s(e,t,i,n){return Math.sqrt(Math.pow(i-e,2)+Math.pow(n-t,2))}function o(e,t){if(e.start.row==e.end.row)var i=2*t.column-e.start.column-e.end.column;else if(e.start.row!=e.end.row-1||e.start.column||e.end.column)var i=2*t.row-e.start.row-e.end.row;else var i=t.column-4;return i<0?{cursor:e.start,anchor:e.end}:{cursor:e.end,anchor:e.start}}var r=(e("../lib/dom"),e("../lib/event"),e("../lib/useragent"));(function(){this.onMouseDown=function(e){var t=e.inSelection(),i=e.getDocumentPosition();this.mousedownEvent=e;var n=this.editor,s=e.getButton();if(0!==s){var o=n.getSelectionRange(),a=o.isEmpty();return n.$blockScrolling++,(a||1==s)&&n.selection.moveToPosition(i),n.$blockScrolling--,2==s&&(n.textInput.onContextMenu(e.domEvent),r.isMozilla||e.preventDefault()),void 0}return this.mousedownEvent.time=Date.now(),!t||n.isFocused()||(n.focus(),!this.$focusTimout||this.$clickSelection||n.inMultiSelectMode)?(this.captureMouse(e),this.startSelect(i,e.domEvent._clicks>1),e.preventDefault()):(thi
 s.setState("focusWait"),void this.captureMouse(e))},this.startSelect=function(e,t){e=e||this.editor.renderer.screenToTextCoordinates(this.x,this.y);var i=this.editor;i.$blockScrolling++,this.mousedownEvent.getShiftKey()?i.selection.selectToPosition(e):t||i.selection.moveToPosition(e),t||this.select(),i.renderer.scroller.setCapture&&i.renderer.scroller.setCapture(),i.setStyle("ace_selecting"),this.setState("select"),i.$blockScrolling--},this.select=function(){var e,t=this.editor,i=t.renderer.screenToTextCoordinates(this.x,this.y);if(t.$blockScrolling++,this.$clickSelection){var n=this.$clickSelection.comparePoint(i);if(-1==n)e=this.$clickSelection.end;else if(1==n)e=this.$clickSelection.start;else{var s=o(this.$clickSelection,i);i=s.cursor,e=s.anchor}t.selection.setSelectionAnchor(e.row,e.column)}t.selection.selectToPosition(i),t.$blockScrolling--,t.renderer.scrollCursorIntoView()},this.extendSelectionBy=function(e){var t,i=this.editor,n=i.renderer.screenToTextCoordinates(this.x,this
 .y),s=i.selection[e](n.row,n.column);if(i.$blockScrolling++,this.$clickSelection){var r=this.$clickSelection.comparePoint(s.start),a=this.$clickSelection.comparePoint(s.end);if(-1==r&&a<=0)t=this.$clickSelection.end,s.end.row==n.row&&s.end.column==n.column||(n=s.start);else if(1==a&&r>=0)t=this.$clickSelection.start,s.start.row==n.row&&s.start.column==n.column||(n=s.end);else if(-1==r&&1==a)n=s.end,t=s.start;else{var l=o(this.$clickSelection,n);n=l.cursor,t=l.anchor}i.selection.setSelectionAnchor(t.row,t.column)}i.selection.selectToPosition(n),i.$blockScrolling--,i.renderer.scrollCursorIntoView()},this.selectEnd=this.selectAllEnd=this.selectByWordsEnd=this.selectByLinesEnd=function(){this.$clickSelection=null,this.editor.unsetStyle("ace_selecting"),this.editor.renderer.scroller.releaseCapture&&this.editor.renderer.scroller.releaseCapture()},this.focusWait=function(){var e=s(this.mousedownEvent.x,this.mousedownEvent.y,this.x,this.y),t=Date.now();(e>0||t-this.mousedownEvent.time>this.
 $focusTimout)&&this.startSelect(this.mousedownEvent.getDocumentPosition())},this.onDoubleClick=function(e){var t=e.getDocumentPosition(),i=this.editor,n=i.session,s=n.getBracketRange(t);s?(s.isEmpty()&&(s.start.column--,s.end.column++),this.setState("select")):(s=i.selection.getWordRange(t.row,t.column),this.setState("selectByWords")),this.$clickSelection=s,this.select()},this.onTripleClick=function(e){var t=e.getDocumentPosition(),i=this.editor;this.setState("selectByLines");var n=i.getSelectionRange();n.isMultiLine()&&n.contains(t.row,t.column)?(this.$clickSelection=i.selection.getLineRange(n.start.row),this.$clickSelection.end=i.selection.getLineRange(n.end.row).end):this.$clickSelection=i.selection.getLineRange(t.row),this.select()},this.onQuadClick=function(e){var t=this.editor;t.selectAll(),this.$clickSelection=t.getSelectionRange(),this.setState("selectAll")},this.onMouseWheel=function(e){if(!e.getAccelKey()){e.getShiftKey()&&e.wheelY&&!e.wheelX&&(e.wheelX=e.wheelY,e.wheelY=0
 );var t=this.editor;this.$lastScroll||(this.$lastScroll={t:0,vx:0,vy:0,allowed:0});var i=this.$lastScroll,n=e.domEvent.timeStamp,s=n-i.t,o=e.wheelX/s,r=e.wheelY/s;s<250&&(o=(o+i.vx)/2,r=(r+i.vy)/2);var a=Math.abs(o/r),l=!1;if(a>=1&&t.renderer.isScrollableBy(e.wheelX*e.speed,0)&&(l=!0),a<=1&&t.renderer.isScrollableBy(0,e.wheelY*e.speed)&&(l=!0),l)i.allowed=n;else if(n-i.allowed<250){var h=Math.abs(o)<=1.1*Math.abs(i.vx)&&Math.abs(r)<=1.1*Math.abs(i.vy);h?(l=!0,i.allowed=n):i.allowed=0}return i.t=n,i.vx=o,i.vy=r,l?(t.renderer.scrollBy(e.wheelX*e.speed,e.wheelY*e.speed),e.stop()):void 0}},this.onTouchMove=function(e){this.editor._emit("mousewheel",e)}}).call(n.prototype),t.DefaultHandlers=n}),define("ace/tooltip",["require","exports","module","ace/lib/oop","ace/lib/dom"],function(e,t,i){"use strict";function n(e){this.isOpen=!1,this.$element=null,this.$parentNode=e}var s=(e("./lib/oop"),e("./lib/dom"));(function(){this.$init=function(){return this.$element=s.createElement("div"),this.$
 element.className="ace_tooltip",this.$element.style.display="none",this.$parentNode.appendChild(this.$element),this.$element},this.getElement=function(){return this.$element||this.$init()},this.setText=function(e){s.setInnerText(this.getElement(),e)},this.setHtml=function(e){this.getElement().innerHTML=e},this.setPosition=function(e,t){this.getElement().style.left=e+"px",this.getElement().style.top=t+"px"},this.setClassName=function(e){s.addCssClass(this.getElement(),e)},this.show=function(e,t,i){null!=e&&this.setText(e),null!=t&&null!=i&&this.setPosition(t,i),this.isOpen||(this.getElement().style.display="block",this.isOpen=!0)},this.hide=function(){this.isOpen&&(this.getElement().style.display="none",this.isOpen=!1)},this.getHeight=function(){return this.getElement().offsetHeight},this.getWidth=function(){return this.getElement().offsetWidth},this.destroy=function(){this.isOpen=!1,this.$element&&this.$element.parentNode&&this.$element.parentNode.removeChild(this.$element)}}).call(
 n.prototype),t.Tooltip=n}),define("ace/mouse/default_gutter_handler",["require","exports","module","ace/lib/dom","ace/lib/oop","ace/lib/event","ace/tooltip"],function(e,t,i){"use strict";function n(e){function t(){var t=u.getDocumentPosition().row,s=l.$annotations[t];if(!s)return i();if(t==r.session.getLength()){var o=r.renderer.pixelToScreenCoordinates(0,u.y).row,a=u.$pos;if(o>r.session.documentToScreenRow(a.row,a.column))return i()}if(d!=s)if(d=s.text.join("<br/>"),h.setHtml(d),h.show(),r._signal("showGutterTooltip",h),r.on("mousewheel",i),e.$tooltipFollowsMouse)n(u);else{var c=u.domEvent.target,g=c.getBoundingClientRect(),f=h.getElement().style;f.left=g.right+"px",f.top=g.bottom+"px"}}function i(){c&&(c=clearTimeout(c)),d&&(h.hide(),d=null,r._signal("hideGutterTooltip",h),r.removeEventListener("mousewheel",i))}function n(e){h.setPosition(e.x,e.y)}var r=e.editor,l=r.renderer.$gutterLayer,h=new s(r.container);e.editor.setDefaultHandler("guttermousedown",function(t){if(r.isFocused()
 &&0==t.getButton()){if("foldWidgets"!=l.getRegion(t)){var i=t.getDocumentPosition().row,n=r.session.selection;if(t.getShiftKey())n.selectTo(i,0);else{if(2==t.domEvent.detail)return r.selectAll(),t.preventDefault();e.$clickSelection=r.selection.getLineRange(i)}return e.setState("selectByLines"),e.captureMouse(t),t.preventDefault()}}});var c,u,d;e.editor.setDefaultHandler("guttermousemove",function(s){var r=s.domEvent.target||s.domEvent.srcElement;if(o.hasCssClass(r,"ace_fold-widget"))return i();d&&e.$tooltipFollowsMouse&&n(s),u=s,c||(c=setTimeout(function(){c=null,u&&!e.isMousePressed?t():i()},50))}),a.addListener(r.renderer.$gutter,"mouseout",function(e){u=null,d&&!c&&(c=setTimeout(function(){c=null,i()},50))}),r.on("changeSession",i)}function s(e){l.call(this,e)}var o=e("../lib/dom"),r=e("../lib/oop"),a=e("../lib/event"),l=e("../tooltip").Tooltip;r.inherits(s,l),function(){this.setPosition=function(e,t){var i=window.innerWidth||document.documentElement.clientWidth,n=window.innerHei
 ght||document.documentElement.clientHeight,s=this.getWidth(),o=this.getHeight();e+=15,t+=15,e+s>i&&(e-=e+s-i),t+o>n&&(t-=20+o),l.prototype.setPosition.call(this,e,t)}}.call(s.prototype),t.GutterHandler=n}),define("ace/mouse/mouse_event",["require","exports","module","ace/lib/event","ace/lib/useragent"],function(e,t,i){"use strict";var n=e("../lib/event"),s=e("../lib/useragent"),o=t.MouseEvent=function(e,t){this.domEvent=e,this.editor=t,this.x=this.clientX=e.clientX,this.y=this.clientY=e.clientY,this.$pos=null,this.$inSelection=null,this.propagationStopped=!1,this.defaultPrevented=!1};(function(){this.stopPropagation=function(){n.stopPropagation(this.domEvent),this.propagationStopped=!0},this.preventDefault=function(){n.preventDefault(this.domEvent),this.defaultPrevented=!0},this.stop=function(){this.stopPropagation(),this.preventDefault()},this.getDocumentPosition=function(){return this.$pos?this.$pos:(this.$pos=this.editor.renderer.screenToTextCoordinates(this.clientX,this.clientY)
 ,this.$pos)},this.inSelection=function(){if(null!==this.$inSelection)return this.$inSelection;var e=this.editor,t=e.getSelectionRange();if(t.isEmpty())this.$inSelection=!1;else{var i=this.getDocumentPosition();this.$inSelection=t.contains(i.row,i.column)}return this.$inSelection},this.getButton=function(){return n.getButton(this.domEvent)},this.getShiftKey=function(){return this.domEvent.shiftKey},this.getAccelKey=s.isMac?function(){return this.domEvent.metaKey}:function(){return this.domEvent.ctrlKey}}).call(o.prototype)}),define("ace/mouse/dragdrop_handler",["require","exports","module","ace/lib/dom","ace/lib/event","ace/lib/useragent"],function(e,t,i){"use strict";function n(e){function t(e,t){var i=Date.now(),n=!t||e.row!=t.row,o=!t||e.column!=t.column;if(!D||n||o)p.$blockScrolling+=1,p.moveCursorToPosition(e),p.$blockScrolling-=1,D=i,S={x:v,y:F};else{s(S.x,S.y,v,F)>c?D=null:i-D>=h&&(p.renderer.scrollCursorIntoView(),D=null)}}function i(e,t){var i=Date.now(),n=p.renderer.layerCo
 nfig.lineHeight,s=p.renderer.layerConfig.characterWidth,o=p.renderer.scroller.getBoundingClientRect(),r={x:{left:v-o.left,right:o.right-v},y:{top:F-o.top,bottom:o.bottom-F}},a=Math.min(r.x.left,r.x.right),h=Math.min(r.y.top,r.y.bottom),c={row:e.row,column:e.column};a/s<=2&&(c.column+=r.x.left<r.x.right?-3:2),h/n<=1&&(c.row+=r.y.top<r.y.bottom?-1:1);var u=e.row!=c.row,d=e.column!=c.column,g=!t||e.row!=t.row;u||d&&!g?B?i-B>=l&&p.renderer.scrollCursorIntoView(c):B=i:B=null}function n(){var e=b;b=p.renderer.screenToTextCoordinates(v,F),t(b,e),i(b,e)}function u(){E=p.selection.toOrientedRange(),C=p.session.addMarker(E,"ace_selection",p.getSelectionStyle()),p.clearSelection(),p.isFocused()&&p.renderer.$cursorLayer.setBlinking(!1),clearInterval(w),n(),w=setInterval(n,20),x=0,r.addListener(document,"mousemove",g)}function d(){clearInterval(w),p.session.removeMarker(C),C=null,p.$blockScrolling+=1,p.selection.fromOrientedRange(E),p.$blockScrolling-=1,p.isFocused()&&!y&&p.renderer.$cursorLayer
 .setBlinking(!p.getReadOnly()),E=null,b=null,x=0,B=null,D=null,r.removeListener(document,"mousemove",g)}function g(){null==L&&(L=setTimeout(function(){null!=L&&C&&d()},20))}function f(e){var t=e.types;return!t||Array.prototype.some.call(t,function(e){return"text/plain"==e||"Text"==e})}function m(e){var t=["copy","copymove","all","uninitialized"],i=["move","copymove","linkmove","all","uninitialized"],n=a.isMac?e.altKey:e.ctrlKey,s="uninitialized";try{s=e.dataTransfer.effectAllowed.toLowerCase()}catch(e){}var o="none";return n&&t.indexOf(s)>=0?o="copy":i.indexOf(s)>=0?o="move":t.indexOf(s)>=0&&(o="copy"),o}var p=e.editor,A=o.createElement("img");A.src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",a.isOpera&&(A.style.cssText="width:1px;height:1px;position:fixed;top:0;left:0;z-index:2147483647;opacity:0;"),["dragWait","dragWaitEnd","startDrag","dragReadyEnd","onMouseDrag"].forEach(function(t){e[t]=this[t]},this),p.addEventListener("mousedown",this.onMouseD
 own.bind(e));var C,v,F,w,E,b,$,y,B,D,S,k=p.container,x=0;this.onDragStart=function(e){if(this.cancelDrag||!k.draggable){var t=this;return setTimeout(function(){t.startSelect(),t.captureMouse(e)},0),e.preventDefault()}E=p.getSelectionRange();var i=e.dataTransfer;i.effectAllowed=p.getReadOnly()?"copy":"copyMove",a.isOpera&&(p.container.appendChild(A),A.scrollTop=0),i.setDragImage&&i.setDragImage(A,0,0),a.isOpera&&p.container.removeChild(A),i.clearData(),i.setData("Text",p.session.getTextRange()),y=!0,this.setState("drag")},this.onDragEnd=function(e){if(k.draggable=!1,y=!1,this.setState(null),!p.getReadOnly()){var t=e.dataTransfer.dropEffect;!$&&"move"==t&&p.session.remove(p.getSelectionRange()),p.renderer.$cursorLayer.setBlinking(!0)}this.editor.unsetStyle("ace_dragging"),this.editor.renderer.setCursorStyle("")},this.onDragEnter=function(e){if(!p.getReadOnly()&&f(e.dataTransfer))return v=e.clientX,F=e.clientY,C||u(),x++,e.dataTransfer.dropEffect=$=m(e),r.preventDefault(e)},this.onDrag
 Over=function(e){if(!p.getReadOnly()&&f(e.dataTransfer))return v=e.clientX,F=e.clientY,C||(u(),x++),null!==L&&(L=null),e.dataTransfer.dropEffect=$=m(e),r.preventDefault(e)},this.onDragLeave=function(e){if(--x<=0&&C)return d(),$=null,r.preventDefault(e)},this.onDrop=function(e){if(b){var t=e.dataTransfer;if(y)switch($){case"move":E=E.contains(b.row,b.column)?{start:b,end:b}:p.moveText(E,b);break;case"copy":E=p.moveText(E,b,!0)}else{var i=t.getData("Text");E={start:b,end:p.session.insert(b,i)},p.focus(),$=null}return d(),r.preventDefault(e)}},r.addListener(k,"dragstart",this.onDragStart.bind(e)),r.addListener(k,"dragend",this.onDragEnd.bind(e)),r.addListener(k,"dragenter",this.onDragEnter.bind(e)),r.addListener(k,"dragover",this.onDragOver.bind(e)),r.addListener(k,"dragleave",this.onDragLeave.bind(e)),r.addListener(k,"drop",this.onDrop.bind(e));var L=null}function s(e,t,i,n){return Math.sqrt(Math.pow(i-e,2)+Math.pow(n-t,2))}var o=e("../lib/dom"),r=e("../lib/event"),a=e("../lib/userage
 nt"),l=200,h=200,c=5;(function(){this.dragWait=function(){Date.now()-this.mousedownEvent.time>this.editor.getDragDelay()&&this.startDrag()},this.dragWaitEnd=function(){this.editor.container.draggable=!1,this.startSelect(this.mousedownEvent.getDocumentPosition()),this.selectEnd()},this.dragReadyEnd=function(e){this.editor.renderer.$cursorLayer.setBlinking(!this.editor.getReadOnly()),this.editor.unsetStyle("ace_dragging"),this.editor.renderer.setCursorStyle(""),this.dragWaitEnd()},this.startDrag=function(){this.cancelDrag=!1;var e=this.editor;e.container.draggable=!0,e.renderer.$cursorLayer.setBlinking(!1),e.setStyle("ace_dragging");var t=a.isWin?"default":"move";e.renderer.setCursorStyle(t),this.setState("dragReady")},this.onMouseDrag=function(e){var t=this.editor.container;if(a.isIE&&"dragReady"==this.state){var i=s(this.mousedownEvent.x,this.mousedownEvent.y,this.x,this.y);i>3&&t.dragDrop()}if("dragWait"===this.state){var i=s(this.mousedownEvent.x,this.mousedownEvent.y,this.x,this.
 y);i>0&&(t.draggable=!1,this.startSelect(this.mousedownEvent.getDocumentPosition()))}},this.onMouseDown=function(e){if(this.$dragEnabled){this.mousedownEvent=e;var t=this.editor,i=e.inSelection(),n=e.getButton();if(1===(e.domEvent.detail||1)&&0===n&&i){if(e.editor.inMultiSelectMode&&(e.getAccelKey()||e.getShiftKey()))return;this.mousedownEvent.time=Date.now();var s=e.domEvent.target||e.domEvent.srcElement;if("unselectable"in s&&(s.unselectable="on"),t.getDragDelay()){if(a.isWebKit){this.cancelDrag=!0;t.container.draggable=!0}this.setState("dragWait")}else this.startDrag();this.captureMouse(e,this.onMouseDrag.bind(this)),e.defaultPrevented=!0}}}}).call(n.prototype),t.DragdropHandler=n}),define("ace/lib/net",["require","exports","module","ace/lib/dom"],function(e,t,i){"use strict";var n=e("./dom");t.get=function(e,t){var i=new XMLHttpRequest;i.open("GET",e,!0),i.onreadystatechange=function(){4===i.readyState&&t(i.responseText)},i.send(null)},t.loadScript=function(e,t){var i=n.getDocum
 entHead(),s=document.createElement("script");s.src=e,i.appendChild(s),s.onload=s.onreadystatechange=function(e,i){!i&&s.readyState&&"loaded"!=s.readyState&&"complete"!=s.readyState||(s=s.onload=s.onreadystatechange=null,i||t())}},t.qualifyURL=function(e){var t=document.createElement("a");return t.href=e,t.href}}),define("ace/lib/event_emitter",["require","exports","module"],function(e,t,i){"use strict";var n={},s=function(){this.propagationStopped=!0},o=function(){this.defaultPrevented=!0};n._emit=n._dispatchEvent=function(e,t){this._eventRegistry||(this._eventRegistry={}),this._defaultHandlers||(this._defaultHandlers={});var i=this._eventRegistry[e]||[],n=this._defaultHandlers[e];if(i.length||n){"object"==typeof t&&t||(t={}),t.type||(t.type=e),t.stopPropagation||(t.stopPropagation=s),t.preventDefault||(t.preventDefault=o),i=i.slice();for(var r=0;r<i.length&&(i[r](t,this),!t.propagationStopped);r++);return n&&!t.defaultPrevented?n(t,this):void 0}},n._signal=function(e,t){var i=(this
 ._eventRegistry||{})[e];if(i){i=i.slice();for(var n=0;n<i.length;n++)i[n](t,this)}},n.once=function(e,t){var i=this;t&&this.addEventListener(e,function n(){i.removeEventListener(e,n),t.apply(null,arguments)})},n.setDefaultHandler=function(e,t){var i=this._defaultHandlers;if(i||(i=this._defaultHandlers={_disabled_:{}}),i[e]){var n=i[e],s=i._disabled_[e];s||(i._disabled_[e]=s=[]),s.push(n);var o=s.indexOf(t);-1!=o&&s.splice(o,1)}i[e]=t},n.removeDefaultHandler=function(e,t){var i=this._defaultHandlers;if(i){var n=i._disabled_[e];if(i[e]==t){i[e];n&&this.setDefaultHandler(e,n.pop())}else if(n){var s=n.indexOf(t);-1!=s&&n.splice(s,1)}}},n.on=n.addEventListener=function(e,t,i){this._eventRegistry=this._eventRegistry||{};var n=this._eventRegistry[e];return n||(n=this._eventRegistry[e]=[]),-1==n.indexOf(t)&&n[i?"unshift":"push"](t),t},n.off=n.removeListener=n.removeEventListener=function(e,t){this._eventRegistry=this._eventRegistry||{};var i=this._eventRegistry[e];if(i){var n=i.indexOf(t);-
 1!==n&&i.splice(n,1)}},n.removeAllListeners=function(e){this._eventRegistry&&(this._eventRegistry[e]=[])},t.EventEmitter=n}),define("ace/lib/app_config",["require","exports","module","ace/lib/oop","ace/lib/event_emitter"],function(e,t,i){"no use strict";function n(e){"undefined"!=typeof console&&console.warn&&console.warn.apply(console,arguments)}function s(e,t){var i=new Error(e);i.data=t,"object"==typeof console&&console.error&&console.error(i),setTimeout(function(){throw i})}var o=e("./oop"),r=e("./event_emitter").EventEmitter,a={setOptions:function(e){Object.keys(e).forEach(function(t){this.setOption(t,e[t])},this)},getOptions:function(e){var t={};return e?Array.isArray(e)||(t=e,e=Object.keys(t)):e=Object.keys(this.$options),e.forEach(function(e){t[e]=this.getOption(e)},this),t},setOption:function(e,t){if(this["$"+e]!==t){var i=this.$options[e];return i?i.forwardTo?this[i.forwardTo]&&this[i.forwardTo].setOption(e,t):(i.handlesSet||(this["$"+e]=t),void(i&&i.set&&i.set.call(this,t
 ))):n('misspelled option "'+e+'"')}},getOption:function(e){var t=this.$options[e];return t?t.forwardTo?this[t.forwardTo]&&this[t.forwardTo].getOption(e):t&&t.get?t.get.call(this):this["$"+e]:n('misspelled option "'+e+'"')}},l=function(){this.$defaultOptions={}};(function(){o.implement(this,r),this.defineOptions=function(e,t,i){return e.$options||(this.$defaultOptions[t]=e.$options={}),Object.keys(i).forEach(function(t){var n=i[t];"string"==typeof n&&(n={forwardTo:n}),n.name||(n.name=t),e.$options[n.name]=n,"initialValue"in n&&(e["$"+n.name]=n.initialValue)}),o.implement(e,a),this},this.resetOptions=function(e){Object.keys(e.$options).forEach(function(t){var i=e.$options[t];"value"in i&&e.setOption(t,i.value)})},this.setDefaultValue=function(e,t,i){var n=this.$defaultOptions[e]||(this.$defaultOptions[e]={});n[t]&&(n.forwardTo?this.setDefaultValue(n.forwardTo,t,i):n[t].value=i)},this.setDefaultValues=function(e,t){Object.keys(t).forEach(function(i){this.setDefaultValue(e,i,t[i])},this
 )},this.warn=n,this.reportError=s}).call(l.prototype),t.AppConfig=l}),define("ace/config",["require","exports","module","ace/lib/lang","ace/lib/oop","ace/lib/net","ace/lib/app_config"],function(e,t,i){"no use strict";function n(n){if(l&&l.document){h.packaged=n||e.packaged||i.packaged||l.define&&define.packaged;for(var o={},r="",a=document.currentScript||document._currentScript,c=a&&a.ownerDocument||document,u=c.getElementsByTagName("script"),d=0;d<u.length;d++){var g=u[d],f=g.src||g.getAttribute("src");if(f){for(var m=g.attributes,p=0,A=m.length;p<A;p++){var C=m[p];0===C.name.indexOf("data-ace-")&&(o[s(C.name.replace(/^data-ace-/,""))]=C.value)}var v=f.match(/^(.*)\/ace(\-\w+)?\.js(\?|$)/);v&&(r=v[1])}}r&&(o.base=o.base||r,o.packaged=!0),o.basePath=o.base,o.workerPath=o.workerPath||o.base,o.modePath=o.modePath||o.base,o.themePath=o.themePath||o.base,delete o.base;for(var F in o)void 0!==o[F]&&t.set(F,o[F])}}function s(e){return e.replace(/-(.)/g,function(e,t){return t.toUpperCase()
 })}var o=e("./lib/lang"),r=(e("./lib/oop"),e("./lib/net")),a=e("./lib/app_config").AppConfig;i.exports=t=new a;var l=function(){return this||"undefined"!=typeof window&&window}(),h={packaged:!1,workerPath:null,modePath:null,themePath:null,basePath:"",suffix:".js",$moduleUrls:{}};t.get=function(e){if(!h.hasOwnProperty(e))throw new Error("Unknown config key: "+e);return h[e]},t.set=function(e,t){if(!h.hasOwnProperty(e))throw new Error("Unknown config key: "+e);h[e]=t},t.all=function(){return o.copyObject(h)},t.moduleUrl=function(e,t){if(h.$moduleUrls[e])return h.$moduleUrls[e];var i=e.split("/");t=t||i[i.length-2]||"";var n="snippets"==t?"/":"-",s=i[i.length-1];if("worker"==t&&"-"==n){var o=new RegExp("^"+t+"[\\-_]|[\\-_]"+t+"$","g");s=s.replace(o,"")}(!s||s==t)&&i.length>1&&(s=i[i.length-2]);var r=h[t+"Path"];return null==r?r=h.basePath:"/"==n&&(t=n=""),r&&"/"!=r.slice(-1)&&(r+="/"),r+t+n+s+this.get("suffix")},t.setModuleUrl=function(e,t){return h.$moduleUrls[e]=t},t.$loading={},t.lo
 adModule=function(i,n){var s,o;Array.isArray(i)&&(o=i[0],i=i[1]);try{s=e(i)}catch(e){}if(s&&!t.$loading[i])return n&&n(s);if(t.$loading[i]||(t.$loading[i]=[]),t.$loading[i].push(n),!(t.$loading[i].length>1)){var a=function(){e([i],function(e){t._emit("load.module",{name:i,module:e});var n=t.$loading[i];t.$loading[i]=null,n.forEach(function(t){t&&t(e)})})};if(!t.get("packaged"))return a();r.loadScript(t.moduleUrl(i,o),a)}},t.init=n}),define("ace/mouse/mouse_handler",["require","exports","module","ace/lib/event","ace/lib/useragent","ace/mouse/default_handlers","ace/mouse/default_gutter_handler","ace/mouse/mouse_event","ace/mouse/dragdrop_handler","ace/config"],function(e,t,i){"use strict";var n=e("../lib/event"),s=e("../lib/useragent"),o=e("./default_handlers").DefaultHandlers,r=e("./default_gutter_handler").GutterHandler,a=e("./mouse_event").MouseEvent,l=e("./dragdrop_handler").DragdropHandler,h=e("../config"),c=function(e){var t=this;this.editor=e,new o(this),new r(this),new l(this)
 ;var i=function(t){(!document.hasFocus||!document.hasFocus()||!e.isFocused()&&document.activeElement==(e.textInput&&e.textInput.getElement()))&&window.focus(),e.focus()},a=e.renderer.getMouseEventTarget();n.addListener(a,"click",this.onMouseEvent.bind(this,"click")),n.addListener(a,"mousemove",this.onMouseMove.bind(this,"mousemove")),n.addMultiMouseDownListener([a,e.renderer.scrollBarV&&e.renderer.scrollBarV.inner,e.renderer.scrollBarH&&e.renderer.scrollBarH.inner,e.textInput&&e.textInput.getElement()].filter(Boolean),[400,300,250],this,"onMouseEvent"),n.addMouseWheelListener(e.container,this.onMouseWheel.bind(this,"mousewheel")),n.addTouchMoveListener(e.container,this.onTouchMove.bind(this,"touchmove"));var h=e.renderer.$gutter;n.addListener(h,"mousedown",this.onMouseEvent.bind(this,"guttermousedown")),n.addListener(h,"click",this.onMouseEvent.bind(this,"gutterclick")),n.addListener(h,"dblclick",this.onMouseEvent.bind(this,"gutterdblclick")),n.addListener(h,"mousemove",this.onMouse
 Event.bind(this,"guttermousemove")),n.addListener(a,"mousedown",i),n.addListener(h,"mousedown",i),s.isIE&&e.renderer.scrollBarV&&(n.addListener(e.renderer.scrollBarV.element,"mousedown",i),n.addListener(e.renderer.scrollBarH.element,"mousedown",i)),e.on("mousemove",function(i){if(!t.state&&!t.$dragDelay&&t.$dragEnabled){var n=e.renderer.screenToTextCoordinates(i.x,i.y),s=e.session.selection.getRange(),o=e.renderer;!s.isEmpty()&&s.insideStart(n.row,n.column)?o.setCursorStyle("default"):o.setCursorStyle("")}})};(function(){this.onMouseEvent=function(e,t){this.editor._emit(e,new a(t,this.editor))},this.onMouseMove=function(e,t){var i=this.editor._eventRegistry&&this.editor._eventRegistry.mousemove;i&&i.length&&this.editor._emit(e,new a(t,this.editor))},this.onMouseWheel=function(e,t){var i=new a(t,this.editor);i.speed=2*this.$scrollSpeed,i.wheelX=t.wheelX,i.wheelY=t.wheelY,this.editor._emit(e,i)},this.onTouchMove=function(e,t){var i=new a(t,this.editor);i.speed=1,i.wheelX=t.wheelX,i.wh
 eelY=t.wheelY,this.editor._emit(e,i)},this.setState=function(e){this.state=e},
+this.captureMouse=function(e,t){this.x=e.x,this.y=e.y,this.isMousePressed=!0;var i=this.editor.renderer;i.$keepTextAreaAtCursor&&(i.$keepTextAreaAtCursor=null);var o=this,r=function(e){if(e)return s.isWebKit&&!e.which&&o.releaseMouse?o.releaseMouse():(o.x=e.clientX,o.y=e.clientY,t&&t(e),o.mouseEvent=new a(e,o.editor),o.$mouseMoved=!0,void 0)},l=function(e){clearInterval(c),h(),o[o.state+"End"]&&o[o.state+"End"](e),o.state="",null==i.$keepTextAreaAtCursor&&(i.$keepTextAreaAtCursor=!0,i.$moveTextAreaToCursor()),o.isMousePressed=!1,o.$onCaptureMouseMove=o.releaseMouse=null,e&&o.onMouseEvent("mouseup",e)},h=function(){o[o.state]&&o[o.state](),o.$mouseMoved=!1};if(s.isOldIE&&"dblclick"==e.domEvent.type)return setTimeout(function(){l(e)});o.$onCaptureMouseMove=r,o.releaseMouse=n.capture(this.editor.container,r,l);var c=setInterval(h,20)},this.releaseMouse=null,this.cancelContextMenu=function(){var e=function(t){t&&t.domEvent&&"contextmenu"!=t.domEvent.type||(this.editor.off("nativecontext
 menu",e),t&&t.domEvent&&n.stopEvent(t.domEvent))}.bind(this);setTimeout(e,10),this.editor.on("nativecontextmenu",e)}}).call(c.prototype),h.defineOptions(c.prototype,"mouseHandler",{scrollSpeed:{initialValue:2},dragDelay:{initialValue:s.isMac?150:0},dragEnabled:{initialValue:!0},focusTimout:{initialValue:0},tooltipFollowsMouse:{initialValue:!0}}),t.MouseHandler=c}),define("ace/mouse/fold_handler",["require","exports","module"],function(e,t,i){"use strict";function n(e){e.on("click",function(t){var i=t.getDocumentPosition(),n=e.session,s=n.getFoldAt(i.row,i.column,1);s&&(t.getAccelKey()?n.removeFold(s):n.expandFold(s),t.stop())}),e.on("gutterclick",function(t){if("foldWidgets"==e.renderer.$gutterLayer.getRegion(t)){var i=t.getDocumentPosition().row,n=e.session;n.foldWidgets&&n.foldWidgets[i]&&e.session.onFoldWidgetClick(i,t),e.isFocused()||e.focus(),t.stop()}}),e.on("gutterdblclick",function(t){if("foldWidgets"==e.renderer.$gutterLayer.getRegion(t)){var i=t.getDocumentPosition().row,n
 =e.session,s=n.getParentFoldRangeData(i,!0),o=s.range||s.firstRange;if(o){i=o.start.row;var r=n.getFoldAt(i,n.getLine(i).length,1);r?n.removeFold(r):(n.addFold("...",o),e.renderer.scrollCursorIntoView({row:o.start.row,column:0}))}t.stop()}})}t.FoldHandler=n}),define("ace/keyboard/keybinding",["require","exports","module","ace/lib/keys","ace/lib/event"],function(e,t,i){"use strict";var n=e("../lib/keys"),s=e("../lib/event"),o=function(e){this.$editor=e,this.$data={editor:e},this.$handlers=[],this.setDefaultHandler(e.commands)};(function(){this.setDefaultHandler=function(e){this.removeKeyboardHandler(this.$defaultHandler),this.$defaultHandler=e,this.addKeyboardHandler(e,0)},this.setKeyboardHandler=function(e){var t=this.$handlers;if(t[t.length-1]!=e){for(;t[t.length-1]&&t[t.length-1]!=this.$defaultHandler;)this.removeKeyboardHandler(t[t.length-1]);this.addKeyboardHandler(e,1)}},this.addKeyboardHandler=function(e,t){if(e){"function"==typeof e&&!e.handleKeyboard&&(e.handleKeyboard=e);va
 r i=this.$handlers.indexOf(e);-1!=i&&this.$handlers.splice(i,1),void 0==t?this.$handlers.push(e):this.$handlers.splice(t,0,e),-1==i&&e.attach&&e.attach(this.$editor)}},this.removeKeyboardHandler=function(e){var t=this.$handlers.indexOf(e);return-1!=t&&(this.$handlers.splice(t,1),e.detach&&e.detach(this.$editor),!0)},this.getKeyboardHandler=function(){return this.$handlers[this.$handlers.length-1]},this.getStatusText=function(){var e=this.$data,t=e.editor;return this.$handlers.map(function(i){return i.getStatusText&&i.getStatusText(t,e)||""}).filter(Boolean).join(" ")},this.$callKeyboardHandlers=function(e,t,i,n){for(var o,r=!1,a=this.$editor.commands,l=this.$handlers.length;l--&&!((o=this.$handlers[l].handleKeyboard(this.$data,e,t,i,n))&&o.command&&((r="null"==o.command||a.exec(o.command,this.$editor,o.args,n))&&n&&-1!=e&&1!=o.passEvent&&1!=o.command.passEvent&&s.stopEvent(n),r)););return!r&&-1==e&&(o={command:"insertstring"},r=a.exec("insertstring",this.$editor,t)),r&&this.$editor.
 _signal&&this.$editor._signal("keyboardActivity",o),r},this.onCommandKey=function(e,t,i){var s=n.keyCodeToString(i);this.$callKeyboardHandlers(t,s,i,e)},this.onTextInput=function(e){this.$callKeyboardHandlers(-1,e)}}).call(o.prototype),t.KeyBinding=o}),define("ace/lib/bidiutil",["require","exports","module"],function(e,t,i){"use strict";function n(e,t,i,n){var s=a?f:g,m=null,p=null,A=null,C=0,v=null,F=-1,b=null,y=null,B=[];if(!n)for(b=0,n=[];b<i;b++)n[b]=r(e[b]);for(l=a,h=!1,c=!1,u=!1,d=!1,y=0;y<i;y++){if(m=C,B[y]=p=o(e,n,B,y),C=s[m][p],v=240&C,C&=15,t[y]=A=s[C][5],v>0)if(16==v){for(b=F;b<y;b++)t[b]=1;F=-1}else F=-1;if(s[C][6])-1==F&&(F=y);else if(F>-1){for(b=F;b<y;b++)t[b]=A;F=-1}n[y]==w&&(t[y]=0),l|=A}if(d)for(b=0;b<i;b++)if(n[b]==E){t[b]=a;for(var D=b-1;D>=0&&n[D]==$;D--)t[D]=a}}function s(e,t,i){if(!(l<e)){if(1==e&&a==m&&!u)return void i.reverse();for(var n,s,o,r,h=i.length,c=0;c<h;){if(t[c]>=e){for(n=c+1;n<h&&t[n]>=e;)n++;for(s=c,o=n-1;s<o;s++,o--)r=i[s],i[s]=i[o],i[o]=r;c=n}c+
 +}}}function o(e,t,i,n){var s,o,r,l,g=t[n];switch(g){case p:case A:h=!1;case F:case v:return g;case C:return h?v:C;case b:return h=!0,c=!0,A;case $:return F;case y:return n<1||n+1>=t.length||(s=i[n-1])!=C&&s!=v||(o=t[n+1])!=C&&o!=v?F:(h&&(o=v),o==s?o:F);case B:return s=n>0?i[n-1]:w,s==C&&n+1<t.length&&t[n+1]==C?C:F;case D:if(n>0&&i[n-1]==C)return C;if(h)return F;for(l=n+1,r=t.length;l<r&&t[l]==D;)l++;return l<r&&t[l]==C?C:F;case S:for(r=t.length,l=n+1;l<r&&t[l]==S;)l++;if(l<r){var f=e[n],m=f>=1425&&f<=2303||64286==f;if(s=t[l],m&&(s==A||s==b))return A}return n<1||(s=t[n-1])==w?F:i[n-1];case w:return h=!1,u=!0,a;case E:return d=!0,F;case k:case x:case R:case M:case L:h=!1;case T:return F}}function r(e){var t=e.charCodeAt(0),i=t>>8;return 0==i?t>191?p:_[t]:5==i?/[\u0591-\u05f4]/.test(e)?A:p:6==i?/[\u0610-\u061a\u064b-\u065f\u06d6-\u06e4\u06e7-\u06ed]/.test(e)?S:/[\u0660-\u0669\u066b-\u066c]/.test(e)?v:1642==t?D:/[\u06f0-\u06f9]/.test(e)?C:b:32==i&&t<=8287?O[255&t]:254==i&&t>=65136?b:F}
 var a=0,l=0,h=!1,c=!1,u=!1,d=!1,g=[[0,3,0,1,0,0,0],[0,3,0,1,2,2,0],[0,3,0,17,2,0,1],[0,3,5,5,4,1,0],[0,3,21,21,4,0,1],[0,3,5,5,4,2,0]],f=[[2,0,1,1,0,1,0],[2,0,1,1,0,2,0],[2,0,2,1,3,2,0],[2,0,2,33,3,1,1]],m=1,p=0,A=1,C=2,v=3,F=4,w=5,E=6,b=7,$=8,y=9,B=10,D=11,S=12,k=13,x=14,L=15,R=16,M=17,T=18,_=[T,T,T,T,T,T,T,T,T,E,w,E,$,w,T,T,T,T,T,T,T,T,T,T,T,T,T,T,w,w,w,E,$,F,F,D,D,D,F,F,F,F,F,B,y,B,y,y,C,C,C,C,C,C,C,C,C,C,y,F,F,F,F,F,F,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,F,F,F,F,F,F,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,p,F,F,F,F,T,T,T,T,T,T,w,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,T,y,F,D,D,D,D,F,F,F,F,p,F,F,T,F,F,D,D,C,C,F,p,F,F,F,C,p,F,F,F,F,F],O=[$,$,$,$,$,$,$,$,$,$,$,T,T,T,p,A,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,$,w,k,x,L,R,M,y,D,D,D,D,D,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,y,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,F,$];t.L=p,t.R=A,t.EN=C,t.ON_R=3,t.AN=4,t.R_H=5,t.B=6,t.DOT="·",t.doBidiReorder=function(e,i,o){if(e.length<2)return{};var r
 =e.split(""),l=new Array(r.length),h=new Array(r.length),c=[];a=o?m:0,n(r,c,r.length,i);for(var u=0;u<l.length;l[u]=u,u++);s(2,c,l),s(1,c,l);for(var u=0;u<l.length-1;u++)i[u]===v?c[u]=t.AN:c[u]===A&&(i[u]>b&&i[u]<k||i[u]===F||i[u]===T)?c[u]=t.ON_R:u>0&&"ل"===r[u-1]&&/\u0622|\u0623|\u0625|\u0627/.test(r[u])&&(c[u-1]=c[u]=t.R_H,u++);r[r.length-1]===t.DOT&&(c[r.length-1]=t.B);for(var u=0;u<l.length;u++)h[u]=c[l[u]];return{logicalFromVisual:l,bidiLevels:h}},t.hasBidiCharacters=function(e,t){for(var i=!1,n=0;n<e.length;n++)t[n]=r(e.charAt(n)),!i&&(t[n]==A||t[n]==b)&&(i=!0);return i},t.getVisualFromLogicalIdx=function(e,t){for(var i=0;i<t.logicalFromVisual.length;i++)if(t.logicalFromVisual[i]==e)return i;return 0}}),define("ace/bidihandler",["require","exports","module","ace/lib/bidiutil","ace/lib/lang","ace/lib/useragent"],function(e,t,i){"use strict";var n=e("./lib/bidiutil"),s=e("./lib/lang"),o=e("./lib/useragent"),r=/[\u0590-\u05f4\u0600-\u06ff\u0700-\u08ac]/,a=function(e){this.sessi
 on=e,this.bidiMap={},this.currentRow=null,this.bidiUtil=n,this.charWidths=[],this.EOL="¬",this.showInvisibles=!0,this.isRtlDir=!1,this.line="",this.wrapIndent=0,this.isLastRow=!1,this.EOF="¶",this.seenBidi=!1};(function(){this.isBidiRow=function(e,t,i){return!!this.seenBidi&&(e!==this.currentRow&&(this.currentRow=e,this.updateRowLine(t,i),this.updateBidiMap()),this.bidiMap.bidiLevels)},this.onChange=function(e){this.seenBidi?this.currentRow=null:"insert"==e.action&&r.test(e.lines.join("\n"))&&(this.seenBidi=!0,this.currentRow=null)},this.getDocumentRow=function(){var e=0,t=this.session.$screenRowCache;if(t.length){var i=this.session.$getRowCacheIndex(t,this.currentRow);i>=0&&(e=this.session.$docRowCache[i])}return e},this.getSplitIndex=function(){var e=0,t=this.session.$screenRowCache;if(t.length)for(var i,n=this.session.$getRowCacheIndex(t,this.currentRow);this.currentRow-e>0&&(i=this.session.$getRowCacheIndex(t,this.currentRow-e-1))===n;)n=i,e++;return e},this.updateRowLine=func
 tion(e,t){if(void 0===e&&(e=this.getDocumentRow()),this.wrapIndent=0,this.isLastRow=e===this.session.getLength()-1,this.line=this.session.getLine(e),this.session.$useWrapMode){var i=this.session.$wrapData[e];i&&(void 0===t&&(t=this.getSplitIndex()),t>0&&i.length?(this.wrapIndent=i.indent,this.line=t<i.length?this.line.substring(i[t-1],i[i.length-1]):this.line.substring(i[i.length-1])):this.line=this.line.substring(0,i[t]))}var o,r=this.session,a=0;this.line=this.line.replace(/\t|[\u1100-\u2029, \u202F-\uFFE6]/g,function(e,t){return"\t"===e||r.isFullWidth(e.charCodeAt(0))?(o="\t"===e?r.getScreenTabSize(t+a):2,a+=o-1,s.stringRepeat(n.DOT,o)):e})},this.updateBidiMap=function(){var e=[],t=this.isLastRow?this.EOF:this.EOL,i=this.line+(this.showInvisibles?t:n.DOT);n.hasBidiCharacters(i,e)?this.bidiMap=n.doBidiReorder(i,e,this.isRtlDir):this.bidiMap={}},this.markAsDirty=function(){this.currentRow=null},this.updateCharacterWidths=function(e){if(this.seenBidi&&this.characterWidth!==e.$charac
 terSize.width){var t=this.characterWidth=e.$characterSize.width,i=e.$measureCharWidth("ה");this.charWidths[n.L]=this.charWidths[n.EN]=this.charWidths[n.ON_R]=t,this.charWidths[n.R]=this.charWidths[n.AN]=i,this.charWidths[n.R_H]=o.isChrome?i:.45*i,this.charWidths[n.B]=0,this.currentRow=null}},this.getShowInvisibles=function(){return this.showInvisibles},this.setShowInvisibles=function(e){this.showInvisibles=e,this.currentRow=null},this.setEolChar=function(e){this.EOL=e},this.setTextDir=function(e){this.isRtlDir=e},this.getPosLeft=function(e){e-=this.wrapIndent;var t=n.getVisualFromLogicalIdx(e>0?e-1:0,this.bidiMap),i=this.bidiMap.bidiLevels,s=0;0===e&&i[t]%2!=0&&t++;for(var o=0;o<t;o++)s+=this.charWidths[i[o]];return 0!==e&&i[t]%2==0&&(s+=this.charWidths[i[t]]),this.wrapIndent&&(s+=this.wrapIndent*this.charWidths[n.L]),s},this.getSelections=function(e,t){for(var i,s,o=this.bidiMap,r=o.bidiLevels,a=this.wrapIndent*this.charWidths[n.L],l=[],h=Math.min(e,t)-this.wrapIndent,c=Math.max(e
 ,t)-this.wrapIndent,u=!1,d=!1,g=0,f=0;f<r.length;f++)s=o.logicalFromVisual[f],i=r[f],u=s>=h&&s<c,u&&!d?g=a:!u&&d&&l.push({left:g,width:a-g}),a+=this.charWidths[i],d=u;return u&&f===r.length&&l.push({left:g,width:a-g}),l},this.offsetToCol=function(e){var t=0,e=Math.max(e,0),i=0,s=0,o=this.bidiMap.bidiLevels,r=this.charWidths[o[s]];for(this.wrapIndent&&(e-=this.wrapIndent*this.charWidths[n.L]);e>i+r/2;){if(i+=r,s===o.length-1){r=0;break}r=this.charWidths[o[++s]]}return s>0&&o[s-1]%2!=0&&o[s]%2==0?(e<i&&s--,t=this.bidiMap.logicalFromVisual[s]):s>0&&o[s-1]%2==0&&o[s]%2!=0?t=1+(e>i?this.bidiMap.logicalFromVisual[s]:this.bidiMap.logicalFromVisual[s-1]):this.isRtlDir&&s===o.length-1&&0===r&&o[s-1]%2==0||!this.isRtlDir&&0===s&&o[s]%2!=0?t=1+this.bidiMap.logicalFromVisual[s]:(s>0&&o[s-1]%2!=0&&0!==r&&s--,t=this.bidiMap.logicalFromVisual[s]),t+this.wrapIndent}}).call(a.prototype),t.BidiHandler=a}),define("ace/range",["require","exports","module"],function(e,t,i){"use strict";var n=function(e,
 t){return e.row-t.row||e.column-t.column},s=function(e,t,i,n){this.start={row:e,column:t},this.end={row:i,column:n}};(function(){this.isEqual=function(e){return this.start.row===e.start.row&&this.end.row===e.end.row&&this.start.column===e.start.column&&this.end.column===e.end.column},this.toString=function(){return"Range: ["+this.start.row+"/"+this.start.column+"] -> ["+this.end.row+"/"+this.end.column+"]"},this.contains=function(e,t){return 0==this.compare(e,t)},this.compareRange=function(e){var t,i=e.end,n=e.start;return t=this.compare(i.row,i.column),1==t?(t=this.compare(n.row,n.column),1==t?2:0==t?1:0):-1==t?-2:(t=this.compare(n.row,n.column),-1==t?-1:1==t?42:0)},this.comparePoint=function(e){return this.compare(e.row,e.column)},this.containsRange=function(e){return 0==this.comparePoint(e.start)&&0==this.comparePoint(e.end)},this.intersects=function(e){var t=this.compareRange(e);return-1==t||0==t||1==t},this.isEnd=function(e,t){return this.end.row==e&&this.end.column==t},this.is
 Start=function(e,t){return this.start.row==e&&this.start.column==t},this.setStart=function(e,t){"object"==typeof e?(this.start.column=e.column,this.start.row=e.row):(this.start.row=e,this.start.column=t)},this.setEnd=function(e,t){"object"==typeof e?(this.end.column=e.column,this.end.row=e.row):(this.end.row=e,this.end.column=t)},this.inside=function(e,t){return 0==this.compare(e,t)&&(!this.isEnd(e,t)&&!this.isStart(e,t))},this.insideStart=function(e,t){return 0==this.compare(e,t)&&!this.isEnd(e,t)},this.insideEnd=function(e,t){return 0==this.compare(e,t)&&!this.isStart(e,t)},this.compare=function(e,t){return this.isMultiLine()||e!==this.start.row?e<this.start.row?-1:e>this.end.row?1:this.start.row===e?t>=this.start.column?0:-1:this.end.row===e?t<=this.end.column?0:1:0:t<this.start.column?-1:t>this.end.column?1:0},this.compareStart=function(e,t){return this.start.row==e&&this.start.column==t?-1:this.compare(e,t)},this.compareEnd=function(e,t){return this.end.row==e&&this.end.column=
 =t?1:this.compare(e,t)},this.compareInside=function(e,t){return this.end.row==e&&this.end.column==t?1:this.start.row==e&&this.start.column==t?-1:this.compare(e,t)},this.clipRows=function(e,t){if(this.end.row>t)var i={row:t+1,column:0};else if(this.end.row<e)var i={row:e,column:0};if(this.start.row>t)var n={row:t+1,column:0};else if(this.start.row<e)var n={row:e,column:0};return s.fromPoints(n||this.start,i||this.end)},this.extend=function(e,t){var i=this.compare(e,t);if(0==i)return this;if(-1==i)var n={row:e,column:t};else var o={row:e,column:t};return s.fromPoints(n||this.start,o||this.end)},this.isEmpty=function(){return this.start.row===this.end.row&&this.start.column===this.end.column},this.isMultiLine=function(){return this.start.row!==this.end.row},this.clone=function(){return s.fromPoints(this.start,this.end)},this.collapseRows=function(){return 0==this.end.column?new s(this.start.row,0,Math.max(this.start.row,this.end.row-1),0):new s(this.start.row,0,this.end.row,0)},this.to
 ScreenRange=function(e){var t=e.documentToScreenPosition(this.start),i=e.documentToScreenPosition(this.end);return new s(t.row,t.column,i.row,i.column)},this.moveBy=function(e,t){this.start.row+=e,this.start.column+=t,this.end.row+=e,this.end.column+=t}}).call(s.prototype),s.fromPoints=function(e,t){return new s(e.row,e.column,t.row,t.column)},s.comparePoints=n,s.comparePoints=function(e,t){return e.row-t.row||e.column-t.column},t.Range=s}),define("ace/selection",["require","exports","module","ace/lib/oop","ace/lib/lang","ace/lib/event_emitter","ace/range"],function(e,t,i){"use strict";var n=e("./lib/oop"),s=e("./lib/lang"),o=e("./lib/event_emitter").EventEmitter,r=e("./range").Range,a=function(e){this.session=e,this.doc=e.getDocument(),this.clearSelection(),this.lead=this.selectionLead=this.doc.createAnchor(0,0),this.anchor=this.selectionAnchor=this.doc.createAnchor(0,0);var t=this;this.lead.on("change",function(e){t._emit("changeCursor"),t.$isEmpty||t._emit("changeSelection"),!t.$
 keepDesiredColumnOnChange&&e.old.column!=e.value.column&&(t.$desiredColumn=null)}),this.selectionAnchor.on("change",function(){t.$isEmpty||t._emit("changeSelection")})};(function(){n.implement(this,o),this.isEmpty=function(){return this.$isEmpty||this.anchor.row==this.lead.row&&this.anchor.column==this.lead.column},this.isMultiLine=function(){return!this.isEmpty()&&this.getRange().isMultiLine()},this.getCursor=function(){return this.lead.getPosition()},this.setSelectionAnchor=function(e,t){this.anchor.setPosition(e,t),this.$isEmpty&&(this.$isEmpty=!1,this._emit("changeSelection"))},this.getSelectionAnchor=function(){return this.$isEmpty?this.getSelectionLead():this.anchor.getPosition()},this.getSelectionLead=function(){return this.lead.getPosition()},this.shiftSelection=function(e){if(this.$isEmpty)return void this.moveCursorTo(this.lead.row,this.lead.column+e);var t=this.getSelectionAnchor(),i=this.getSelectionLead(),n=this.isBackwards();(!n||0!==t.column)&&this.setSelectionAnchor(
 t.row,t.column+e),(n||0!==i.column)&&this.$moveSelection(function(){this.moveCursorTo(i.row,i.column+e)})},this.isBackwards=function(){var e=this.anchor,t=this.lead;return e.row>t.row||e.row==t.row&&e.column>t.column},this.getRange=function(){var e=this.anchor,t=this.lead;return this.isEmpty()?r.fromPoints(t,t):this.isBackwards()?r.fromPoints(t,e):r.fromPoints(e,t)},this.clearSelection=function(){this.$isEmpty||(this.$isEmpty=!0,this._emit("changeSelection"))},this.selectAll=function(){var e=this.doc.getLength()-1;this.setSelectionAnchor(0,0),this.moveCursorTo(e,this.doc.getLine(e).length)},this.setRange=this.setSelectionRange=function(e,t){t?(this.setSelectionAnchor(e.end.row,e.end.column),this.selectTo(e.start.row,e.start.column)):(this.setSelectionAnchor(e.start.row,e.start.column),this.selectTo(e.end.row,e.end.column)),this.getRange().isEmpty()&&(this.$isEmpty=!0),this.$desiredColumn=null},this.$moveSelection=function(e){var t=this.lead;this.$isEmpty&&this.setSelectionAnchor(t.r
 ow,t.column),e.call(this)},this.selectTo=function(e,t){this.$moveSelection(function(){this.moveCursorTo(e,t)})},this.selectToPosition=function(e){this.$moveSelection(function(){this.moveCursorToPosition(e)})},this.moveTo=function(e,t){this.clearSelection(),this.moveCursorTo(e,t)},this.moveToPosition=function(e){this.clearSelection(),this.moveCursorToPosition(e)},this.selectUp=function(){this.$moveSelection(this.moveCursorUp)},this.selectDown=function(){this.$moveSelection(this.moveCursorDown)},this.selectRight=function(){this.$moveSelection(this.moveCursorRight)},this.selectLeft=function(){this.$moveSelection(this.moveCursorLeft)},this.selectLineStart=function(){this.$moveSelection(this.moveCursorLineStart)},this.selectLineEnd=function(){this.$moveSelection(this.moveCursorLineEnd)},this.selectFileEnd=function(){this.$moveSelection(this.moveCursorFileEnd)},this.selectFileStart=function(){this.$moveSelection(this.moveCursorFileStart)},this.selectWordRight=function(){this.$moveSelectio
 n(this.moveCursorWordRight)},this.selectWordLeft=function(){this.$moveSelection(this.moveCursorWordLeft)},this.getWordRange=function(e,t){if(void 0===t){var i=e||this.lead;e=i.row,t=i.column}return this.session.getWordRange(e,t)},this.selectWord=function(){this.setSelectionRange(this.getWordRange())},this.selectAWord=function(){var e=this.getCursor(),t=this.session.getAWordRange(e.row,e.column);this.setSelectionRange(t)},this.getLineRange=function(e,t){var i,n="number"==typeof e?e:this.lead.row,s=this.session.getFoldLine(n);return s?(n=s.start.row,i=s.end.row):i=n,!0===t?new r(n,0,i,this.session.getLine(i).length):new r(n,0,i+1,0)},this.selectLine=function(){this.setSelectionRange(this.getLineRange())},this.moveCursorUp=function(){this.moveCursorBy(-1,0)},this.moveCursorDown=function(){this.moveCursorBy(1,0)},this.wouldMoveIntoSoftTab=function(e,t,i){var n=e.column,s=e.column+t;return i<0&&(n=e.column-t,s=e.column),this.session.isTabStop(e)&&this.doc.getLine(e.row).slice(n,s).split(
 " ").length-1==t},this.moveCursorLeft=function(){var e,t=this.lead.getPosition();if(e=this.session.getFoldAt(t.row,t.column,-1))this.moveCursorTo(e.start.row,e.start.column);else if(0===t.column)t.row>0&&this.moveCursorTo(t.row-1,this.doc.getLine(t.row-1).length);else{var i=this.session.getTabSize();this.wouldMoveIntoSoftTab(t,i,-1)&&!this.session.getNavigateWithinSoftTabs()?this.moveCursorBy(0,-i):this.moveCursorBy(0,-1)}},this.moveCursorRight=function(){var e,t=this.lead.getPosition();if(e=this.session.getFoldAt(t.row,t.column,1))this.moveCursorTo(e.end.row,e.end.column);else if(this.lead.column==this.doc.getLine(this.lead.row).length)this.lead.row<this.doc.getLength()-1&&this.moveCursorTo(this.lead.row+1,0);else{var i=this.session.getTabSize(),t=this.lead;this.wouldMoveIntoSoftTab(t,i,1)&&!this.session.getNavigateWithinSoftTabs()?this.moveCursorBy(0,i):this.moveCursorBy(0,1)}},this.moveCursorLineStart=function(){var e=this.lead.row,t=this.lead.column,i=this.session.documentToScre
 enRow(e,t),n=this.session.screenToDocumentPosition(i,0),s=this.session.getDisplayLine(e,null,n.row,n.column),o=s.match(/^\s*/);o[0].length!=t&&!this.session.$useEmacsStyleLineStart&&(n.column+=o[0].length),this.moveCursorToPosition(n)},this.moveCursorLineEnd=function(){var e=this.lead,t=this.session.getDocumentLastRowColumnPosition(e.row,e.column);if(this.lead.column==t.column){var i=this.session.getLine(t.row);if(t.column==i.length){var n=i.search(/\s+$/);n>0&&(t.column=n)}}this.moveCursorTo(t.row,t.column)},this.moveCursorFileEnd=function(){var e=this.doc.getLength()-1,t=this.doc.getLine(e).length;this.moveCursorTo(e,t)},this.moveCursorFileStart=function(){this.moveCursorTo(0,0)},this.moveCursorLongWordRight=function(){var e=this.lead.row,t=this.lead.column,i=this.doc.getLine(e),n=i.substring(t);this.session.nonTokenRe.lastIndex=0,this.session.tokenRe.lastIndex=0;var s=this.session.getFoldAt(e,t,1);return s?void this.moveCursorTo(s.end.row,s.end.column):(this.session.nonTokenRe.ex
 ec(n)&&(t+=this.session.nonTokenRe.lastIndex,this.session.nonTokenRe.lastIndex=0,n=i.substring(t)),t>=i.length?(this.moveCursorTo(e,i.length),this.moveCursorRight(),e<this.doc.getLength()-1&&this.moveCursorWordRight(),void 0):(this.session.tokenRe.exec(n)&&(t+=this.session.tokenRe.lastIndex,this.session.tokenRe.lastIndex=0),void this.moveCursorTo(e,t)))},this.moveCursorLongWordLeft=function(){var e,t=this.lead.row,i=this.lead.column;if(e=this.session.getFoldAt(t,i,-1))return void this.moveCursorTo(e.start.row,e.start.column);var n=this.session.getFoldStringAt(t,i,-1);null==n&&(n=this.doc.getLine(t).substring(0,i));var o=s.stringReverse(n);if(this.session.nonTokenRe.lastIndex=0,this.session.tokenRe.lastIndex=0,this.session.nonTokenRe.exec(o)&&(i-=this.session.nonTokenRe.lastIndex,o=o.slice(this.session.nonTokenRe.lastIndex),this.session.nonTokenRe.lastIndex=0),i<=0)return this.moveCursorTo(t,0),this.moveCursorLeft(),t>0&&this.moveCursorWordLeft(),void 0;this.session.tokenRe.exec(o)&&
 (i-=this.session.tokenRe.lastIndex,this.session.tokenRe.lastIndex=0),this.moveCursorTo(t,i)},this.$shortWordEndIndex=function(e){var t,i=0,n=/\s/,s=this.session.tokenRe;if(s.lastIndex=0,this.session.tokenRe.exec(e))i=this.session.tokenRe.lastIndex;else{for(;(t=e[i])&&n.test(t);)i++;if(i<1)for(s.lastIndex=0;(t=e[i])&&!s.test(t);)if(s.lastIndex=0,i++,n.test(t)){if(i>2){i--;break}for(;(t=e[i])&&n.test(t);)i++;if(i>2)break}}return s.lastIndex=0,i},this.moveCursorShortWordRight=function(){var e=this.lead.row,t=this.lead.column,i=this.doc.getLine(e),n=i.substring(t),s=this.session.getFoldAt(e,t,1);if(s)return this.moveCursorTo(s.end.row,s.end.column);if(t==i.length){var o=this.doc.getLength();do{e++,n=this.doc.getLine(e)}while(e<o&&/^\s*$/.test(n));/^\s+/.test(n)||(n=""),t=0}var r=this.$shortWordEndIndex(n);this.moveCursorTo(e,t+r)},this.moveCursorShortWordLeft=function(){var e,t=this.lead.row,i=this.lead.column;if(e=this.session.getFoldAt(t,i,-1))return this.moveCursorTo(e.start.row,e.st
 art.column);var n=this.session.getLine(t).substring(0,i);if(0===i){do{t--,n=this.doc.getLine(t)}while(t>0&&/^\s*$/.test(n));i=n.length,/\s+$/.test(n)||(n="")}var o=s.stringReverse(n),r=this.$shortWordEndIndex(o);return this.moveCursorTo(t,i-r)},this.moveCursorWordRight=function(){this.session.$selectLongWords?this.moveCursorLongWordRight():this.moveCursorShortWordRight()},this.moveCursorWordLeft=function(){this.session.$selectLongWords?this.moveCursorLongWordLeft():this.moveCursorShortWordLeft()},this.moveCursorBy=function(e,t){var i,n=this.session.documentToScreenPosition(this.lead.row,this.lead.column);0===t&&(0!==e&&(this.session.$bidiHandler.isBidiRow(n.row,this.lead.row)?(i=this.session.$bidiHandler.getPosLeft(n.column),n.column=Math.round(i/this.session.$bidiHandler.charWidths[0])):i=n.column*this.session.$bidiHandler.charWidths[0]),this.$desiredColumn?n.column=this.$desiredColumn:this.$desiredColumn=n.column);var s=this.session.screenToDocu

<TRUNCATED>

[06/49] knox git commit: Update CHANGES for v0.14.0

Posted by mo...@apache.org.
Update CHANGES for v0.14.0

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/eb7d1421
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/eb7d1421
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/eb7d1421

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: eb7d1421821784371bcc478832960da004a5126e
Parents: 5af2413
Author: Larry McCay <lm...@hortonworks.com>
Authored: Mon Nov 27 12:49:33 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Mon Nov 27 12:50:00 2017 -0500

----------------------------------------------------------------------
 CHANGES | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/eb7d1421/CHANGES
----------------------------------------------------------------------
diff --git a/CHANGES b/CHANGES
index 3c84f3d..d613a78 100644
--- a/CHANGES
+++ b/CHANGES
@@ -20,6 +20,7 @@ Release Notes - Apache Knox - Version 0.14.0
    * [KNOX-1041] - High Availability Support For Apache SOLR, HBase & Kafka (Rick Kellogg via Sandeep More)
    * [KNOX-1046] - Add Client Cert Wanted Capability with Configurable Validation that Checks for It
    * [KNOX-1072] - Add Client Cert Required Capability to KnoxToken
+   * [KNOX-1107] - Remote Configuration Registry Client Service (Phil Zampino via lmccay)
 
 ** Improvement
    * [KNOX-921] - Httpclient max connections are always set to default values


[25/49] knox git commit: KNOX-1129 - bump timeout for GatewayBasicFuncTest.testCLIServiceTest

Posted by mo...@apache.org.
KNOX-1129 - bump timeout for GatewayBasicFuncTest.testCLIServiceTest

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/8df6e80e
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/8df6e80e
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/8df6e80e

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 8df6e80e48bca4aeec32f0fa2bfdacecf4730cd0
Parents: e482e2e
Author: Larry McCay <lm...@hortonworks.com>
Authored: Mon Dec 4 21:30:45 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Mon Dec 4 21:30:45 2017 -0500

----------------------------------------------------------------------
 .../test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/8df6e80e/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
index 7fadb74..210fe5a 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
@@ -3646,7 +3646,7 @@ public class GatewayBasicFuncTest {
     LOG_EXIT();
   }
 
-  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
+  @Test( timeout = TestUtils.LONG_TIMEOUT )
   public void testCLIServiceTest() throws Exception {
     LOG_ENTER();
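
For context on the one-line change above: a minimal JUnit 4 sketch (not Knox code) showing how the timeout attribute on @Test behaves. The actual values of Knox's TestUtils.MEDIUM_TIMEOUT and TestUtils.LONG_TIMEOUT constants are not shown in this diff, so the millisecond values below are illustrative assumptions only.

    import org.junit.Test;

    public class TimeoutExample {

      // Illustrative constants; the real values live in Knox's TestUtils
      // class and are assumed here for the sake of the example.
      private static final long MEDIUM_TIMEOUT = 30 * 1000L;     // 30 seconds (assumed)
      private static final long LONG_TIMEOUT   = 3 * 60 * 1000L; // 3 minutes (assumed)

      // JUnit 4 fails the test if the method runs longer than 'timeout' milliseconds,
      // so raising the constant gives a slow end-to-end test more headroom.
      @Test( timeout = LONG_TIMEOUT )
      public void slowServiceTest() throws Exception {
        Thread.sleep(1000); // stand-in for the slow CLI service call being tested
      }
    }

Bumping from the medium to the long constant simply widens that failure threshold; it does not otherwise change what the test verifies.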
 


[14/49] knox git commit: KNOX-1128 - Readonly protection for generated topologies in Knox Admin UI

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/main.a69408978854e3a77fb2.bundle.js
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/main.a69408978854e3a77fb2.bundle.js b/gateway-applications/src/main/resources/applications/admin-ui/app/main.a69408978854e3a77fb2.bundle.js
new file mode 100644
index 0000000..81f9324
--- /dev/null
+++ b/gateway-applications/src/main/resources/applications/admin-ui/app/main.a69408978854e3a77fb2.bundle.js
@@ -0,0 +1 @@
+webpackJsonp([0],{"+GRi":function(e,t,n){var r=n("Wo2w"),o=n("Wy9r");e.exports=function(e){return r(o(e))}},"+Q6C":function(e,t,n){var r=n("CDXM"),o=n("6De9").f,i=n("+pQw");r(r.S,"Reflect",{deleteProperty:function(e,t){var n=o(i(e),t);return!(n&&!n.configurable)&&delete e[t]}})},"+aW+":function(e,t,n){"use strict";var r=n("CDXM"),o=n("uNkO"),i=n("RT4T"),a=n("umMR"),s=[].sort,l=[1,2,3];r(r.P+r.F*(a(function(){l.sort(void 0)})||!a(function(){l.sort(null)})||!n("bhtb")(s)),"Array",{sort:function(e){return void 0===e?s.call(i(this)):s.call(i(this),o(e))}})},"+c1l":function(e,t,n){var r=n("CDXM");r(r.S+r.F*!n("V+0c"),"Object",{defineProperty:n("tose").f})},"+iEx":function(e,t,n){n("fHxy"),n("5GJ3"),n("X0O/"),n("HCkn"),n("ncNB"),n("soMw"),n("8sYH"),n("IJ3P"),n("t6ta"),e.exports=n("b4gG").Reflect},"+pQw":function(e,t,n){var r=n("JXkd");e.exports=function(e){if(!r(e))throw TypeError(e+" is not an object!");return e}},"/JsI":function(e,t,n){var r=n("CDXM");r(r.S+r.F,"Object",{assign:n("rIdM"
 )})},"/Mgt":function(e,t,n){var r=n("CDXM");r(r.S,"Number",{MIN_SAFE_INTEGER:-9007199254740991})},"/XRd":function(e,t,n){var r=n("tose"),o=n("CDXM"),i=n("+pQw"),a=n("A1WY");o(o.S+o.F*n("umMR")(function(){Reflect.defineProperty(r.f({},1,{value:1}),1,{value:2})}),"Reflect",{defineProperty:function(e,t,n){i(e),t=a(t,!0),i(n);try{return r.f(e,t,n),!0}catch(e){return!1}}})},"/wY1":function(e,t,n){n("rMMT"),n("dlwK"),n("/XRd"),n("+Q6C"),n("dBNB"),n("7Fno"),n("gZpL"),n("dSHT"),n("d+61"),n("V2Dj"),n("wJYt"),n("gdNQ"),n("VsLy"),n("wLW2"),e.exports=n("b4gG").Reflect},0:function(e,t,n){e.exports=n("cDNt")},"0MXQ":function(e,t,n){var r=n("CDXM");r(r.S,"Math",{fround:n("xxX9")})},"1j/l":function(e,t,n){"use strict";n.d(t,"a",function(){return r});var r=Array.isArray||function(e){return e&&"number"==typeof e.length}},"1zvG":function(e,t,n){"use strict";var r=n("JXkd"),o=n("TJLg"),i=n("3r0D")("hasInstance"),a=Function.prototype;i in a||n("tose").f(a,i,{value:function(e){if("function"!=typeof this|
 |!r(e))return!1;if(!r(this.prototype))return e instanceof this;for(;e=o(e);)if(this.prototype===e)return!0;return!1}})},"2Fuj":function(e,t,n){var r=n("R5c1"),o=n("a/Sk");e.exports=Object.keys||function(e){return r(e,o)}},"2kLc":function(e,t,n){"use strict";function r(e){return e&&"function"==typeof e.schedule}function o(e){return e}var i=n("AP4T"),a=n("6Xbx"),s=function(e){function t(t,n){e.call(this),this.value=t,this.scheduler=n,this._isScalar=!0,n&&(this._isScalar=!1)}return Object(a.b)(t,e),t.create=function(e,n){return new t(e,n)},t.dispatch=function(e){var t=e.value,n=e.subscriber;e.done?n.complete():(n.next(t),n.closed||(e.done=!0,this.schedule(e)))},t.prototype._subscribe=function(e){var n=this.value,r=this.scheduler;if(r)return r.schedule(t.dispatch,0,{done:!1,value:n,subscriber:e});e.next(n),e.closed||e.complete()},t}(i.Observable),l=n("Ecq+"),c=function(e){function t(t,n){e.call(this),this.array=t,this.scheduler=n,n||1!==t.length||(this._isScalar=!0,this.value=t[0])}retu
 rn Object(a.b)(t,e),t.create=function(e,n){return new t(e,n)},t.of=function(){for(var e=[],n=0;n<arguments.length;n++)e[n-0]=arguments[n];var o=e[e.length-1];r(o)?e.pop():o=null;var i=e.length;return i>1?new t(e,o):1===i?new s(e[0],o):new l.a(o)},t.dispatch=function(e){var t=e.array,n=e.index,r=e.subscriber;n>=e.count?r.complete():(r.next(t[n]),r.closed||(e.index=n+1,this.schedule(e)))},t.prototype._subscribe=function(e){var n=this.array,r=n.length,o=this.scheduler;if(o)return o.schedule(t.dispatch,0,{array:n,index:0,count:r,subscriber:e});for(var i=0;i<r&&!e.closed;i++)e.next(n[i]);e.complete()},t}(i.Observable),u=n("qgI0"),d=n("lI6h"),h=function(){function e(e,t,n){void 0===n&&(n=Number.POSITIVE_INFINITY),this.project=e,this.resultSelector=t,this.concurrent=n}return e.prototype.call=function(e,t){return t.subscribe(new p(e,this.project,this.resultSelector,this.concurrent))},e}(),p=function(e){function t(t,n,r,o){void 0===o&&(o=Number.POSITIVE_INFINITY),e.call(this,t),this.project=
 n,this.resultSelector=r,this.concurrent=o,this.hasCompleted=!1,this.buffer=[],this.active=0,this.index=0}return Object(a.b)(t,e),t.prototype._next=function(e){this.active<this.concurrent?this._tryNext(e):this.buffer.push(e)},t.prototype._tryNext=function(e){var t,n=this.index++;try{t=this.project(e,n)}catch(e){return void this.destination.error(e)}this.active++,this._innerSub(t,e,n)},t.prototype._innerSub=function(e,t,n){this.add(Object(u.a)(this,e,t,n))},t.prototype._complete=function(){this.hasCompleted=!0,0===this.active&&0===this.buffer.length&&this.destination.complete()},t.prototype.notifyNext=function(e,t,n,r,o){this.resultSelector?this._notifyResultSelector(e,t,n,r):this.destination.next(t)},t.prototype._notifyResultSelector=function(e,t,n,r){var o;try{o=this.resultSelector(e,t,n,r)}catch(e){return void this.destination.error(e)}this.destination.next(o)},t.prototype.notifyComplete=function(e){var t=this.buffer;this.remove(e),this.active--,t.length>0?this._next(t.shift()):0==
 =this.active&&this.hasCompleted&&this.destination.complete()},t}(d.a);n.d(t,"a",function(){return f});var f=function(){for(var e=[],t=0;t<arguments.length;t++)e[t-0]=arguments[t];var n=Number.POSITIVE_INFINITY,a=null,s=e[e.length-1];return r(s)?(a=e.pop(),e.length>1&&"number"==typeof e[e.length-1]&&(n=e.pop())):"number"==typeof s&&(n=e.pop()),null===a&&1===e.length&&e[0]instanceof i.Observable?e[0]:function(e){return void 0===e&&(e=Number.POSITIVE_INFINITY),function(e,t,n){return void 0===n&&(n=Number.POSITIVE_INFINITY),function(r){return"number"==typeof t&&(n=t,t=null),r.lift(new h(e,t,n))}}(o,null,e)}(n)(new c(e,a))}},"3LDD":function(e,t,n){"use strict";var r=n("tose").f,o=n("51pc"),i=n("pBmS"),a=n("pa70"),s=n("Lcie"),l=n("p/bR"),c=n("WsSm"),u=n("w/BM"),d=n("KpXt"),h=n("V+0c"),p=n("xI8H").fastKey,f=n("Y5fy"),m=h?"_s":"size",g=function(e,t){var n,r=p(t);if("F"!==r)return e._i[r];for(n=e._f;n;n=n.n)if(n.k==t)return n};e.exports={getConstructor:function(e,t,n,c){var u=e(function(e,r)
 {s(e,u,t,"_i"),e._t=t,e._i=o(null),e._f=void 0,e._l=void 0,e[m]=0,void 0!=r&&l(r,n,e[c],e)});return i(u.prototype,{clear:function(){for(var e=f(this,t),n=e._i,r=e._f;r;r=r.n)r.r=!0,r.p&&(r.p=r.p.n=void 0),delete n[r.i];e._f=e._l=void 0,e[m]=0},delete:function(e){var n=f(this,t),r=g(n,e);if(r){var o=r.n,i=r.p;delete n._i[r.i],r.r=!0,i&&(i.n=o),o&&(o.p=i),n._f==r&&(n._f=o),n._l==r&&(n._l=i),n[m]--}return!!r},forEach:function(e){f(this,t);for(var n,r=a(e,arguments.length>1?arguments[1]:void 0,3);n=n?n.n:this._f;)for(r(n.v,n.k,this);n&&n.r;)n=n.p},has:function(e){return!!g(f(this,t),e)}}),h&&r(u.prototype,"size",{get:function(){return f(this,t)[m]}}),u},def:function(e,t,n){var r,o,i=g(e,t);return i?i.v=n:(e._l=i={i:o=p(t,!0),k:t,v:n,p:r=e._l,n:void 0,r:!1},e._f||(e._f=i),r&&(r.n=i),e[m]++,"F"!==o&&(e._i[o]=i)),e},getEntry:g,setStrong:function(e,t,n){c(e,t,function(e,n){this._t=f(e,t),this._k=n,this._l=void 0},function(){for(var e=this,t=e._k,n=e._l;n&&n.r;)n=n.p;return e._t&&(e._l=n=n?n
 .n:e._t._f)?"keys"==t?u(0,n.k):"values"==t?u(0,n.v):u(0,[n.k,n.v]):(e._t=void 0,u(1))},n?"entries":"values",!n,!0),d(t)}}},"3MMU":function(e,t,n){"use strict";var r=n("RT4T"),o=n("KM3d"),i=n("rppw");e.exports=[].copyWithin||function(e,t){var n=r(this),a=i(n.length),s=o(e,a),l=o(t,a),c=arguments.length>2?arguments[2]:void 0,u=Math.min((void 0===c?a:o(c,a))-l,a-s),d=1;for(l<s&&s<l+u&&(d=-1,l+=u-1,s+=u-1);u-- >0;)l in n?n[s]=n[l]:delete n[s],s+=d,l+=d;return n}},"3r0D":function(e,t,n){var r=n("Iclu")("wks"),o=n("c09d"),i=n("ptrv").Symbol,a="function"==typeof i;(e.exports=function(e){return r[e]||(r[e]=a&&i[e]||(a?i:o)("Symbol."+e))}).store=r},"4D9a":function(e,t,n){"use strict";n("RSwQ");var r=n("+pQw"),o=n("8H1R"),i=n("V+0c"),a="toString",s=/./[a],l=function(e){n("lfBE")(RegExp.prototype,a,e,!0)};n("umMR")(function(){return"/a/b"!=s.call({source:"a",flags:"b"})})?l(function(){var e=r(this);return"/".concat(e.source,"/","flags"in e?e.flags:!i&&e instanceof RegExp?o.call(e):void 0)}):s.
 name!=a&&l(function(){return s.call(this)})},"4TT8":function(e,t,n){var r=n("CDXM");r(r.S+r.F*!n("V+0c"),"Object",{defineProperties:n("ewdp")})},"51pc":function(e,t,n){var r=n("+pQw"),o=n("ewdp"),i=n("a/Sk"),a=n("yIWP")("IE_PROTO"),s=function(){},l="prototype",c=function(){var e,t=n("BQSv")("iframe"),r=i.length;for(t.style.display="none",n("Ed9o").appendChild(t),t.src="javascript:",(e=t.contentWindow.document).open(),e.write("<script>document.F=Object<\/script>"),e.close(),c=e.F;r--;)delete c[l][i[r]];return c()};e.exports=Object.create||function(e,t){var n;return null!==e?(s[l]=r(e),n=new s,s[l]=null,n[a]=e):n=c(),void 0===t?n:o(n,t)}},"570Y":function(e,t,n){(function(t){e.exports=function(){if(t.Blob)try{return new Blob(["asdf"],{type:"text/plain"}),Blob}catch(e){}var e=t.WebKitBlobBuilder||t.MozBlobBuilder||t.MSBlobBuilder;return function(t,n){var r=new e,o=n.endings,i=n.type;if(o)for(var a=0,s=t.length;a<s;++a)r.append(t[a],o);else for(var a=0,s=t.length;a<s;++a)r.append(t[a]);r
 eturn i?r.getBlob(i):r.getBlob()}}()}).call(t,n("fRUx"))},"5GJ3":function(e,t,n){var r=n("gBtn"),o=n("+pQw"),i=r.key,a=r.map,s=r.store;r.exp({deleteMetadata:function(e,t){var n=arguments.length<3?void 0:i(arguments[2]),r=a(o(t),n,!1);if(void 0===r||!r.delete(e))return!1;if(r.size)return!0;var l=s.get(t);return l.delete(n),!!l.size||s.delete(t)}})},"5b+r":function(e,t){e.exports=function(e,t,n){var r=void 0===n;switch(t.length){case 0:return r?e():e.call(n);case 1:return r?e(t[0]):e.call(n,t[0]);case 2:return r?e(t[0],t[1]):e.call(n,t[0],t[1]);case 3:return r?e(t[0],t[1],t[2]):e.call(n,t[0],t[1],t[2]);case 4:return r?e(t[0],t[1],t[2],t[3]):e.call(n,t[0],t[1],t[2],t[3])}return e.apply(n,t)}},"5oDA":function(e,t,n){var r=n("JXkd"),o=n("+pQw"),i=function(e,t){if(o(e),!r(t)&&null!==t)throw TypeError(t+": can't set as prototype!")};e.exports={set:Object.setPrototypeOf||("__proto__"in{}?function(e,t,r){try{(r=n("pa70")(Function.call,n("6De9").f(Object.prototype,"__proto__").set,2))(e,[]),t
 =!(e instanceof Array)}catch(e){t=!0}return function(e,n){return i(e,n),t?e.__proto__=n:r(e,n),e}}({},!1):void 0),check:i}},"6De9":function(e,t,n){var r=n("9e9+"),o=n("piOq"),i=n("+GRi"),a=n("A1WY"),s=n("rMsi"),l=n("gNkH"),c=Object.getOwnPropertyDescriptor;t.f=n("V+0c")?c:function(e,t){if(e=i(e),t=a(t,!0),l)try{return c(e,t)}catch(e){}if(s(e,t))return o(!r.f.call(e,t),e[t])}},"6F6V":function(e,t,n){"use strict";n("NhIS")("fontsize",function(e){return function(t){return e(this,"font","size",t)}})},"6GwK":function(e,t,n){var r=n("RT4T"),o=n("2Fuj");n("QN+J")("keys",function(){return function(e){return o(r(e))}})},"6Xbx":function(e,t,n){"use strict";t.b=function(e,t){function n(){this.constructor=e}r(e,t),e.prototype=null===t?Object.create(t):(n.prototype=t.prototype,new n)},n.d(t,"a",function(){return o});var r=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,t){e.__proto__=t}||function(e,t){for(var n in t)t.hasOwnProperty(n)&&(e[n]=t[n])},o=Object.assign||function(e)
 {for(var t,n=1,r=arguments.length;n<r;n++){t=arguments[n];for(var o in t)Object.prototype.hasOwnProperty.call(t,o)&&(e[o]=t[o])}return e}},"6tM8":function(e,t,n){"use strict";n("NhIS")("link",function(e){return function(t){return e(this,"a","href",t)}})},"76yl":function(e,t,n){"use strict";var r=n("+pQw"),o=n("A1WY");e.exports=function(e){if("string"!==e&&"number"!==e&&"default"!==e)throw TypeError("Incorrect hint");return o(r(this),"number"!=e)}},"7Fno":function(e,t,n){function r(e,t){var n,s,u=arguments.length<3?e:arguments[2];return c(e)===u?e[t]:(n=o.f(e,t))?a(n,"value")?n.value:void 0!==n.get?n.get.call(u):void 0:l(s=i(e))?r(s,t,u):void 0}var o=n("6De9"),i=n("TJLg"),a=n("rMsi"),s=n("CDXM"),l=n("JXkd"),c=n("+pQw");s(s.S,"Reflect",{get:r})},"8AR9":function(e,t,n){"use strict";var r=n("LMZF"),o=n("vCyR");t.ModalHeaderComponent=function(){function e(e){this.modal=e,this.showClose=!1}return e.decorators=[{type:r.Component,args:[{selector:"modal-header",template:'\n        <div class
 ="modal-header">\n            <button *ngIf="showClose" type="button" class="close" data-dismiss="modal" aria-label="Close" (click)="modal.dismiss()">\n                <span aria-hidden="true">&times;</span>\n            </button>\n            <ng-content></ng-content>\n        </div>\n    '}]}],e.ctorParameters=[{type:o.ModalComponent}],e.propDecorators={showClose:[{type:r.Input,args:["show-close"]}]},e}()},"8Gg3":function(e,t,n){var r=n("ptrv").parseInt,o=n("kFjN").trim,i=n("9BUF"),a=/^[-+]?0[xX]/;e.exports=8!==r(i+"08")||22!==r(i+"0x16")?function(e,t){var n=o(String(e),3);return r(n,t>>>0||(a.test(n)?16:10))}:r},"8H1R":function(e,t,n){"use strict";var r=n("+pQw");e.exports=function(){var e=r(this),t="";return e.global&&(t+="g"),e.ignoreCase&&(t+="i"),e.multiline&&(t+="m"),e.unicode&&(t+="u"),e.sticky&&(t+="y"),t}},"8ofh":function(e,t,n){"use strict";t.a=function(e){return r=e,function(){try{return r.apply(this,arguments)}catch(e){return o.a.e=e,o.a}}};var r,o=n("NePw")},"8sYH":fu
 nction(e,t,n){var r=n("gBtn"),o=n("+pQw"),i=n("TJLg"),a=r.has,s=r.key,l=function(e,t,n){if(a(e,t,n))return!0;var r=i(t);return null!==r&&l(e,r,n)};r.exp({hasMetadata:function(e,t){return l(e,o(t),arguments.length<3?void 0:s(arguments[2]))}})},"9BUF":function(e,t){e.exports="\t\n\v\f\r \xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029\ufeff"},"9ScN":function(e,t,n){"use strict";var r=n("51pc"),o=n("piOq"),i=n("P6IN"),a={};n("gxdV")(a,n("3r0D")("iterator"),function(){return this}),e.exports=function(e,t,n){e.prototype=r(a,{next:o(1,n)}),i(e,t+" Iterator")}},"9e9+":function(e,t){t.f={}.propertyIsEnumerable},"9wYb":function(e,t){var n=Math.ceil,r=Math.floor;e.exports=function(e){return isNaN(e=+e)?0:(e>0?r:n)(e)}},A1WY:function(e,t,n){var r=n("JXkd");e.exports=function(e,t){if(!r(e))return e;var n,o;if(t&&"function"==typeof(n=e.toString)&&!r(o=n.call(e)))return o;if("function"==typeof(n=e.valueOf)&&!r(o=n.call(e)))return o;i
 f(!t&&"function"==typeof(n=e.toString)&&!r(o=n.call(e)))return o;throw TypeError("Can't convert object to primitive value")}},A3hK:function(e,t,n){var r=n("CDXM");r(r.S,"Math",{sign:n("tWtF")})},ABVq:function(e,t,n){var r=n("CDXM"),o=Math.atanh;r(r.S+r.F*!(o&&1/o(-0)<0),"Math",{atanh:function(e){return 0==(e=+e)?e:Math.log((1+e)/(1-e))/2}})},AOSR:function(e,t,n){var r=n("CDXM"),o=n("KM3d"),i=String.fromCharCode,a=String.fromCodePoint;r(r.S+r.F*(!!a&&1!=a.length),"String",{fromCodePoint:function(e){for(var t,n=[],r=arguments.length,a=0;r>a;){if(t=+arguments[a++],o(t,1114111)!==t)throw RangeError(t+" is not a valid code point");n.push(t<65536?i(t):i(55296+((t-=65536)>>10),t%1024+56320))}return n.join("")}})},AP4T:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0});var r=n("xIGM"),o=n("E9/g"),i=n("V7AE"),a=n("grVA"),s=n("mz3w");n.d(t,"Observable",function(){return l});var l=function(){function e(e){this._isScalar=!1,e&&(this._subscribe=e)}return e.prototype.li
 ft=function(t){var n=new e;return n.source=this,n.operator=t,n},e.prototype.subscribe=function(e,t,n){var r=this.operator,s=function(e,t,n){if(e){if(e instanceof o.a)return e;if(e[i.a])return e[i.a]()}return e||t||n?new o.a(e,t,n):new o.a(a.a)}(e,t,n);if(r?r.call(s,this.source):s.add(this.source?this._subscribe(s):this._trySubscribe(s)),s.syncErrorThrowable&&(s.syncErrorThrowable=!1,s.syncErrorThrown))throw s.syncErrorValue;return s},e.prototype._trySubscribe=function(e){try{return this._subscribe(e)}catch(t){e.syncErrorThrown=!0,e.syncErrorValue=t,e.error(t)}},e.prototype.forEach=function(e,t){var n=this;if(t||(r.a.Rx&&r.a.Rx.config&&r.a.Rx.config.Promise?t=r.a.Rx.config.Promise:r.a.Promise&&(t=r.a.Promise)),!t)throw new Error("no Promise impl found");return new t(function(t,r){var o;o=n.subscribe(function(t){if(o)try{e(t)}catch(e){r(e),o.unsubscribe()}else e(t)},r,t)})},e.prototype._subscribe=function(e){return this.source.subscribe(e)},e.prototype[s.a]=function(){return this},e.p
 rototype.pipe=function(){for(var e=[],t=0;t<arguments.length;t++)e[t-0]=arguments[t];return 0===e.length?this:function(e){return e?1===e.length?e[0]:function(t){return e.reduce(function(e,t){return t(e)},t)}:function(){}}(e)(this)},e.prototype.toPromise=function(e){var t=this;if(e||(r.a.Rx&&r.a.Rx.config&&r.a.Rx.config.Promise?e=r.a.Rx.config.Promise:r.a.Promise&&(e=r.a.Promise)),!e)throw new Error("no Promise impl found");return new e(function(e,n){var r;t.subscribe(function(e){return r=e},function(e){return n(e)},function(){return e(r)})})},e.create=function(t){return new e(t)},e}()},Abrq:function(e,t,n){var r=n("CDXM");r(r.P,"Array",{copyWithin:n("3MMU")}),n("YymB")("copyWithin")},AdFz:function(e,t,n){"use strict";n("NhIS")("fixed",function(e){return function(){return e(this,"tt","","")}})},"B++z":function(e,t,n){var r=n("CDXM");r(r.S,"Number",{isNaN:function(e){return e!=e}})},B1iP:function(e,t,n){"use strict";t.a=function(e){return"function"==typeof e}},BCYq:function(e,t,n){var
  r=n("pa70"),o=n("Wo2w"),i=n("RT4T"),a=n("rppw"),s=n("UKZQ");e.exports=function(e,t){var n=1==e,l=2==e,c=3==e,u=4==e,d=6==e,h=5==e||d,p=t||s;return function(t,s,f){for(var m,g,v=i(t),y=o(v),b=r(s,f,3),w=a(y.length),k=0,x=n?p(t,w):l?p(t,0):void 0;w>k;k++)if((h||k in y)&&(m=y[k],g=b(m,k,v),e))if(n)x[k]=g;else if(g)switch(e){case 3:return!0;case 5:return m;case 6:return k;case 2:x.push(m)}else if(u)return!1;return d?-1:c||u?u:x}}},BMSF:function(e,t,n){var r=n("CDXM"),o=n("T0iK");r(r.S+r.F*(Number.parseFloat!=o),"Number",{parseFloat:o})},BQSv:function(e,t,n){var r=n("JXkd"),o=n("ptrv").document,i=r(o)&&r(o.createElement);e.exports=function(e){return i?o.createElement(e):{}}},CCJL:function(e,t,n){var r=n("+GRi"),o=n("6De9").f;n("QN+J")("getOwnPropertyDescriptor",function(){return function(e,t){return o(r(e),t)}})},CDXM:function(e,t,n){var r=n("ptrv"),o=n("b4gG"),i=n("gxdV"),a=n("lfBE"),s=n("pa70"),l=function(e,t,n){var c,u,d,h,p=e&l.F,f=e&l.G,m=e&l.P,g=e&l.B,v=f?r:e&l.S?r[t]||(r[t]={}):(
 r[t]||{}).prototype,y=f?o:o[t]||(o[t]={}),b=y.prototype||(y.prototype={});f&&(n=t);for(c in n)d=((u=!p&&v&&void 0!==v[c])?v:n)[c],h=g&&u?s(d,r):m&&"function"==typeof d?s(Function.call,d):d,v&&a(v,c,d,e&l.U),y[c]!=d&&i(y,c,h),m&&b[c]!=d&&(b[c]=d)};r.core=o,l.F=1,l.G=2,l.S=4,l.P=8,l.B=16,l.W=32,l.U=64,l.R=128,e.exports=l},Cc13:function(e,t,n){var r=n("ptrv"),o=n("b4gG"),i=n("KGrn"),a=n("qrqn"),s=n("tose").f;e.exports=function(e){var t=o.Symbol||(o.Symbol=i?{}:r.Symbol||{});"_"==e.charAt(0)||e in t||s(t,e,{value:a.f(e)})}},CjAR:function(e,t,n){n("YD56")("replace",2,function(e,t,n){return[function(r,o){"use strict";var i=e(this),a=void 0==r?void 0:r[t];return void 0!==a?a.call(r,i,o):n.call(String(i),r,o)},n]})},CxwD:function(e,t,n){var r=n("JXkd"),o=n("xI8H").onFreeze;n("QN+J")("seal",function(e){return function(t){return e&&r(t)?e(o(t)):t}})},Cz5P:function(e,t,n){"use strict";var r=n("pa70"),o=n("CDXM"),i=n("RT4T"),a=n("ULWX"),s=n("KpI+"),l=n("rppw"),c=n("GVIH"),u=n("fC8q");o(o.S+o.F*
 !n("UlVq")(function(e){Array.from(e)}),"Array",{from:function(e){var t,n,o,d,h=i(e),p="function"==typeof this?this:Array,f=arguments.length,m=f>1?arguments[1]:void 0,g=void 0!==m,v=0,y=u(h);if(g&&(m=r(m,f>2?arguments[2]:void 0,2)),void 0==y||p==Array&&s(y))for(n=new p(t=l(h.length));t>v;v++)c(n,v,g?m(h[v],v):h[v]);else for(d=y.call(h),n=new p;!(o=d.next()).done;v++)c(n,v,g?a(d,m,[o.value,v],!0):o.value);return n.length=v,n}})},DTeS:function(e,t,n){"use strict";n("NhIS")("sub",function(e){return function(){return e(this,"sub","","")}})},"E9/g":function(e,t,n){"use strict";n.d(t,"a",function(){return l});var r=n("6Xbx"),o=n("B1iP"),i=n("qLnt"),a=n("grVA"),s=n("V7AE"),l=function(e){function t(n,r,o){switch(e.call(this),this.syncErrorValue=null,this.syncErrorThrown=!1,this.syncErrorThrowable=!1,this.isStopped=!1,arguments.length){case 0:this.destination=a.a;break;case 1:if(!n){this.destination=a.a;break}if("object"==typeof n){n instanceof t?(this.destination=n,this.destination.add(this)
 ):(this.syncErrorThrowable=!0,this.destination=new c(this,n));break}default:this.syncErrorThrowable=!0,this.destination=new c(this,n,r,o)}}return Object(r.b)(t,e),t.prototype[s.a]=function(){return this},t.create=function(e,n,r){var o=new t(e,n,r);return o.syncErrorThrowable=!1,o},t.prototype.next=function(e){this.isStopped||this._next(e)},t.prototype.error=function(e){this.isStopped||(this.isStopped=!0,this._error(e))},t.prototype.complete=function(){this.isStopped||(this.isStopped=!0,this._complete())},t.prototype.unsubscribe=function(){this.closed||(this.isStopped=!0,e.prototype.unsubscribe.call(this))},t.prototype._next=function(e){this.destination.next(e)},t.prototype._error=function(e){this.destination.error(e),this.unsubscribe()},t.prototype._complete=function(){this.destination.complete(),this.unsubscribe()},t.prototype._unsubscribeAndRecycle=function(){var e=this._parent,t=this._parents;return this._parent=null,this._parents=null,this.unsubscribe(),this.closed=!1,this.isSto
 pped=!1,this._parent=e,this._parents=t,this},t}(i.a),c=function(e){function t(t,n,r,i){e.call(this),this._parentSubscriber=t;var s,l=this;Object(o.a)(n)?s=n:n&&(s=n.next,r=n.error,i=n.complete,n!==a.a&&(l=Object.create(n),Object(o.a)(l.unsubscribe)&&this.add(l.unsubscribe.bind(l)),l.unsubscribe=this.unsubscribe.bind(this))),this._context=l,this._next=s,this._error=r,this._complete=i}return Object(r.b)(t,e),t.prototype.next=function(e){if(!this.isStopped&&this._next){var t=this._parentSubscriber;t.syncErrorThrowable?this.__tryOrSetError(t,this._next,e)&&this.unsubscribe():this.__tryOrUnsub(this._next,e)}},t.prototype.error=function(e){if(!this.isStopped){var t=this._parentSubscriber;if(this._error)t.syncErrorThrowable?(this.__tryOrSetError(t,this._error,e),this.unsubscribe()):(this.__tryOrUnsub(this._error,e),this.unsubscribe());else{if(!t.syncErrorThrowable)throw this.unsubscribe(),e;t.syncErrorValue=e,t.syncErrorThrown=!0,this.unsubscribe()}}},t.prototype.complete=function(){var e=
 this;if(!this.isStopped){var t=this._parentSubscriber;if(this._complete){var n=function(){return e._complete.call(e._context)};t.syncErrorThrowable?(this.__tryOrSetError(t,n),this.unsubscribe()):(this.__tryOrUnsub(n),this.unsubscribe())}else this.unsubscribe()}},t.prototype.__tryOrUnsub=function(e,t){try{e.call(this._context,t)}catch(e){throw this.unsubscribe(),e}},t.prototype.__tryOrSetError=function(e,t,n){try{t.call(this._context,n)}catch(t){return e.syncErrorValue=t,e.syncErrorThrown=!0,!0}return!1},t.prototype._unsubscribe=function(){var e=this._parentSubscriber;this._context=null,this._parentSubscriber=null,e.unsubscribe()},t}(l)},"Ecq+":function(e,t,n){"use strict";n.d(t,"a",function(){return o});var r=n("6Xbx"),o=function(e){function t(t){e.call(this),this.scheduler=t}return Object(r.b)(t,e),t.create=function(e){return new t(e)},t.dispatch=function(e){e.subscriber.complete()},t.prototype._subscribe=function(e){var n=this.scheduler;if(n)return n.schedule(t.dispatch,0,{subscri
 ber:e});e.complete()},t}(n("AP4T").Observable)},Ed9o:function(e,t,n){var r=n("ptrv").document;e.exports=r&&r.documentElement},F6ce:function(e,t,n){var r=n("TM12"),o=n("Wy9r");e.exports=function(e,t,n){if(r(t))throw TypeError("String#"+n+" doesn't accept regex!");return String(o(e))}},FALa:function(e,t,n){var r=n("CDXM"),o=n("V/jj"),i=Math.exp;r(r.S+r.F*n("umMR")(function(){return-2e-17!=!Math.sinh(-2e-17)}),"Math",{sinh:function(e){return Math.abs(e=+e)<1?(o(e)-o(-e))/2:(i(e-1)-i(-e-1))*(Math.E/2)}})},FyA0:function(e,t,n){n("QN+J")("getOwnPropertyNames",function(){return n("y/ue").f})},GMpo:function(e,t,n){"use strict";n("NhIS")("italics",function(e){return function(){return e(this,"i","","")}})},GQSG:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0});var r=n("AP4T"),o=n("dmC+");r.Observable.prototype.map=o.a},GVIH:function(e,t,n){"use strict";var r=n("tose"),o=n("piOq");e.exports=function(e,t,n){t in e?r.f(e,t,o(0,n)):e[t]=n}},GWWY:function(e,t,n){n("mzUQ
 "),n("b8HQ"),e.exports=n("b4gG").Symbol},GWzR:function(e,t,n){"use strict";function r(e){for(var n in e)t.hasOwnProperty(n)||(t[n]=e[n])}var o=n("LMZF"),i=n("Un6q"),a=n("vCyR"),s=n("8AR9"),l=n("oXBO"),c=n("t+dn"),u=n("dy2L");r(n("vCyR")),r(n("8AR9")),r(n("oXBO")),r(n("t+dn")),r(n("ItmA")),t.Ng2Bs3ModalModule=function(){function e(){}return e.decorators=[{type:o.NgModule,args:[{imports:[i.CommonModule],declarations:[a.ModalComponent,s.ModalHeaderComponent,l.ModalBodyComponent,c.ModalFooterComponent,u.AutofocusDirective],exports:[a.ModalComponent,s.ModalHeaderComponent,l.ModalBodyComponent,c.ModalFooterComponent,u.AutofocusDirective]}]}],e.ctorParameters=[],e}()},"Gki+":function(e,t,n){var r=n("CDXM"),o=n("IU2P");r(r.P+r.F*(Date.prototype.toISOString!==o),"Date",{toISOString:o})},H3aY:function(e,t,n){var r=n("CDXM"),o=n("ptrv").isFinite;r(r.S,"Number",{isFinite:function(e){return"number"==typeof e&&o(e)}})},HCkn:function(e,t,n){var r=n("Ps07"),o=n("WGJ/"),i=n("gBtn"),a=n("+pQw"),s=n("
 TJLg"),l=i.keys,c=i.key,u=function(e,t){var n=l(e,t),i=s(e);if(null===i)return n;var a=u(i,t);return a.length?n.length?o(new r(n.concat(a))):a:n};i.exp({getMetadataKeys:function(e){return u(a(e),arguments.length<2?void 0:c(arguments[1]))}})},HK9U:function(e,t,n){"use strict";n("NhIS")("sup",function(e){return function(){return e(this,"sup","","")}})},HzDK:function(e,t,n){"use strict";var r=n("CDXM"),o=n("OGmI");r(r.P+r.F*!n("bhtb")([].reduce,!0),"Array",{reduce:function(e){return o(this,e,arguments.length,arguments[1],!1)}})},"I+CO":function(e,t,n){var r=n("3r0D")("toPrimitive"),o=Date.prototype;r in o||n("gxdV")(o,r,n("76yl"))},IGm2:function(e,t,n){"use strict";var r=n("CDXM"),o=n("F6ce");r(r.P+r.F*n("TmDx")("includes"),"String",{includes:function(e){return!!~o(this,e,"includes").indexOf(e,arguments.length>1?arguments[1]:void 0)}})},IJ3P:function(e,t,n){var r=n("gBtn"),o=n("+pQw"),i=r.has,a=r.key;r.exp({hasOwnMetadata:function(e,t){return i(e,o(t),arguments.length<3?void 0:a(argume
 nts[2]))}})},IU2P:function(e,t,n){"use strict";var r=n("umMR"),o=Date.prototype.getTime,i=Date.prototype.toISOString,a=function(e){return e>9?e:"0"+e};e.exports=r(function(){return"0385-07-25T07:06:39.999Z"!=i.call(new Date(-5e13-1))})||!r(function(){i.call(new Date(NaN))})?function(){if(!isFinite(o.call(this)))throw RangeError("Invalid time value");var e=this.getUTCFullYear(),t=this.getUTCMilliseconds(),n=e<0?"-":e>9999?"+":"";return n+("00000"+Math.abs(e)).slice(n?-6:-4)+"-"+a(this.getUTCMonth()+1)+"-"+a(this.getUTCDate())+"T"+a(this.getUTCHours())+":"+a(this.getUTCMinutes())+":"+a(this.getUTCSeconds())+"."+(t>99?t:"0"+a(t))+"Z"}:i},Iclu:function(e,t,n){var r=n("ptrv"),o="__core-js_shared__",i=r[o]||(r[o]={});e.exports=function(e){return i[e]||(i[e]={})}},ItmA:function(e,t,n){"use strict";function r(e){return"true"===e||"false"!==e&&e}function o(e){return new Promise(function(t,n){e.subscribe(function(e){t(e)})})}var i=n("AP4T");n("GQSG"),n("nbhv"),t.ModalInstance=function(){funct
 ion e(e){this.element=e,this.suffix=".ng2-bs3-modal",this.shownEventName="shown.bs.modal"+this.suffix,this.hiddenEventName="hidden.bs.modal"+this.suffix,this.visible=!1,this.init()}return e.prototype.open=function(){return this.show()},e.prototype.close=function(){return this.result=a.Close,this.hide()},e.prototype.dismiss=function(){return this.result=a.Dismiss,this.hide()},e.prototype.destroy=function(){var e=this;return this.hide().then(function(){e.$modal&&(e.$modal.data("bs.modal",null),e.$modal.remove())})},e.prototype.show=function(){var e=o(this.shown);return this.resetData(),this.$modal.modal(),e},e.prototype.hide=function(){if(this.$modal&&this.visible){var e=o(this.hidden);return this.$modal.modal("hide"),e}return Promise.resolve(this.result)},e.prototype.init=function(){var e=this;this.$modal=jQuery(this.element.nativeElement),this.$modal.appendTo("body"),this.shown=i.Observable.fromEvent(this.$modal,this.shownEventName).map(function(){e.visible=!0}),this.hidden=i.Observ
 able.fromEvent(this.$modal,this.hiddenEventName).map(function(){var t=e.result&&e.result!==a.None?e.result:a.Dismiss;return e.result=a.None,e.visible=!1,t})},e.prototype.resetData=function(){this.$modal.removeData(),this.$modal.data("backdrop",r(this.$modal.attr("data-backdrop"))),this.$modal.data("keyboard",r(this.$modal.attr("data-keyboard")))},e}(),function(e){e[e.None=0]="None",e[e.Close=1]="Close",e[e.Dismiss=2]="Dismiss"}(t.ModalResult||(t.ModalResult={}));var a=t.ModalResult},J9Eb:function(e,t){e.exports.id="ace/mode/html_worker",e.exports.src='"no use strict";(function(window){function resolveModuleId(id,paths){for(var testPath=id,tail="";testPath;){var alias=paths[testPath];if("string"==typeof alias)return alias+tail;if(alias)return alias.location.replace(/\\/*$/,"/")+(tail||alias.main||alias.name);if(alias===!1)return"";var i=testPath.lastIndexOf("/");if(-1===i)break;tail=testPath.substr(i)+tail,testPath=testPath.slice(0,i)}return id}if(!(void 0!==window.window&&window.doc
 ument||window.acequire&&window.define)){window.console||(window.console=function(){var msgs=Array.prototype.slice.call(arguments,0);postMessage({type:"log",data:msgs})},window.console.error=window.console.warn=window.console.log=window.console.trace=window.console),window.window=window,window.ace=window,window.onerror=function(message,file,line,col,err){postMessage({type:"error",data:{message:message,data:err.data,file:file,line:line,col:col,stack:err.stack}})},window.normalizeModule=function(parentId,moduleName){if(-1!==moduleName.indexOf("!")){var chunks=moduleName.split("!");return window.normalizeModule(parentId,chunks[0])+"!"+window.normalizeModule(parentId,chunks[1])}if("."==moduleName.charAt(0)){var base=parentId.split("/").slice(0,-1).join("/");for(moduleName=(base?base+"/":"")+moduleName;-1!==moduleName.indexOf(".")&&previous!=moduleName;){var previous=moduleName;moduleName=moduleName.replace(/^\\.\\//,"").replace(/\\/\\.\\//,"/").replace(/[^\\/]+\\/\\.\\.\\//,"")}}return m
 oduleName},window.acequire=function acequire(parentId,id){if(id||(id=parentId,parentId=null),!id.charAt)throw Error("worker.js acequire() accepts only (parentId, id) as arguments");id=window.normalizeModule(parentId,id);var module=window.acequire.modules[id];if(module)return module.initialized||(module.initialized=!0,module.exports=module.factory().exports),module.exports;if(!window.acequire.tlns)return console.log("unable to load "+id);var path=resolveModuleId(id,window.acequire.tlns);return".js"!=path.slice(-3)&&(path+=".js"),window.acequire.id=id,window.acequire.modules[id]={},importScripts(path),window.acequire(parentId,id)},window.acequire.modules={},window.acequire.tlns={},window.define=function(id,deps,factory){if(2==arguments.length?(factory=deps,"string"!=typeof id&&(deps=id,id=window.acequire.id)):1==arguments.length&&(factory=id,deps=[],id=window.acequire.id),"function"!=typeof factory)return window.acequire.modules[id]={exports:factory,initialized:!0},void 0;deps.length|
 |(deps=["require","exports","module"]);var req=function(childId){return window.acequire(id,childId)};window.acequire.modules[id]={exports:{},factory:function(){var module=this,returnExports=factory.apply(this,deps.map(function(dep){switch(dep){case"require":return req;case"exports":return module.exports;case"module":return module;default:return req(dep)}}));return returnExports&&(module.exports=returnExports),module}}},window.define.amd={},acequire.tlns={},window.initBaseUrls=function(topLevelNamespaces){for(var i in topLevelNamespaces)acequire.tlns[i]=topLevelNamespaces[i]},window.initSender=function(){var EventEmitter=window.acequire("ace/lib/event_emitter").EventEmitter,oop=window.acequire("ace/lib/oop"),Sender=function(){};return function(){oop.implement(this,EventEmitter),this.callback=function(data,callbackId){postMessage({type:"call",id:callbackId,data:data})},this.emit=function(name,data){postMessage({type:"event",name:name,data:data})}}.call(Sender.prototype),new Sender};va
 r main=window.main=null,sender=window.sender=null;window.onmessage=function(e){var msg=e.data;if(msg.event&&sender)sender._signal(msg.event,msg.data);else if(msg.command)if(main[msg.command])main[msg.command].apply(main,msg.args);else{if(!window[msg.command])throw Error("Unknown command:"+msg.command);window[msg.command].apply(window,msg.args)}else if(msg.init){window.initBaseUrls(msg.tlns),acequire("ace/lib/es5-shim"),sender=window.sender=window.initSender();var clazz=acequire(msg.module)[msg.classname];main=window.main=new clazz(sender)}}}})(this),ace.define("ace/lib/oop",["require","exports","module"],function(acequire,exports){"use strict";exports.inherits=function(ctor,superCtor){ctor.super_=superCtor,ctor.prototype=Object.create(superCtor.prototype,{constructor:{value:ctor,enumerable:!1,writable:!0,configurable:!0}})},exports.mixin=function(obj,mixin){for(var key in mixin)obj[key]=mixin[key];return obj},exports.implement=function(proto,mixin){exports.mixin(proto,mixin)}}),ace.
 define("ace/lib/lang",["require","exports","module"],function(acequire,exports){"use strict";exports.last=function(a){return a[a.length-1]},exports.stringReverse=function(string){return string.split("").reverse().join("")},exports.stringRepeat=function(string,count){for(var result="";count>0;)1&count&&(result+=string),(count>>=1)&&(string+=string);return result};var trimBeginRegexp=/^\\s\\s*/,trimEndRegexp=/\\s\\s*$/;exports.stringTrimLeft=function(string){return string.replace(trimBeginRegexp,"")},exports.stringTrimRight=function(string){return string.replace(trimEndRegexp,"")},exports.copyObject=function(obj){var copy={};for(var key in obj)copy[key]=obj[key];return copy},exports.copyArray=function(array){for(var copy=[],i=0,l=array.length;l>i;i++)copy[i]=array[i]&&"object"==typeof array[i]?this.copyObject(array[i]):array[i];return copy},exports.deepCopy=function deepCopy(obj){if("object"!=typeof obj||!obj)return obj;var copy;if(Array.isArray(obj)){copy=[];for(var key=0;obj.length>
 key;key++)copy[key]=deepCopy(obj[key]);return copy}if("[object Object]"!==Object.prototype.toString.call(obj))return obj;copy={};for(var key in obj)copy[key]=deepCopy(obj[key]);return copy},exports.arrayToMap=function(arr){for(var map={},i=0;arr.length>i;i++)map[arr[i]]=1;return map},exports.createMap=function(props){var map=Object.create(null);for(var i in props)map[i]=props[i];return map},exports.arrayRemove=function(array,value){for(var i=0;array.length>=i;i++)value===array[i]&&array.splice(i,1)},exports.escapeRegExp=function(str){return str.replace(/([.*+?^${}()|[\\]\\/\\\\])/g,"\\\\$1")},exports.escapeHTML=function(str){return str.replace(/&/g,"&#38;").replace(/"/g,"&#34;").replace(/\'/g,"&#39;").replace(/</g,"&#60;")},exports.getMatchOffsets=function(string,regExp){var matches=[];return string.replace(regExp,function(str){matches.push({offset:arguments[arguments.length-2],length:str.length})}),matches},exports.deferredCall=function(fcn){var timer=null,callback=function(){timer
 =null,fcn()},deferred=function(timeout){return deferred.cancel(),timer=setTimeout(callback,timeout||0),deferred};return deferred.schedule=deferred,deferred.call=function(){return this.cancel(),fcn(),deferred},deferred.cancel=function(){return clearTimeout(timer),timer=null,deferred},deferred.isPending=function(){return timer},deferred},exports.delayedCall=function(fcn,defaultTimeout){var timer=null,callback=function(){timer=null,fcn()},_self=function(timeout){null==timer&&(timer=setTimeout(callback,timeout||defaultTimeout))};return _self.delay=function(timeout){timer&&clearTimeout(timer),timer=setTimeout(callback,timeout||defaultTimeout)},_self.schedule=_self,_self.call=function(){this.cancel(),fcn()},_self.cancel=function(){timer&&clearTimeout(timer),timer=null},_self.isPending=function(){return timer},_self}}),ace.define("ace/range",["require","exports","module"],function(acequire,exports){"use strict";var comparePoints=function(p1,p2){return p1.row-p2.row||p1.column-p2.column},Ra
 nge=function(startRow,startColumn,endRow,endColumn){this.start={row:startRow,column:startColumn},this.end={row:endRow,column:endColumn}};(function(){this.isEqual=function(range){return this.start.row===range.start.row&&this.end.row===range.end.row&&this.start.column===range.start.column&&this.end.column===range.end.column},this.toString=function(){return"Range: ["+this.start.row+"/"+this.start.column+"] -> ["+this.end.row+"/"+this.end.column+"]"},this.contains=function(row,column){return 0==this.compare(row,column)},this.compareRange=function(range){var cmp,end=range.end,start=range.start;return cmp=this.compare(end.row,end.column),1==cmp?(cmp=this.compare(start.row,start.column),1==cmp?2:0==cmp?1:0):-1==cmp?-2:(cmp=this.compare(start.row,start.column),-1==cmp?-1:1==cmp?42:0)},this.comparePoint=function(p){return this.compare(p.row,p.column)},this.containsRange=function(range){return 0==this.comparePoint(range.start)&&0==this.comparePoint(range.end)},this.intersects=function(range){
 var cmp=this.compareRange(range);return-1==cmp||0==cmp||1==cmp},this.isEnd=function(row,column){return this.end.row==row&&this.end.column==column},this.isStart=function(row,column){return this.start.row==row&&this.start.column==column},this.setStart=function(row,column){"object"==typeof row?(this.start.column=row.column,this.start.row=row.row):(this.start.row=row,this.start.column=column)},this.setEnd=function(row,column){"object"==typeof row?(this.end.column=row.column,this.end.row=row.row):(this.end.row=row,this.end.column=column)},this.inside=function(row,column){return 0==this.compare(row,column)?this.isEnd(row,column)||this.isStart(row,column)?!1:!0:!1},this.insideStart=function(row,column){return 0==this.compare(row,column)?this.isEnd(row,column)?!1:!0:!1},this.insideEnd=function(row,column){return 0==this.compare(row,column)?this.isStart(row,column)?!1:!0:!1},this.compare=function(row,column){return this.isMultiLine()||row!==this.start.row?this.start.row>row?-1:row>this.end.r
 ow?1:this.start.row===row?column>=this.start.column?0:-1:this.end.row===row?this.end.column>=column?0:1:0:this.start.column>column?-1:column>this.end.column?1:0},this.compareStart=function(row,column){return this.start.row==row&&this.start.column==column?-1:this.compare(row,column)},this.compareEnd=function(row,column){return this.end.row==row&&this.end.column==column?1:this.compare(row,column)},this.compareInside=function(row,column){return this.end.row==row&&this.end.column==column?1:this.start.row==row&&this.start.column==column?-1:this.compare(row,column)},this.clipRows=function(firstRow,lastRow){if(this.end.row>lastRow)var end={row:lastRow+1,column:0};else if(firstRow>this.end.row)var end={row:firstRow,column:0};if(this.start.row>lastRow)var start={row:lastRow+1,column:0};else if(firstRow>this.start.row)var start={row:firstRow,column:0};return Range.fromPoints(start||this.start,end||this.end)},this.extend=function(row,column){var cmp=this.compare(row,column);if(0==cmp)return th
 is;if(-1==cmp)var start={row:row,column:column};else var end={row:row,column:column};return Range.fromPoints(start||this.start,end||this.end)},this.isEmpty=function(){return this.start.row===this.end.row&&this.start.column===this.end.column},this.isMultiLine=function(){return this.start.row!==this.end.row},this.clone=function(){return Range.fromPoints(this.start,this.end)},this.collapseRows=function(){return 0==this.end.column?new Range(this.start.row,0,Math.max(this.start.row,this.end.row-1),0):new Range(this.start.row,0,this.end.row,0)},this.toScreenRange=function(session){var screenPosStart=session.documentToScreenPosition(this.start),screenPosEnd=session.documentToScreenPosition(this.end);return new Range(screenPosStart.row,screenPosStart.column,screenPosEnd.row,screenPosEnd.column)},this.moveBy=function(row,column){this.start.row+=row,this.start.column+=column,this.end.row+=row,this.end.column+=column}}).call(Range.prototype),Range.fromPoints=function(start,end){return new Rang
 e(start.row,start.column,end.row,end.column)},Range.comparePoints=comparePoints,Range.comparePoints=function(p1,p2){return p1.row-p2.row||p1.column-p2.column},exports.Range=Range}),ace.define("ace/apply_delta",["require","exports","module"],function(acequire,exports){"use strict";exports.applyDelta=function(docLines,delta){var row=delta.start.row,startColumn=delta.start.column,line=docLines[row]||"";switch(delta.action){case"insert":var lines=delta.lines;if(1===lines.length)docLines[row]=line.substring(0,startColumn)+delta.lines[0]+line.substring(startColumn);else{var args=[row,1].concat(delta.lines);docLines.splice.apply(docLines,args),docLines[row]=line.substring(0,startColumn)+docLines[row],docLines[row+delta.lines.length-1]+=line.substring(startColumn)}break;case"remove":var endColumn=delta.end.column,endRow=delta.end.row;row===endRow?docLines[row]=line.substring(0,startColumn)+line.substring(endColumn):docLines.splice(row,endRow-row+1,line.substring(0,startColumn)+docLines[endR
 ow].substring(endColumn))}}}),ace.define("ace/lib/event_emitter",["require","exports","module"],function(acequire,exports){"use strict";var EventEmitter={},stopPropagation=function(){this.propagationStopped=!0},preventDefault=function(){this.defaultPrevented=!0};EventEmitter._emit=EventEmitter._dispatchEvent=function(eventName,e){this._eventRegistry||(this._eventRegistry={}),this._defaultHandlers||(this._defaultHandlers={});var listeners=this._eventRegistry[eventName]||[],defaultHandler=this._defaultHandlers[eventName];if(listeners.length||defaultHandler){"object"==typeof e&&e||(e={}),e.type||(e.type=eventName),e.stopPropagation||(e.stopPropagation=stopPropagation),e.preventDefault||(e.preventDefault=preventDefault),listeners=listeners.slice();for(var i=0;listeners.length>i&&(listeners[i](e,this),!e.propagationStopped);i++);return defaultHandler&&!e.defaultPrevented?defaultHandler(e,this):void 0}},EventEmitter._signal=function(eventName,e){var listeners=(this._eventRegistry||{})[eve
 ntName];if(listeners){listeners=listeners.slice();for(var i=0;listeners.length>i;i++)listeners[i](e,this)}},EventEmitter.once=function(eventName,callback){var _self=this;callback&&this.addEventListener(eventName,function newCallback(){_self.removeEventListener(eventName,newCallback),callback.apply(null,arguments)})},EventEmitter.setDefaultHandler=function(eventName,callback){var handlers=this._defaultHandlers;if(handlers||(handlers=this._defaultHandlers={_disabled_:{}}),handlers[eventName]){var old=handlers[eventName],disabled=handlers._disabled_[eventName];disabled||(handlers._disabled_[eventName]=disabled=[]),disabled.push(old);var i=disabled.indexOf(callback);-1!=i&&disabled.splice(i,1)}handlers[eventName]=callback},EventEmitter.removeDefaultHandler=function(eventName,callback){var handlers=this._defaultHandlers;if(handlers){var disabled=handlers._disabled_[eventName];if(handlers[eventName]==callback)handlers[eventName],disabled&&this.setDefaultHandler(eventName,disabled.pop());e
 lse if(disabled){var i=disabled.indexOf(callback);-1!=i&&disabled.splice(i,1)}}},EventEmitter.on=EventEmitter.addEventListener=function(eventName,callback,capturing){this._eventRegistry=this._eventRegistry||{};var listeners=this._eventRegistry[eventName];return listeners||(listeners=this._eventRegistry[eventName]=[]),-1==listeners.indexOf(callback)&&listeners[capturing?"unshift":"push"](callback),callback},EventEmitter.off=EventEmitter.removeListener=EventEmitter.removeEventListener=function(eventName,callback){this._eventRegistry=this._eventRegistry||{};var listeners=this._eventRegistry[eventName];if(listeners){var index=listeners.indexOf(callback);-1!==index&&listeners.splice(index,1)}},EventEmitter.removeAllListeners=function(eventName){this._eventRegistry&&(this._eventRegistry[eventName]=[])},exports.EventEmitter=EventEmitter}),ace.define("ace/anchor",["require","exports","module","ace/lib/oop","ace/lib/event_emitter"],function(acequire,exports){"use strict";var oop=acequire("./
 lib/oop"),EventEmitter=acequire("./lib/event_emitter").EventEmitter,Anchor=exports.Anchor=function(doc,row,column){this.$onChange=this.onChange.bind(this),this.attach(doc),column===void 0?this.setPosition(row.row,row.column):this.setPosition(row,column)};(function(){function $pointsInOrder(point1,point2,equalPointsInOrder){var bColIsAfter=equalPointsInOrder?point1.column<=point2.column:point1.column<point2.column;return point1.row<point2.row||point1.row==point2.row&&bColIsAfter}function $getTransformedPoint(delta,point,moveIfEqual){var deltaIsInsert="insert"==delta.action,deltaRowShift=(deltaIsInsert?1:-1)*(delta.end.row-delta.start.row),deltaColShift=(deltaIsInsert?1:-1)*(delta.end.column-delta.start.column),deltaStart=delta.start,deltaEnd=deltaIsInsert?deltaStart:delta.end;return $pointsInOrder(point,deltaStart,moveIfEqual)?{row:point.row,column:point.column}:$pointsInOrder(deltaEnd,point,!moveIfEqual)?{row:point.row+deltaRowShift,column:point.column+(point.row==deltaEnd.row?delta
 ColShift:0)}:{row:deltaStart.row,column:deltaStart.column}}oop.implement(this,EventEmitter),this.getPosition=function(){return this.$clipPositionToDocument(this.row,this.column)},this.getDocument=function(){return this.document},this.$insertRight=!1,this.onChange=function(delta){if(!(delta.start.row==delta.end.row&&delta.start.row!=this.row||delta.start.row>this.row)){var point=$getTransformedPoint(delta,{row:this.row,column:this.column},this.$insertRight);this.setPosition(point.row,point.column,!0)}},this.setPosition=function(row,column,noClip){var pos;if(pos=noClip?{row:row,column:column}:this.$clipPositionToDocument(row,column),this.row!=pos.row||this.column!=pos.column){var old={row:this.row,column:this.column};this.row=pos.row,this.column=pos.column,this._signal("change",{old:old,value:pos})}},this.detach=function(){this.document.removeEventListener("change",this.$onChange)},this.attach=function(doc){this.document=doc||this.document,this.document.on("change",this.$onChange)},th
 is.$clipPositionToDocument=function(row,column){var pos={};return row>=this.document.getLength()?(pos.row=Math.max(0,this.document.getLength()-1),pos.column=this.document.getLine(pos.row).length):0>row?(pos.row=0,pos.column=0):(pos.row=row,pos.column=Math.min(this.document.getLine(pos.row).length,Math.max(0,column))),0>column&&(pos.column=0),pos}}).call(Anchor.prototype)}),ace.define("ace/document",["require","exports","module","ace/lib/oop","ace/apply_delta","ace/lib/event_emitter","ace/range","ace/anchor"],function(acequire,exports){"use strict";var oop=acequire("./lib/oop"),applyDelta=acequire("./apply_delta").applyDelta,EventEmitter=acequire("./lib/event_emitter").EventEmitter,Range=acequire("./range").Range,Anchor=acequire("./anchor").Anchor,Document=function(textOrLines){this.$lines=[""],0===textOrLines.length?this.$lines=[""]:Array.isArray(textOrLines)?this.insertMergedLines({row:0,column:0},textOrLines):this.insert({row:0,column:0},textOrLines)};(function(){oop.implement(thi
 s,EventEmitter),this.setValue=function(text){var len=this.getLength()-1;this.remove(new Range(0,0,len,this.getLine(len).length)),this.insert({row:0,column:0},text)},this.getValue=function(){return this.getAllLines().join(this.getNewLineCharacter())},this.createAnchor=function(row,column){return new Anchor(this,row,column)},this.$split=0==="aaa".split(/a/).length?function(text){return text.replace(/\\r\\n|\\r/g,"\\n").split("\\n")}:function(text){return text.split(/\\r\\n|\\r|\\n/)},this.$detectNewLine=function(text){var match=text.match(/^.*?(\\r\\n|\\r|\\n)/m);this.$autoNewLine=match?match[1]:"\\n",this._signal("changeNewLineMode")},this.getNewLineCharacter=function(){switch(this.$newLineMode){case"windows":return"\\r\\n";case"unix":return"\\n";default:return this.$autoNewLine||"\\n"}},this.$autoNewLine="",this.$newLineMode="auto",this.setNewLineMode=function(newLineMode){this.$newLineMode!==newLineMode&&(this.$newLineMode=newLineMode,this._signal("changeNewLineMode"))},this.getNew
 LineMode=function(){return this.$newLineMode},this.isNewLine=function(text){return"\\r\\n"==text||"\\r"==text||"\\n"==text},this.getLine=function(row){return this.$lines[row]||""},this.getLines=function(firstRow,lastRow){return this.$lines.slice(firstRow,lastRow+1)},this.getAllLines=function(){return this.getLines(0,this.getLength())},this.getLength=function(){return this.$lines.length},this.getTextRange=function(range){return this.getLinesForRange(range).join(this.getNewLineCharacter())},this.getLinesForRange=function(range){var lines;if(range.start.row===range.end.row)lines=[this.getLine(range.start.row).substring(range.start.column,range.end.column)];else{lines=this.getLines(range.start.row,range.end.row),lines[0]=(lines[0]||"").substring(range.start.column);var l=lines.length-1;range.end.row-range.start.row==l&&(lines[l]=lines[l].substring(0,range.end.column))}return lines},this.insertLines=function(row,lines){return console.warn("Use of document.insertLines is deprecated. Use t
 he insertFullLines method instead."),this.insertFullLines(row,lines)},this.removeLines=function(firstRow,lastRow){return console.warn("Use of document.removeLines is deprecated. Use the removeFullLines method instead."),this.removeFullLines(firstRow,lastRow)},this.insertNewLine=function(position){return console.warn("Use of document.insertNewLine is deprecated. Use insertMergedLines(position, [\'\', \'\']) instead."),this.insertMergedLines(position,["",""])},this.insert=function(position,text){return 1>=this.getLength()&&this.$detectNewLine(text),this.insertMergedLines(position,this.$split(text))},this.insertInLine=function(position,text){var start=this.clippedPos(position.row,position.column),end=this.pos(position.row,position.column+text.length);return this.applyDelta({start:start,end:end,action:"insert",lines:[text]},!0),this.clonePos(end)},this.clippedPos=function(row,column){var length=this.getLength();void 0===row?row=length:0>row?row=0:row>=length&&(row=length-1,column=void 0
 );var line=this.getLine(row);return void 0==column&&(column=line.length),column=Math.min(Math.max(column,0),line.length),{row:row,column:column}},this.clonePos=function(pos){return{row:pos.row,column:pos.column}},this.pos=function(row,column){return{row:row,column:column}},this.$clipPosition=function(position){var length=this.getLength();return position.row>=length?(position.row=Math.max(0,length-1),position.column=this.getLine(length-1).length):(position.row=Math.max(0,position.row),position.column=Math.min(Math.max(position.column,0),this.getLine(position.row).length)),position},this.insertFullLines=function(row,lines){row=Math.min(Math.max(row,0),this.getLength());var column=0;this.getLength()>row?(lines=lines.concat([""]),column=0):(lines=[""].concat(lines),row--,column=this.$lines[row].length),this.insertMergedLines({row:row,column:column},lines)},this.insertMergedLines=function(position,lines){var start=this.clippedPos(position.row,position.column),end={row:start.row+lines.len
 gth-1,column:(1==lines.length?start.column:0)+lines[lines.length-1].length};return this.applyDelta({start:start,end:end,action:"insert",lines:lines}),this.clonePos(end)},this.remove=function(range){var start=this.clippedPos(range.start.row,range.start.column),end=this.clippedPos(range.end.row,range.end.column);return this.applyDelta({start:start,end:end,action:"remove",lines:this.getLinesForRange({start:start,end:end})}),this.clonePos(start)},this.removeInLine=function(row,startColumn,endColumn){var start=this.clippedPos(row,startColumn),end=this.clippedPos(row,endColumn);return this.applyDelta({start:start,end:end,action:"remove",lines:this.getLinesForRange({start:start,end:end})},!0),this.clonePos(start)},this.removeFullLines=function(firstRow,lastRow){firstRow=Math.min(Math.max(0,firstRow),this.getLength()-1),lastRow=Math.min(Math.max(0,lastRow),this.getLength()-1);var deleteFirstNewLine=lastRow==this.getLength()-1&&firstRow>0,deleteLastNewLine=this.getLength()-1>lastRow,startRow
 =deleteFirstNewLine?firstRow-1:firstRow,startCol=deleteFirstNewLine?this.getLine(startRow).length:0,endRow=deleteLastNewLine?lastRow+1:lastRow,endCol=deleteLastNewLine?0:this.getLine(endRow).length,range=new Range(startRow,startCol,endRow,endCol),deletedLines=this.$lines.slice(firstRow,lastRow+1);return this.applyDelta({start:range.start,end:range.end,action:"remove",lines:this.getLinesForRange(range)}),deletedLines},this.removeNewLine=function(row){this.getLength()-1>row&&row>=0&&this.applyDelta({start:this.pos(row,this.getLine(row).length),end:this.pos(row+1,0),action:"remove",lines:["",""]})},this.replace=function(range,text){if(range instanceof Range||(range=Range.fromPoints(range.start,range.end)),0===text.length&&range.isEmpty())return range.start;if(text==this.getTextRange(range))return range.end;this.remove(range);var end;return end=text?this.insert(range.start,text):range.start},this.applyDeltas=function(deltas){for(var i=0;deltas.length>i;i++)this.applyDelta(deltas[i])},th
 is.revertDeltas=function(deltas){for(var i=deltas.length-1;i>=0;i--)this.revertDelta(deltas[i])},this.applyDelta=function(delta,doNotValidate){var isInsert="insert"==delta.action;(isInsert?1>=delta.lines.length&&!delta.lines[0]:!Range.comparePoints(delta.start,delta.end))||(isInsert&&delta.lines.length>2e4&&this.$splitAndapplyLargeDelta(delta,2e4),applyDelta(this.$lines,delta,doNotValidate),this._signal("change",delta))},this.$splitAndapplyLargeDelta=function(delta,MAX){for(var lines=delta.lines,l=lines.length,row=delta.start.row,column=delta.start.column,from=0,to=0;;){from=to,to+=MAX-1;var chunk=lines.slice(from,to);if(to>l){delta.lines=chunk,delta.start.row=row+from,delta.start.column=column;break}chunk.push(""),this.applyDelta({start:this.pos(row+from,column),end:this.pos(row+to,column=0),action:delta.action,lines:chunk},!0)}},this.revertDelta=function(delta){this.applyDelta({start:this.clonePos(delta.start),end:this.clonePos(delta.end),action:"insert"==delta.action?"remove":"in
 sert",lines:delta.lines.slice()})},this.indexToPosition=function(index,startRow){for(var lines=this.$lines||this.getAllLines(),newlineLength=this.getNewLineCharacter().length,i=startRow||0,l=lines.length;l>i;i++)if(index-=lines[i].length+newlineLength,0>index)return{row:i,column:index+lines[i].length+newlineLength};return{row:l-1,column:lines[l-1].length}},this.positionToIndex=function(pos,startRow){for(var lines=this.$lines||this.getAllLines(),newlineLength=this.getNewLineCharacter().length,index=0,row=Math.min(pos.row,lines.length),i=startRow||0;row>i;++i)index+=lines[i].length+newlineLength;return index+pos.column}}).call(Document.prototype),exports.Document=Document}),ace.define("ace/worker/mirror",["require","exports","module","ace/range","ace/document","ace/lib/lang"],function(acequire,exports){"use strict";acequire("../range").Range;var Document=acequire("../document").Document,lang=acequire("../lib/lang"),Mirror=exports.Mirror=function(sender){this.sender=sender;var doc=this
 .doc=new Document(""),deferredUpdate=this.deferredUpdate=lang.delayedCall(this.onUpdate.bind(this)),_self=this;sender.on("change",function(e){var data=e.data;if(data[0].start)doc.applyDeltas(data);else for(var i=0;data.length>i;i+=2){if(Array.isArray(data[i+1]))var d={action:"insert",start:data[i],lines:data[i+1]};else var d={action:"remove",start:data[i],end:data[i+1]};doc.applyDelta(d,!0)}return _self.$timeout?deferredUpdate.schedule(_self.$timeout):(_self.onUpdate(),void 0)})};(function(){this.$timeout=500,this.setTimeout=function(timeout){this.$timeout=timeout},this.setValue=function(value){this.doc.setValue(value),this.deferredUpdate.schedule(this.$timeout)},this.getValue=function(callbackId){this.sender.callback(this.doc.getValue(),callbackId)},this.onUpdate=function(){},this.isPending=function(){return this.deferredUpdate.isPending()}}).call(Mirror.prototype)}),ace.define("ace/mode/html/saxparser",["require","exports","module"],function(acequire,exports,module){module.exports
 =function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a="function"==typeof acequire&&acequire;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw Error("Cannot find module \'"+o+"\'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}for(var i="function"==typeof acequire&&acequire,o=0;r.length>o;o++)s(r[o]);return s}({1:[function(_dereq_,module,exports){function isScopeMarker(node){return"http://www.w3.org/1999/xhtml"===node.namespaceURI?"applet"===node.localName||"caption"===node.localName||"marquee"===node.localName||"object"===node.localName||"table"===node.localName||"td"===node.localName||"th"===node.localName:"http://www.w3.org/1998/Math/MathML"===node.namespaceURI?"mi"===node.localName||"mo"===node.localName||"mn"===node.localName||"ms"===node.localName||"mtext"===node.localName||"annotation-xml"===node.localName:"http://www.w3.org/2000/svg"===node.namespaceURI?"foreignObject"===node.localN
 ame||"desc"===node.localName||"title"===node.localName:void 0}function isListItemScopeMarker(node){return isScopeMarker(node)||"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"ol"===node.localName||"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"ul"===node.localName}function isTableScopeMarker(node){return"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"table"===node.localName||"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"html"===node.localName}function isTableBodyScopeMarker(node){return"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"tbody"===node.localName||"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"tfoot"===node.localName||"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"thead"===node.localName||"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"html"===node.localName}function isTableRowScopeMarker(node){return"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"tr"===node.localName||"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"ht
 ml"===node.localName}function isButtonScopeMarker(node){return isScopeMarker(node)||"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"button"===node.localName}function isSelectScopeMarker(node){return!("http://www.w3.org/1999/xhtml"===node.namespaceURI&&"optgroup"===node.localName||"http://www.w3.org/1999/xhtml"===node.namespaceURI&&"option"===node.localName)}function ElementStack(){this.elements=[],this.rootNode=null,this.headElement=null,this.bodyElement=null}ElementStack.prototype._inScope=function(localName,isMarker){for(var i=this.elements.length-1;i>=0;i--){var node=this.elements[i];if(node.localName===localName)return!0;if(isMarker(node))return!1}},ElementStack.prototype.push=function(item){this.elements.push(item)},ElementStack.prototype.pushHtmlElement=function(item){this.rootNode=item.node,this.push(item)},ElementStack.prototype.pushHeadElement=function(item){this.headElement=item.node,this.push(item)},ElementStack.prototype.pushBodyElement=function(item){this.bodyEleme
 nt=item.node,this.push(item)},ElementStack.prototype.pop=function(){return this.elements.pop()},ElementStack.prototype.remove=function(item){this.elements.splice(this.elements.indexOf(item),1)},ElementStack.prototype.popUntilPopped=function(localName){var element;do element=this.pop();while(element.localName!=localName)},ElementStack.prototype.popUntilTableScopeMarker=function(){for(;!isTableScopeMarker(this.top);)this.pop()},ElementStack.prototype.popUntilTableBodyScopeMarker=function(){for(;!isTableBodyScopeMarker(this.top);)this.pop()},ElementStack.prototype.popUntilTableRowScopeMarker=function(){for(;!isTableRowScopeMarker(this.top);)this.pop()},ElementStack.prototype.item=function(index){return this.elements[index]},ElementStack.prototype.contains=function(element){return-1!==this.elements.indexOf(element)},ElementStack.prototype.inScope=function(localName){return this._inScope(localName,isScopeMarker)},ElementStack.prototype.inListItemScope=function(localName){return this._inS
 cope(localName,isListItemScopeMarker)},ElementStack.prototype.inTableScope=function(localName){return this._inScope(localName,isTableScopeMarker)},ElementStack.prototype.inButtonScope=function(localName){return this._inScope(localName,isButtonScopeMarker)},ElementStack.prototype.inSelectScope=function(localName){return this._inScope(localName,isSelectScopeMarker)},ElementStack.prototype.hasNumberedHeaderElementInScope=function(){for(var i=this.elements.length-1;i>=0;i--){var node=this.elements[i];if(node.isNumberedHeader())return!0;if(isScopeMarker(node))return!1}},ElementStack.prototype.furthestBlockForFormattingElement=function(element){for(var furthestBlock=null,i=this.elements.length-1;i>=0;i--){var node=this.elements[i];\nif(node.node===element)break;node.isSpecial()&&(furthestBlock=node)}return furthestBlock},ElementStack.prototype.findIndex=function(localName){for(var i=this.elements.length-1;i>=0;i--)if(this.elements[i].localName==localName)return i;return-1},ElementStack.pr
 ototype.remove_openElements_until=function(callback){for(var element,finished=!1;!finished;)element=this.elements.pop(),finished=callback(element);return element},Object.defineProperty(ElementStack.prototype,"top",{get:function(){return this.elements[this.elements.length-1]}}),Object.defineProperty(ElementStack.prototype,"length",{get:function(){return this.elements.length}}),exports.ElementStack=ElementStack},{}],2:[function(_dereq_,module,exports){function isAlphaNumeric(c){return c>="0"&&"9">=c||c>="a"&&"z">=c||c>="A"&&"Z">=c}function isHexDigit(c){return c>="0"&&"9">=c||c>="a"&&"f">=c||c>="A"&&"F">=c}function isDecimalDigit(c){return c>="0"&&"9">=c}var entities=_dereq_("html5-entities"),InputStream=_dereq_("./InputStream").InputStream,namedEntityPrefixes={};Object.keys(entities).forEach(function(entityKey){for(var i=0;entityKey.length>i;i++)namedEntityPrefixes[entityKey.substring(0,i+1)]=!0});var EntityParser={};EntityParser.consumeEntity=function(buffer,tokenizer,additionalAllo
 wedCharacter){var decodedCharacter="",consumedCharacters="",ch=buffer.char();if(ch===InputStream.EOF)return!1;if(consumedCharacters+=ch,"\t"==ch||"\\n"==ch||"\v"==ch||" "==ch||"<"==ch||"&"==ch)return buffer.unget(consumedCharacters),!1;if(additionalAllowedCharacter===ch)return buffer.unget(consumedCharacters),!1;if("#"==ch){if(ch=buffer.shift(1),ch===InputStream.EOF)return tokenizer._parseError("expected-numeric-entity-but-got-eof"),buffer.unget(consumedCharacters),!1;consumedCharacters+=ch;var radix=10,isDigit=isDecimalDigit;if("x"==ch||"X"==ch){if(radix=16,isDigit=isHexDigit,ch=buffer.shift(1),ch===InputStream.EOF)return tokenizer._parseError("expected-numeric-entity-but-got-eof"),buffer.unget(consumedCharacters),!1;consumedCharacters+=ch}if(isDigit(ch)){for(var code="";ch!==InputStream.EOF&&isDigit(ch);)code+=ch,ch=buffer.char();code=parseInt(code,radix);var replacement=this.replaceEntityNumbers(code);if(replacement&&(tokenizer._parseError("invalid-numeric-entity-replaced"),code=
 replacement),code>65535&&1114111>=code){code-=65536;var first=((1047552&code)>>10)+55296,second=(1023&code)+56320;decodedCharacter=String.fromCharCode(first,second)}else decodedCharacter=String.fromCharCode(code);return";"!==ch&&(tokenizer._parseError("numeric-entity-without-semicolon"),buffer.unget(ch)),decodedCharacter}return buffer.unget(consumedCharacters),tokenizer._parseError("expected-numeric-entity"),!1}if(ch>="a"&&"z">=ch||ch>="A"&&"Z">=ch){for(var mostRecentMatch="";namedEntityPrefixes[consumedCharacters]&&(entities[consumedCharacters]&&(mostRecentMatch=consumedCharacters),";"!=ch)&&(ch=buffer.char(),ch!==InputStream.EOF);)consumedCharacters+=ch;return mostRecentMatch?(decodedCharacter=entities[mostRecentMatch],";"===ch||!additionalAllowedCharacter||!isAlphaNumeric(ch)&&"="!==ch?(consumedCharacters.length>mostRecentMatch.length&&buffer.unget(consumedCharacters.substring(mostRecentMatch.length)),";"!==ch&&tokenizer._parseError("named-entity-without-semicolon"),decodedCharac
 ter):(buffer.unget(consumedCharacters),!1)):(tokenizer._parseError("expected-named-entity"),buffer.unget(consumedCharacters),!1)}},EntityParser.replaceEntityNumbers=function(c){switch(c){case 0:return 65533;case 19:return 16;case 128:return 8364;case 129:return 129;case 130:return 8218;case 131:return 402;case 132:return 8222;case 133:return 8230;case 134:return 8224;case 135:return 8225;case 136:return 710;case 137:return 8240;case 138:return 352;case 139:return 8249;case 140:return 338;case 141:return 141;case 142:return 381;case 143:return 143;case 144:return 144;case 145:return 8216;case 146:return 8217;case 147:return 8220;case 148:return 8221;case 149:return 8226;case 150:return 8211;case 151:return 8212;case 152:return 732;case 153:return 8482;case 154:return 353;case 155:return 8250;case 156:return 339;case 157:return 157;case 158:return 382;case 159:return 376;default:if(c>=55296&&57343>=c||c>1114111)return 65533;if(c>=1&&8>=c||c>=14&&31>=c||c>=127&&159>=c||c>=64976&&65007>
 =c||11==c||65534==c||131070==c||3145726==c||196607==c||262142==c||262143==c||327678==c||327679==c||393214==c||393215==c||458750==c||458751==c||524286==c||524287==c||589822==c||589823==c||655358==c||655359==c||720894==c||720895==c||786430==c||786431==c||851966==c||851967==c||917502==c||917503==c||983038==c||983039==c||1048574==c||1048575==c||1114110==c||1114111==c)return c}},exports.EntityParser=EntityParser},{"./InputStream":3,"html5-entities":12}],3:[function(_dereq_,module,exports){function InputStream(){this.data="",this.start=0,this.committed=0,this.eof=!1,this.lastLocation={line:0,column:0}}InputStream.EOF=-1,InputStream.DRAIN=-2,InputStream.prototype={slice:function(){if(this.start>=this.data.length){if(!this.eof)throw InputStream.DRAIN;return InputStream.EOF}return this.data.slice(this.start,this.data.length)},"char":function(){if(!this.eof&&this.start>=this.data.length-1)throw InputStream.DRAIN;if(this.start>=this.data.length)return InputStream.EOF;var ch=this.data[this.star
 t++];return"\\r"===ch&&(ch="\\n"),ch},advance:function(amount){if(this.start+=amount,this.start>=this.data.length){if(!this.eof)throw InputStream.DRAIN;return InputStream.EOF}this.committed>this.data.length/2&&(this.lastLocation=this.location(),this.data=this.data.slice(this.committed),this.start=this.start-this.committed,this.committed=0)},matchWhile:function(re){if(this.eof&&this.start>=this.data.length)return"";var r=RegExp("^"+re+"+"),m=r.exec(this.slice());if(m){if(!this.eof&&m[0].length==this.data.length-this.start)throw InputStream.DRAIN;return this.advance(m[0].length),m[0]}return""},matchUntil:function(re){var m,s;if(s=this.slice(),s===InputStream.EOF)return"";if(m=RegExp(re+(this.eof?"|$":"")).exec(s)){var t=this.data.slice(this.start,this.start+m.index);return this.advance(m.index),t.replace(/\\r/g,"\\n").replace(/\\n{2,}/g,"\\n")}throw InputStream.DRAIN},append:function(data){this.data+=data},shift:function(n){if(!this.eof&&this.start+n>=this.data.length)throw InputStrea
 m.DRAIN;if(this.eof&&this.start>=this.data.length)return InputStream.EOF;var d=""+this.data.slice(this.start,this.start+n);return this.advance(Math.min(n,this.data.length-this.start)),d},peek:function(n){if(!this.eof&&this.start+n>=this.data.length)throw InputStream.DRAIN;return this.eof&&this.start>=this.data.length?InputStream.EOF:""+this.data.slice(this.start,Math.min(this.start+n,this.data.length))},length:function(){return this.data.length-this.start-1},unget:function(d){d!==InputStream.EOF&&(this.start-=d.length)},undo:function(){this.start=this.committed},commit:function(){this.committed=this.start},location:function(){var lastLine=this.lastLocation.line,lastColumn=this.lastLocation.column,read=this.data.slice(0,this.committed),newlines=read.match(/\\n/g),line=newlines?lastLine+newlines.length:lastLine,column=newlines?read.length-read.lastIndexOf("\\n")-1:lastColumn+read.length;return{line:line,column:column}}},exports.InputStream=InputStream},{}],4:[function(_dereq_,module,e
 xports){function StackItem(namespaceURI,localName,attributes,node){this.localName=localName,this.namespaceURI=namespaceURI,this.attributes=attributes,this.node=node}function getAttribute(item,name){for(var i=0;item.attributes.length>i;i++)if(item.attributes[i].nodeName==name)return item.attributes[i].nodeValue;return null}var SpecialElements={"http://www.w3.org/1999/xhtml":["address","applet","area","article","aside","base","basefont","bgsound","blockquote","body","br","button","caption","center","col","colgroup","dd","details","dir","div","dl","dt","embed","fieldset","figcaption","figure","footer","form","frame","frameset","h1","h2","h3","h4","h5","h6","head","header","hgroup","hr","html","iframe","img","input","isindex","li","link","listing","main","marquee","menu","menuitem","meta","nav","noembed","noframes","noscript","object","ol","p","param","plaintext","pre","script","section","select","source","style","summary","table","tbody","td","textarea","tfoot","th","thead","title","tr
 ","track","ul","wbr","xmp"],"http://www.w3.org/1998/Math/MathML":["mi","mo","mn","ms","mtext","annotation-xml"],"http://www.w3.org/2000/svg":["foreignObject","desc","title"]};StackItem.prototype.isSpecial=function(){return this.namespaceURI in SpecialElements&&SpecialElements[this.namespaceURI].indexOf(this.localName)>-1},StackItem.prototype.isFosterParenting=function(){return"http://www.w3.org/1999/xhtml"===this.namespaceURI?"table"===this.localName||"tbody"===this.localName||"tfoot"===this.localName||"thead"===this.localName||"tr"===this.localName:!1},StackItem.prototype.isNumberedHeader=function(){return"http://www.w3.org/1999/xhtml"===this.namespaceURI?"h1"===this.localName||"h2"===this.localName||"h3"===this.localName||"h4"===this.localName||"h5"===this.localName||"h6"===this.localName:!1},StackItem.prototype.isForeign=function(){return"http://www.w3.org/1999/xhtml"!=this.namespaceURI},StackItem.prototype.isHtmlIntegrationPoint=function(){if("http://www.w3.org/1998/Math/MathML"
 ===this.namespaceURI){if("annotation-xml"!==this.localName)return!1;var encoding=getAttribute(this,"encoding");return encoding?(encoding=encoding.toLowerCase(),"text/html"===encoding||"application/xhtml+xml"===encoding):!1}return"http://www.w3.org/2000/svg"===this.namespaceURI?"foreignObject"===this.localName||"desc"===this.localName||"title"===this.localName:!1},StackItem.prototype.isMathMLTextIntegrationPoint=function(){return"http://www.w3.org/1998/Math/MathML"===this.namespaceURI?"mi"===this.localName||"mo"===this.localName||"mn"===this.localName||"ms"===this.localName||"mtext"===this.localName:!1},exports.StackItem=StackItem},{}],5:[function(_dereq_,module,exports){function isWhitespace(c){return" "===c||"\\n"===c||"\t"===c||"\\r"===c||"\\f"===c}function isAlpha(c){return c>="A"&&"Z">=c||c>="a"&&"z">=c}function Tokenizer(tokenHandler){this._tokenHandler=tokenHandler,this._state=Tokenizer.DATA,this._inputStream=new InputStream,this._currentToken=null,this._temporaryBuffer="",thi
 s._additionalAllowedCharacter=""}var InputStream=_dereq_("./InputStream").InputStream,EntityParser=_dereq_("./EntityParser").EntityParser;Tokenizer.prototype._parseError=function(code,args){this._tokenHandler.parseError(code,args)},Tokenizer.prototype._emitToken=function(token){if("StartTag"===token.type)for(var i=1;token.data.length>i;i++)token.data[i].nodeName||token.data.splice(i--,1);else"EndTag"===token.type&&(token.selfClosing&&this._parseError("self-closing-flag-on-end-tag"),0!==token.data.length&&this._parseError("attributes-in-end-tag"));this._tokenHandler.processToken(token),"StartTag"===token.type&&token.selfClosing&&!this._tokenHandler.isSelfClosingFlagAcknowledged()&&this._parseError("non-void-element-with-trailing-solidus",{name:token.name})},Tokenizer.prototype._emitCurrentToken=function(){this._state=Tokenizer.DATA,this._emitToken(this._currentToken)},Tokenizer.prototype._currentAttribute=function(){return this._currentToken.data[this._currentToken.data.length-1]},To
 kenizer.prototype.setState=function(state){this._state=state},Tokenizer.prototype.tokenize=function(source){function data_state(buffer){var data=buffer.char();if(data===InputStream.EOF)return tokenizer._emitToken({type:"EOF",data:null}),!1;if("&"===data)tokenizer.setState(character_reference_in_data_state);else if("<"===data)tokenizer.setState(tag_open_state);else if("\\0"===data)tokenizer._emitToken({type:"Characters",data:data}),buffer.commit();else{var chars=buffer.matchUntil("&|<|\\0");tokenizer._emitToken({type:"Characters",data:data+chars}),buffer.commit()}return!0}function character_reference_in_data_state(buffer){var character=EntityParser.consumeEntity(buffer,tokenizer);return tokenizer.setState(data_state),tokenizer._emitToken({type:"Characters",data:character||"&"}),!0}function rcdata_state(buffer){var data=buffer.char();if(data===InputStream.EOF)return tokenizer._emitToken({type:"EOF",data:null}),!1;if("&"===data)tokenizer.setState(character_reference_in_rcdata_state);el
 se if("<"===data)tokenizer.setState(rcdata_less_than_sign_state);else if("\\0"===data)tokenizer._parseError("invalid-codepoint"),tokenizer._emitToken({type:"Characters",data:"\ufffd"}),buffer.commit();else{var chars=buffer.matchUntil("&|<|\\0");tokenizer._emitToken({type:"Characters",data:data+chars}),buffer.commit()}return!0}function character_reference_in_rcdata_state(buffer){var character=EntityParser.consumeEntity(buffer,tokenizer);return tokenizer.setState(rcdata_state),tokenizer._emitToken({type:"Characters",data:character||"&"}),!0}function rawtext_state(buffer){var data=buffer.char();if(data===InputStream.EOF)return tokenizer._emitToken({type:"EOF",data:null}),!1;if("<"===data)tokenizer.setState(rawtext_less_than_sign_state);else if("\\0"===data)tokenizer._parseError("invalid-codepoint"),tokenizer._emitToken({type:"Characters",data:"\ufffd"}),buffer.commit();else{var chars=buffer.matchUntil("<|\\0");tokenizer._emitToken({type:"Characters",data:data+chars})}return!0}function 
 plaintext_state(buffer){var data=buffer.char();if(data===InputStream.EOF)return tokenizer._emitToken({type:"EOF",data:null}),!1;if("\\0"===data)tokenizer._parseError("invalid-codepoint"),tokenizer._emitToken({type:"Characters",data:"\ufffd"}),buffer.commit();else{var chars=buffer.matchUntil("\\0");tokenizer._emitToken({type:"Characters",data:data+chars})}return!0}function script_data_state(buffer){var data=buffer.char();if(data===InputStream.EOF)return tokenizer._emitToken({type:"EOF",data:null}),!1;if("<"===data)tokenizer.setState(script_data_less_than_sign_state);else if("\\0"===data)tokenizer._parseError("invalid-codepoint"),tokenizer._emitToken({type:"Characters",data:"\ufffd"}),buffer.commit();else{var chars=buffer.matchUntil("<|\\0");tokenizer._emitToken({type:"Characters",data:data+chars})}return!0}function rcdata_less_than_sign_state(buffer){var data=buffer.char();return"/"===data?(this._temporaryBuffer="",tokenizer.setState(rcdata_end_tag_open_state)):(tokenizer._emitToken(
 {type:"Characters",data:"<"}),buffer.unget(data),tokenizer.setState(rcdata_state)),!0}function rcdata_end_tag_open_state(buffer){var data=buffer.char();return isAlpha(data)?(this._temporaryBuffer+=data,tokenizer.setState(rcdata_end_tag_name_state)):(tokenizer._emitToken({type:"Characters",data:"</"}),buffer.unget(data),tokenizer.setState(rcdata_state)),!0}function rcdata_end_tag_name_state(buffer){var appropriate=tokenizer._currentToken&&tokenizer._currentToken.name===this._temporaryBuffer.toLowerCase(),data=buffer.char();return isWhitespace(data)&&appropriate?(tokenizer._currentToken={type:"EndTag",name:this._temporaryBuffer,data:[],selfClosing:!1},tokenizer.setState(before_attribute_name_state)):"/"===data&&appropriate?(tokenizer._currentToken={type:"EndTag",name:this._temporaryBuffer,data:[],selfClosing:!1},tokenizer.setState(self_closing_tag_state)):">"===data&&appropriate?(tokenizer._currentToken={type:"EndTag",name:this._temporaryBuffer,data:[],selfClosing:!1},tokenizer._emitC
 urrentToken(),tokenizer.setState(data_state)):isAlpha(data)?(this._temporaryBuffer+=data,buffer.commit()):(tokenizer._emitToken({type:"Characters",data:"</"+this._temporaryBuffer}),buffer.unget(data),tokenizer.setState(rcdata_state)),!0}function rawtext_less_than_sign_state(buffer){var data=buffer.char();return"/"===data?(this._temporaryBuffer="",tokenizer.setState(rawtext_end_tag_open_state)):(tokenizer._emitToken({type:"Characters",data:"<"}),buffer.unget(data),tokenizer.setState(rawtext_state)),!0}function rawtext_end_tag_open_state(buffer){var data=buffer.char();return isAlpha(data)?(this._temporaryBuffer+=data,tokenizer.setState(rawtext_end_tag_name_state)):(tokenizer._emitToken({type:"Characters",data:"</"}),buffer.unget(data),tokenizer.setState(rawtext_state)),!0}function rawtext_end_tag_name_state(buffer){var appropriate=tokenizer._currentToken&&tokenizer._currentToken.name===this._temporaryBuffer.toLowerCase(),data=buffer.char();return isWhitespace(data)&&appropriate?(token
 izer._currentToken={type:"EndTag",name:this._temporaryBuffer,data:[],selfClosing:!1},tokenizer.setState(before_attribute_name_state)):"/"===data&&appropriate?(tokenizer._currentToken={type:"EndTag",name:this._temporaryBuffer,data:[],selfClosing:!1},tokenizer.setState(self_closing_tag_state)):">"===data&&appropriate?(tokenizer._currentToken={type:"EndTag",name:this._temporaryBuffer,data:[],selfClosing:!1},tokenizer._emitCurrentToken(),tokenizer.setState(data_state)):isAlpha(data)?(this._temporaryBuffer+=data,buffer.commit()):(tokenizer._emitToken({type:"Characters",data:"</"+this._temporaryBuffer}),buffer.unget(data),tokenizer.setState(rawtext_state)),!0}function script_data_less_than_sign_state(buffer){var data=buffer.char();return"/"===data?(this._temporaryBuffer="",tokenizer.setState(script_data_end_tag_open_state)):"!"===data?(tokenizer._emitToken({type:"Characters",data:"<!"}),tokenizer.setState(script_data_escape_start_state)):(tokenizer._emitToken({type:"Characters",data:"<"})
 ,buffer.unget(data),tokenizer.setState(script_data_state)),!0}function script_data_end_tag_open_state(buffer){var data=buffer.char();return isAlpha(data)?(this._temporaryBuffer+=data,tokenizer.setState(script_data_end_tag_name_state)):(tokenizer._emitToken({type:"Characters",data:"</"}),buffer.unget(data),tokenizer.setState(script_data_state)),!0}function script_data_end_tag_name_state(buffer){var appropriate=tokenizer._currentToken&&tokenizer._currentToken.name===this._temporaryBuffer.toLowerCase(),data=buffer.char();return isWhitespace(data)&&appropriate?(tokenizer._currentToken={type:"EndTag",name:"script",data:[],selfClosing:!1},tokenizer.setState(before_attribute_name_state)):"/"===data&&appropriate?(tokenizer._currentToken={type:"EndTag",name:"script",data:[],selfClosing:!1},tokenizer.setState(self_closing_tag_state)):">"===data&&appropriate?(tokenizer._currentToken={type:"EndTag",name:"script",data:[],selfClosing:!1},tokenizer._emitCurrentToken()):isAlpha(data)?(this._tempora
 ryBuffer+=data,buffer.commit()):(tokenizer._emitToken({type:"Characters",data:"</"+this._temporaryBuffer}),buffer.unget(data),tokenizer.setState(script_data_state)),!0}function script_data_escape_start_state(buffer){var data=buffer.char();return"-"===data?(tokenizer._emitToken({type:"Characters",data:"-"}),tokenizer.setState(script_data_escape_start_dash_state)):(buffer.unget(data),tokenizer.setState(script_data_state)),!0}function script_data_escape_start_dash_state(buffer){var data=buffer.char();return"-"===data?(tokenizer._emitToken({type:"Characters",data:"-"}),tokenizer.setState(script_data_escaped_dash_dash_state)):(buffer.unget(data),tokenizer.setState(script_data_state)),!0}function script_data_escaped_state(buffer){var data=buffer.char();if(data===InputStream.EOF)buffer.unget(data),tokenizer.setState(data_state);else if("-"===data)tokenizer._emitToken({type:"Characters",data:"-"}),tokenizer.setState(script_data_escaped_dash_state);else if("<"===data)tokenizer.setState(scrip
 t_data_escaped_less_then_sign_state);else if("\\0"===data)tokenizer._parseError("invalid-codepoint"),tokenizer._emitToken({type:"Characters",data:"\ufffd"}),buffer.commit();else{var chars=buffer.matchUntil("<|-|\\0");tokenizer._emitToken({type:"Characters",data:data+chars})}return!0}function script_data_escaped_dash_state(buffer){var data=buffer.char();return data===InputStream.EOF?(buffer.unget(data),tokenizer.setState(data_state)):"-"===data?(tokenizer._emitToken({type:"Characters",data:"-"}),tokenizer.setState(script_data_escaped_dash_dash_state)):"<"===data?tokenizer.setState(script_data_escaped_less_then_sign_state):"\\0"===data?(tokenizer._parseError("invalid-codepoint"),tokenizer._emitToken({type:"Characters",data:"\ufffd"}),tokenizer.setState(script_data_escaped_state)):(tokenizer._emitToken({type:"Characters",data:data}),tokenizer.setState(script_data_escaped_state)),!0}function script_data_escaped_dash_dash_state(buffer){var data=buffer.char();return data===InputStream.EOF
 ?(tokenizer._parseError("eof-in-script"),buffer.unget(data),tokenizer.setState(data_state)):"<"===data?tokenizer.setState(script_data_escaped_less_then_sign_state):">"===data?(tokenizer._emitToken({type:"Characters",data:">"}),tokenizer.setState(script_data_state)):"\\0"===data?(tokenizer._parseError("invalid-codepoint"),tokenizer._emitToken({type:"Characters",data:"\ufffd"}),tokenizer.setState(script_data_escaped_state)):(tokenizer._emitToken({type:"Characters",data:data}),tokenizer.setState(script_data_escaped_state)),!0}function script_data_escaped_less_then_sign_state(buffer){var data=buffer.char();return"/"===data?(this._temporaryBuffer="",tokenizer.setState(script_data_escaped_end_tag_open_state)):isAlpha(data)?(tokenizer._emitToken({type:"Characters",data:"<"+data}),this._temporaryBuffer=data,tokenizer.setState(script_data_double_escape_start_state)):(tokenizer._emitToken({type:"Characters",data:"<"}),buffer.unget(data),tokenizer.setState(script_data_escaped_state)),!0}functi
 on script_data_escaped_end_tag_open_state(buffer){var data=buffer.char();return isAlpha(data)?(this._temporaryBuffer=data,tokenizer.setState(script_data_escaped_end_tag_name_state)):(tokenizer._emitToken({type:"Characters",data:"</"}),buffer.unget(data),tokenizer.setState(script_data_escaped_state)),!0}function script_data_escaped_end_tag_name_state(buffer){var appropriate=tokenizer._currentToken&&tokenizer._currentToken.name===this._temporaryBuffer.toLowerCase(),data=buffer.char();return isWhitespace(data)&&appropriate?(tokenizer._currentToken={type:"EndTag",name:"script",data:[],selfClosing:!1},tokenizer.setState(before_attribute_name_state)):"/"===data&&appropriate?(tokenizer._currentToken={type:"EndTag",name:"script",data:[],selfClosing:!1},tokenizer.setState(self_closing_tag_state)):">"===data&&appropriate?(tokenizer._currentToken={type:"EndTag",name:"script",data:[],selfClosing:!1},tokenizer.setState(data_state),tokenizer._emitCurrentToken()):isAlpha(data)?(this._temporaryBuff
 er+=data,buffer.commit()):(tokenizer._emitToken({type:"Characters",data:"</"+this._temporaryBuffer}),buffer.unget(data),tokenizer.setState(script_data_escaped_state)),!0}function script_data_double_escape_start_state(buffer){var data=buffer.char();return isWhitespace(data)||"/"===data||">"===data?(tokenizer._emitToken({type:"Characters",data:data}),"script"===this._temporaryBuffer.toLowerCase()?tokenizer.setState(script_data_double_escaped_state):tokenizer.setState(script_data_escaped_state)):isAlpha(data)?(tokenizer._emitToken({type:"Characters",data:data}),this._temporaryBuffer+=data,buffer.commit()):(buffer.unget(data),tokenizer.setState(script_data_escaped_state)),!0}function script_data_double_escaped_state(buffer){var data=buffer.char();return data===InputStream.EOF?(tokenizer._parseError("eof-in-script"),buffer.unget(data),tokenizer.setState(data_state)):"-"===data?(tokenizer._emitToken({type:"Characters",data:"-"}),tokenizer.setState(script_data_double_escaped_dash_state)):"
 <"===data?(tokenizer._emitToken({type:"Characters",data:"<"}),tokenizer.setState(script_data_double_escaped_less_than_sign_state)):"\\0"===data?(tokenizer._parseError("invalid-codepoint"),tokenizer._emitToken({type:"Characters",data:"\ufffd"}),buffer.commit()):(tokenizer._emitToken({type:"Characters",data:data}),buffer.commit()),!0}function script_data_double_escaped_dash_state(buffer){var data=buffer.char();return data===InputStream.EOF?(tokenizer._parseError("eof-in-script"),buffer.unget(data),tokenizer.setState(data_state)):"-"===data?(tokenizer._emitToken({type:"Characters",data:"-"}),tokenizer.setState(script_data_double_escaped_dash_dash_state)):"<"===data?(tokenizer._emitToken({type:"Characters",data:"<"}),tokenizer.setState(script_data_double_escaped_less_than_sign_state)):"\\0"===data?(tokenizer._parseError("invalid-codepoint"),tokenizer._emitToken({type:"Characters",data:"\ufffd"}),tokenizer.setState(script_data_double_escaped_state)):(tokenizer._emitToken({type:"Character
 s",data:data}),tokenizer.setState(script_data_double_escaped_state)),!0}function script_data_double_escaped_dash_dash_state(buffer){var data=buffer.char();return data===InputStream.EOF?(tokenizer._parseError("eof-in-script"),buffer.unget(data),tokenizer.setState(data_state)):"-"===data?(tokenizer._emitToken({type:"Characters",data:"-"}),buffer.commit()):"<"===data?(tokenizer._emitToken({type:"Characters",data:"<"}),tokenizer.setState(script_data_double_escaped_less_than_sign_state)):">"===data?(tokenizer._emitToken({type:"Characters",data:">"}),tokenizer.setState(script_data_state)):"\\0"===data?(tokenizer._parseError("invalid-codepoint"),tokenizer._emitToken({type:"Characters",data:"\ufffd"}),tokenizer.setState(script_data_double_escaped_state)):(tokenizer._emitToken({type:"Characters",data:data}),tokenizer.setState(script_data_double_escaped_state)),!0}function script_data_double_escaped_less_than_sign_state(buffer){var data=buffer.char();return"/"===data?(tokenizer._emitToken({ty
 pe:"Characters",data:"/"}),this._temporaryBuffer="",tokenizer.setState(script_data_double_escape_end_state)):(buffer.unget(data),tokenizer.setState(script_data_double_escaped_state)),!0}function script_data_double_escape_end_state(buffer){var data=buffer.char();return isWhitespace(data)||"/"===data||">"===data?(tokenizer._emitToken({type:"Characters",data:data}),"script"===this._temporaryBuffer.toLowerCase()?tokenizer.setState(script_data_escaped_state):tokenizer.setState(script_data_double_escaped_state)):isAlpha(data)?(tokenizer._emitToken({type:"Characters",data:data}),this._temporaryBuffer+=data,buffer.commit()):(buffer.unget(data),tokenizer.setState(script_data_double_escaped_state)),!0}function tag_open_state(buffer){var data=buffer.char();return data===InputStream.EOF?(tokenizer._parseError("bare-less-than-sign-at-eof"),tokenizer._emitToken({type:"Characters",data:"<"}),buffer.unget(data),tokenizer.setState(data_state)):isAlpha(data)?(tokenizer._currentToken={type:"StartTag",
 name:data.toLowerCase(),data:[]},tokenizer.setState(tag_name_state)):"!"===data?tokenizer.setState(markup_declaration_open_state):"/"===data?tokenizer.setState(close_tag_open_state):">"===data?(tokenizer._parseError("expected-tag-name-but-got-right-bracket"),tokenizer._emitToken({type:"Characters",data:"<>"}),tokenizer.setState(data_state)):"?"===data?(tokenizer._parseError("expected-tag-name-but-got-question-mark"),buffer.unget(data),tokenizer.setState(bogus_comment_state)):(tokenizer._parseError("expected-tag-name"),tokenizer._emitToken({type:"Characters",data:"<"}),buffer.unget(data),tokenizer.setState(data_state)),!0}function close_tag_open_state(buffer){var data=buffer.char();return data===InputStream.EOF?(tokenizer._parseError("expected-closing-tag-but-got-eof"),tokenizer._emitToken({type:"Characters",data:"</"}),buffer.unget(data),tokenizer.setState(data_state)):isAlpha(data)?(tokenizer._currentToken={type:"EndTag",name:data.toLowerCase(),data:[]},tokenizer.setState(tag_name_
 state)):">"===data?(tokenizer._parseError("expected-closing-tag-but-got-right-bracket"),tokenizer.setState(data_state)):(tokenizer._parseError("expected-closing-tag-but-got-char",{data:data}),buffer.unget(data),tokenizer.setState(bogus_comment_state)),!0}function tag_name_state(buffer){var data=buffer.char();return data===InputStream.EOF?(tokenizer._parseError("eof-in-tag-name"),buffer.unget(data),tokenizer.setState(data_state)):isWhitespace(data)?tokenizer.setState(before_attribute_name_state):isAlpha(data)?tokenizer._currentToken.name+=data.toLowerCase():">"===data?tokenizer._emitCurrentToken():"/"===data?tokenizer.setState(self_closing_tag_state):"\\0"===data?(tokenizer._parseError("invalid-codepoint"),tokenizer._currentToken.name+="\ufffd"):tokenizer._currentToken.name+=data,buffer.commit(),!0}function before_attribute_name_state(buffer){var data=buffer.char();if(data===InputStream.EOF)tokenizer._parseError("expected-attribute-name-but-got-eof"),buffer.unget(data),tokenizer.setS
 tate(data_state);else{if(isWhitespace(data))return!0;isAlpha(data)?(tokenizer._currentToken.data.push({nodeName:data.toLower

<TRUNCATED>

[31/49] knox git commit: KNOX-1132 - Address Coverity Defects in gateway-service-remoteconfig (Phil Zampino via lmccay)

Posted by mo...@apache.org.
KNOX-1132 - Address Coverity Defects in gateway-service-remoteconfig (Phil Zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/bfb556c9
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/bfb556c9
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/bfb556c9

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: bfb556c91c8722d42db1895a2a2e71fa7466baea
Parents: 19362b9
Author: Larry McCay <lm...@hortonworks.com>
Authored: Wed Dec 6 11:29:09 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Wed Dec 6 11:29:28 2017 -0500

----------------------------------------------------------------------
 .../config/remote/zk/CuratorClientService.java  |  7 +++---
 .../RemoteConfigurationRegistryJAASConfig.java  | 24 ++++++++++++++------
 2 files changed, 21 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/bfb556c9/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
index 0000f48..f9b5ab3 100644
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
@@ -366,9 +366,10 @@ class CuratorClientService implements ZooKeeperClientService {
                 throws Exception {
             ChildData childData = pathChildrenCacheEvent.getData();
             if (childData != null) {
-                delegate.childEvent(client,
-                                    adaptType(pathChildrenCacheEvent.getType()),
-                                    childData.getPath());
+                ChildEntryListener.Type eventType = adaptType(pathChildrenCacheEvent.getType());
+                if (eventType != null) {
+                    delegate.childEvent(client, eventType, childData.getPath());
+                }
             }
         }
 
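The hunk above reduces to a simple guard: adapt the Curator event type first, and only call the delegate when the adaptation yields a usable value. Below is a minimal, self-contained sketch of that pattern; the enum names, the mapping table, and the ChildListener interface are hypothetical stand-ins, not the actual Curator PathChildrenCacheEvent.Type or Knox ChildEntryListener types.

    // Sketch of the "adapt, then delegate only if adaptable" guard (illustrative names only).
    import java.util.EnumMap;
    import java.util.Map;

    public class EventAdapterSketch {

        enum ExternalEventType { CHILD_ADDED, CHILD_REMOVED, CONNECTION_LOST }
        enum InternalEventType { ADDED, REMOVED }

        private static final Map<ExternalEventType, InternalEventType> MAPPING =
                new EnumMap<>(ExternalEventType.class);
        static {
            MAPPING.put(ExternalEventType.CHILD_ADDED, InternalEventType.ADDED);
            MAPPING.put(ExternalEventType.CHILD_REMOVED, InternalEventType.REMOVED);
            // CONNECTION_LOST has no internal equivalent and intentionally stays unmapped.
        }

        interface ChildListener {
            void childEvent(InternalEventType type, String path);
        }

        static void forward(ChildListener delegate, ExternalEventType raw, String path) {
            InternalEventType adapted = MAPPING.get(raw);
            if (adapted != null) {               // only delegate when the type adapts cleanly
                delegate.childEvent(adapted, path);
            }
        }

        public static void main(String[] args) {
            ChildListener listener = (type, path) -> System.out.println(type + " " + path);
            forward(listener, ExternalEventType.CHILD_ADDED, "/knox/config");     // delivered
            forward(listener, ExternalEventType.CONNECTION_LOST, "/knox/config"); // silently skipped
        }
    }

The point of the guard is that the adapter legitimately returns null for event types that have no internal equivalent, so the delegate never has to defend against a null type.
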

http://git-wip-us.apache.org/repos/asf/knox/blob/bfb556c9/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
index d51d7d5..0b5a693 100644
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
@@ -36,6 +36,7 @@ class RemoteConfigurationRegistryJAASConfig extends Configuration {
 
     // Underlying SASL mechanisms supported
     enum SASLMechanism {
+        Unsupported,
         Kerberos,
         Digest
     }
@@ -92,9 +93,16 @@ class RemoteConfigurationRegistryJAASConfig extends Configuration {
     }
 
     private AppConfigurationEntry[] createEntries(RemoteConfigurationRegistryConfig config) {
-        // Only supporting a single app config entry per configuration/context
-        AppConfigurationEntry[] result = new AppConfigurationEntry[1];
-        result[0] = createEntry(config);
+        AppConfigurationEntry[] result = null;
+
+        AppConfigurationEntry entry = createEntry(config);
+        if (entry != null) {
+            // Only supporting a single app config entry per configuration/context
+            result = new AppConfigurationEntry[1];
+            result[0] = createEntry(config);
+        } else {
+            result = new AppConfigurationEntry[0];
+        }
         return result;
     }
 
@@ -130,9 +138,11 @@ class RemoteConfigurationRegistryJAASConfig extends Configuration {
                 opts.put("principal", config.getPrincipal());
         }
 
-        entry = new AppConfigurationEntry(getLoginModuleName(config.getRegistryType(), saslMechanism),
-                                          AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                                          opts);
+        if (!opts.isEmpty()) {
+            entry = new AppConfigurationEntry(getLoginModuleName(config.getRegistryType(), saslMechanism),
+                                              AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                                              opts);
+        }
 
         return entry;
     }
@@ -155,7 +165,7 @@ class RemoteConfigurationRegistryJAASConfig extends Configuration {
     }
 
     private static SASLMechanism getSASLMechanism(String authType) {
-        SASLMechanism result = null;
+        SASLMechanism result = SASLMechanism.Unsupported;
         for (SASLMechanism at : SASLMechanism.values()) {
             if (at.name().equalsIgnoreCase(authType)) {
                 result = at;

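The RemoteConfigurationRegistryJAASConfig changes above all follow the same theme: never hand a null to a caller that does not expect one. Below is a compact sketch of the three guards (a non-null Unsupported default from the mechanism lookup, an empty entry array when no entry can be built, and entry creation only when there are options to configure). The class, the hardcoded Krb5LoginModule name, and the simplified option handling are illustrative assumptions, not the actual Knox implementation.

    // Sketch of the null-safety guards, using the real javax.security.auth.login API.
    import javax.security.auth.login.AppConfigurationEntry;
    import java.util.HashMap;
    import java.util.Map;

    public class JaasConfigSketch {

        enum SASLMechanism { Unsupported, Kerberos, Digest }

        // Never returns null: unknown auth types fall back to Unsupported.
        static SASLMechanism getSASLMechanism(String authType) {
            SASLMechanism result = SASLMechanism.Unsupported;
            for (SASLMechanism m : SASLMechanism.values()) {
                if (m.name().equalsIgnoreCase(authType)) {
                    result = m;
                    break;
                }
            }
            return result;
        }

        static AppConfigurationEntry[] createEntries(String authType, String principal) {
            AppConfigurationEntry entry = createEntry(authType, principal);
            if (entry == null) {
                return new AppConfigurationEntry[0];       // empty, never an array holding null
            }
            return new AppConfigurationEntry[] { entry };   // reuse the entry already built
        }

        static AppConfigurationEntry createEntry(String authType, String principal) {
            Map<String, String> opts = new HashMap<>();
            if (getSASLMechanism(authType) == SASLMechanism.Kerberos && principal != null) {
                opts.put("principal", principal);
            }
            if (opts.isEmpty()) {
                return null;                                // nothing to configure, so no entry
            }
            return new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, opts);
        }
    }

Returning a zero-length AppConfigurationEntry[] rather than an array containing a null element lets JAAS consumers iterate the result without extra checks; the sketch also reuses the entry it already built instead of constructing it a second time.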

[09/49] knox git commit: KNOX-1128 - Readonly protection for generated topologies in Knox Admin UI

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.map
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.map b/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.map
deleted file mode 100644
index e7783d4..0000000
--- a/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"sources":["webpack:///vendor.48771018d3da89d3269f.bundle.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_export.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/facade/lang.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/facade/lang.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_an-object.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/output/output_ast.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_fails.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_is-object.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_wks.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/
 @angular/compiler/src/identifiers.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_core.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_global.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/private_import_core.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-dp.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/dom_adapter.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_descriptors.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/compile_metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_has.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_redefine.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js
 /modules/_string-html.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/facade/lang.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/facade/errors.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_to-length.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/parse_util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/di.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/control_value_accessor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_strict-method.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_to-iobject.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_to-object.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/Observ
 able.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/compiler_util/identifier_util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ml_parser/interpolation_config.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/template_parser/template_ast.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/validators.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/facade/lang.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-sap.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/control_container.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_array-methods.js","webpack:////U
 sers/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_defined.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_hide.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-gpo.js","webpack:///(webpack)/buildin/global.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/invalid_pipe_argument_error.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ml_parser/ast.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/schema/element_schema_registry.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/shared.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/enums.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-
 admin-ui/~/core-js/modules/_meta.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-gopd.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/directive_wrapper_compiler.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ml_parser/tags.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/ng_control.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_a-function.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_cof.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_property-desc.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_to-primitive.js","webpack:////Users/sumit.gupta/Projects/k
 nox/gateway-admin-ui/~/@angular/compiler/src/config.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/facade/collection.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ml_parser/parser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/util/decorators.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/facade/async.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/facade/lang.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/events/event_manager.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_ctx.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-create.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-keys.js","webpack:////User
 s/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_to-integer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/compiler_util/expression_converter.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/expression_parser/parser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/url_resolver.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/di/injector.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/facade/collection.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/compiler.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/abstract_form_group_directive.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/ng_form.js","webpack:////Users/sumit.gupta/Projec
 ts/knox/gateway-admin-ui/~/@angular/forms/src/directives/radio_control_value_accessor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/reactive_directives/form_group_directive.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/reactive_directives/form_group_name.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/headers.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/interfaces.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_add-to-unscopables.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_iobject.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_iterators.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-gopn.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-a
 dmin-ui/~/core-js/modules/_to-index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_uid.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/ng2-bs3-modal/components/modal.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/Subject.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/util/root.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/localization.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/location/location_strategy.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/expression_parser/lexer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ml_parser/html_parser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/selector.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/
 @angular/compiler/src/template_parser/template_parser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/constants.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/view_compiler.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/application_tokens.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/change_detection/change_detection.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/change_detection/change_detection_util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/change_detection/constants.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/di/metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/component_factory_resolver.js","webpack:////Users/sumit.g
 upta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/element_ref.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/view_type.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/view_utils.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/profile/profile.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/checkbox_value_accessor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/default_value_accessor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/ng_model_group.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/reactive_errors.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/select_control_value_accessor.js","webpack://
 //Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/select_multiple_control_value_accessor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/model.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/base_response_options.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/http_utils.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/url_search_params.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/dom_tokens.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_fix-re-wks.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_for-of.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-gops.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js
 /modules/_object-pie.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_set-to-string-tag.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_shared.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_string-trim.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.to-string.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.iterator.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/Subscription.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/location/location.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/location/platform_location.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/c
 ompiler/src/animation/animation_parser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/chars.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/directive_normalizer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/directive_resolver.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/expression_parser/ast.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/digest.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/parse_util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/metadata_resolver.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ml_parser/html_tags.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ng_module_comp
 iler.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ng_module_resolver.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/output/abstract_emitter.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/output/class_builder.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/pipe_resolver.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/resource_loader.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/style_compiler.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/compile_method.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/deps.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/animation_player.js
 ","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/application_init.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/application_ref.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/change_detection/differs/default_iterable_differ.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/console.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/di/forward_ref.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/di/opaque_token.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/di/reflective_key.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/di/reflective_provider.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/facade/async.js","webpack:////Users/sumit.gupta/Projects/knox/g
 ateway-admin-ui/~/@angular/core/src/linker/component_factory.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/reflection/reflection.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/reflection/reflector_reader.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/render/api.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/testability/testability.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/type.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/util/lang.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/zone/ng_zone.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/abstract_control_directive.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directive
 s/ng_control_status.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/ng_model.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/number_value_accessor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/range_value_accessor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/reactive_directives/form_control_directive.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/reactive_directives/form_control_name.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/validators.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/backends/browser_xhr.js","webpack:////Users/sumit.gupta/Projects/knox/g
 ateway-admin-ui/~/@angular/http/src/base_request_options.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/static_response.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/animation_driver.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/debug/ng_probe.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/dom_renderer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/events/hammer_gestures.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/shared_styles_host.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/security/url_sanitizer.js","webpack:////Use
 rs/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_an-instance.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_collection.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_enum-bug-keys.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_fails-is-regexp.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_flags.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_inherit-if-required.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_is-array.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_is-regexp.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_iter-define.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_library.js","webpack:////Users/sumit.gupta/Projects/knox/
 gateway-admin-ui/~/core-js/modules/_math-expm1.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_math-sign.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_redefine-all.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_set-proto.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_set-species.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_shared-key.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_string-context.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_string-ws.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/Subscriber.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/symbol/rxSubscriber.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/util/errorObject.js","we
 bpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/util/isFunction.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/directives/index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/directives/ng_switch.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/facade/intl.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/animation/animation_ast.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/animation/animation_compiler.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/assertions.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/compiler_util/binding_util.js","webpack:////Users/sumit.gupta/Projects/knox/g
 ateway-admin-ui/~/@angular/compiler/src/compiler_util/render_util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/extractor_merger.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/i18n_ast.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/i18n_html_parser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/message_bundle.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/serializers/serializer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/serializers/xliff.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/serializers/xmb.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/serializers/xml_helper.js","webpack:////Users/sumit.gupt
 a/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/serializers/xtb.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ml_parser/xml_parser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/output/ts_emitter.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/output/value_util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/provider_analyzer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/runtime_compiler.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/schema/dom_element_schema_registry.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/style_url_resolver.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/template_parser/binding_parser.js","webpack:////Users
 /sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/template_parser/template_preparser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/compile_element.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/compile_query.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/compile_view.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/animation_constants.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/animation_group_player.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/animation_queue.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/animation_sequence_player.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@a
 ngular/core/src/animation/animation_transition_event.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/change_detection/differs/default_keyvalue_differ.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/change_detection/differs/iterable_differs.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/change_detection/differs/keyvalue_differs.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/debug/debug_node.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/di/reflective_errors.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/error_handler.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/i18n/tokens.js","webpack:////Users/sumit.gupta/Proje
 cts/knox/gateway-admin-ui/~/@angular/core/src/linker/debug_context.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/errors.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/ng_module_factory.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/ng_module_factory_loader.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/template_ref.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/view_container_ref.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/view_ref.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/metadata/lifecycle_hooks.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angula
 r/core/src/metadata/view.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/reflection/reflection_capabilities.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/reflection/reflector.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/security.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/error_examples.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/template_driven_errors.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/facade/collection.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/form_builder.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/private_import_core.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/backends/browser_
 jsonp.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/backends/jsonp_backend.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/backends/xhr_backend.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/body.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/http.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/http/src/static_request.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser-dynamic/src/platform_providers.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser-dynamic/src/resource_loader/resource_loader_impl.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/browser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/browser/browser_adap
 ter.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/browser/location/browser_platform_location.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/browser/testability.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/browser/title.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/events/dom_events.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/events/key_events.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/web_animations_driver.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/private_import_core.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/security/dom_sanitization_service.js","web
 pack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/brace/index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/brace/mode/html.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/brace/theme/monokai.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_a-number-value.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_array-includes.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_array-reduce.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_bind.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_classof.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_collection-strong.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_create-property.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/
 core-js/modules/_dom-create.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_html.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_ie8-dom-define.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_is-array-iter.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_is-integer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_iter-call.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_iter-create.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_iter-detect.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_iter-step.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_math-log1p.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-assign.js","
 webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-dps.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-gopn-ext.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_object-keys-internal.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_parse-float.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_parse-int.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_string-at.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_string-repeat.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_wks-ext.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/core.get-iterator-method.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.iterator.js","web
 pack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.map.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.regexp.flags.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.regexp.match.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.regexp.replace.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.regexp.search.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.regexp.split.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.set.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.symbol.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/web.dom.iterable.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/ng2-bs3-modal/components/modal-body.js","webpack:
 ////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/ng2-bs3-modal/components/modal-footer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/ng2-bs3-modal/components/modal-header.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/ng2-bs3-modal/components/modal-instance.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/ng2-bs3-modal/ng2-bs3-modal.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/add/operator/toPromise.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/operator/toPromise.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/util/tryCatch.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/common_module.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/directives/ng_class.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/directives/ng_for.js"
 ,"webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/directives/ng_if.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/directives/ng_plural.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/directives/ng_style.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/directives/ng_template_outlet.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/facade/collection.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/facade/errors.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/location.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/location/hash_location_strategy.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/location/path_location_strategy.js","webpack://
 //Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/async_pipe.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/date_pipe.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/i18n_plural_pipe.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/i18n_select_pipe.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/json_pipe.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/lowercase_pipe.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/number_pipe.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/slice_pipe.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/common/src/pipes/uppercase_pipe.js","webpack:////Users/sumit.gupta/Projects/knox
 /gateway-admin-ui/~/@angular/common/src/private_import_core.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/animation/styles_collection.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/compiler.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/i18n_parser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/serializers/placeholder.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/i18n/translation_bundle.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/lifecycle_reflector.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ml_parser/icu_ast_expander.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/ml_parser/lexer.js","webpack:////Users/sumit.gupta/Projects/kno
 x/gateway-admin-ui/~/@angular/compiler/src/ml_parser/xml_tags.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/offline_compiler.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/output/abstract_js_emitter.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/output/output_interpreter.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/output/output_jit.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/output/path_util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/schema/dom_security_schema.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/shadow_css.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/compile_pipe.js","webpack:////Users/sumit.gupta/Projects/k
 nox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/event_binder.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/lifecycle_binder.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/property_binder.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/view_binder.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/compiler/src/view_compiler/view_builder.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/animation_keyframe.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/animation_style_util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/animation_styles.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/animation_
 transition.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/animation/view_animation_map.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/application_module.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/change_detection.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/change_detection/change_detector_ref.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/core.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/core_private_export.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/debug/debug_renderer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/di/reflective_injector.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker.js","webpack:////Users/sumit.gupta
 /Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/animation_view_context.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/element_injector.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/query_list.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/system_js_ng_module_factory_loader.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/view.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/linker/view_container.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/metadata/di.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/metadata/directives.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/metadata/ng_module.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-a
 dmin-ui/~/@angular/core/src/platform_core_providers.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/profile/wtf_impl.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/render.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/util.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/core/src/zone.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/directives/normalize_validator.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/form_providers.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/forms/src/forms.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~
 /@angular/http/src/http_module.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser-dynamic/index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser-dynamic/src/facade/lang.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser-dynamic/src/platform-browser-dynamic.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser-dynamic/src/private_export.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser-dynamic/src/private_import_platform-browser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser-dynamic/src/resource_loader/resource_loader_cache.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/browser/generic_browser_adapter.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular
 /platform-browser/src/browser/location/history.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/browser/tools/common_tools.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/browser/tools/tools.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/debug/by.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/dom/web_animations_player.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/facade/browser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/facade/collection.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/platform-browser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/private_export.js","webpack://
 //Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/security/html_sanitizer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/@angular/platform-browser/src/security/style_sanitizer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/ng2-ace-editor/src/component.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/ng2-ace-editor/src/directive.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/brace/worker/css.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/brace/worker/html.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/brace/worker/javascript.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/array.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/date.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/function.js","webpack:////Users/sumit.gupta/Projects/knox/g
 ateway-admin-ui/~/core-js/es6/map.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/math.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/number.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/object.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/parse-float.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/parse-int.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/reflect.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/regexp.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/set.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/string.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es6/symbol.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/es7/reflect.j
 s","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_array-copy-within.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_array-fill.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_array-from-iterable.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_array-species-constructor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_array-species-create.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_collection-weak.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_date-to-primitive.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_enum-keys.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_invoke.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_k
 eyof.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_own-keys.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_same-value.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/_wks-define.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.copy-within.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.every.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.fill.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.filter.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.find-index.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.find.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.fo
 r-each.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.from.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.index-of.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.is-array.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.join.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.last-index-of.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.map.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.of.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.reduce-right.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.reduce.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js
 /modules/es6.array.slice.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.some.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.sort.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.array.species.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.date.now.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.date.to-iso-string.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.date.to-json.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.date.to-primitive.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.date.to-string.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.function.bind.js","webpack:////Users/sumit.gupta/Projects/knox/gateway
 -admin-ui/~/core-js/modules/es6.function.has-instance.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.function.name.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.acosh.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.asinh.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.atanh.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.cbrt.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.clz32.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.cosh.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.expm1.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.fround.js","webpack:////Users/sumit.gupta/Projects/knox/g
 ateway-admin-ui/~/core-js/modules/es6.math.hypot.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.imul.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.log10.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.log1p.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.log2.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.sign.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.sinh.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.tanh.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.math.trunc.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.number.constructor.js","webpack:////Users/sumit.gupta/Projects/knox/gatew
 ay-admin-ui/~/core-js/modules/es6.number.epsilon.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.number.is-finite.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.number.is-integer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.number.is-nan.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.number.is-safe-integer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.number.max-safe-integer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.number.min-safe-integer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.number.parse-float.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.number.parse-int.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.
 number.to-fixed.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.number.to-precision.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.assign.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.create.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.define-properties.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.define-property.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.freeze.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.get-own-property-descriptor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.get-own-property-names.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.get-pro
 totype-of.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.is-extensible.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.is-frozen.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.is-sealed.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.is.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.keys.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.prevent-extensions.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.seal.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.object.set-prototype-of.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.parse-float.js","webpack:////Users/sumit.gupta/Projects/
 knox/gateway-admin-ui/~/core-js/modules/es6.parse-int.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.apply.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.construct.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.define-property.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.delete-property.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.enumerate.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.get-own-property-descriptor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.get-prototype-of.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.get.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/
 core-js/modules/es6.reflect.has.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.is-extensible.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.own-keys.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.prevent-extensions.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.set-prototype-of.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.reflect.set.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.regexp.constructor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.regexp.to-string.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.anchor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.big.js","webp
 ack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.blink.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.bold.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.code-point-at.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.ends-with.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.fixed.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.fontcolor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.fontsize.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.from-code-point.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.includes.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/
 ~/core-js/modules/es6.string.italics.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.link.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.raw.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.repeat.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.small.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.starts-with.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.strike.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.sub.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.sup.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es6.string.trim.js","webpack:////Users/sumit.gupta/Projects/knox/ga
 teway-admin-ui/~/core-js/modules/es6.weak-map.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es7.reflect.define-metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es7.reflect.delete-metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es7.reflect.get-metadata-keys.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es7.reflect.get-metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es7.reflect.get-own-metadata-keys.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es7.reflect.get-own-metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es7.reflect.has-metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/core-js/modules/es7.reflect.has-own-metadata.js","webpack:////Users/sumit.gupta/Projects/knox/ga
 teway-admin-ui/~/core-js/modules/es7.reflect.metadata.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/ng2-ace-editor/ng2-ace-editor.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/ng2-bs3-modal/directives/autofocus.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/process/browser.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/Observer.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/SubjectSubscription.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/add/observable/fromEvent.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/add/operator/map.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/observable/FromEventObservable.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/observable/PromiseObservable.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/observable/fromE
 vent.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/observable/fromPromise.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/operator/map.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/symbol/observable.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/util/ObjectUnsubscribedError.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/util/UnsubscriptionError.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/util/isArray.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/util/isObject.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/rxjs/util/toSubscriber.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/w3c-blob/browser.js","webpack:///(webpack)/buildin/amd-define.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/~/zone.js/dist/zone.js"],"names":["webpackJsonp","module","
 exports","__webpack_require__","__WEBPACK_IMPORTED_MODULE_0__src_core__","d","global","core","hide","redefine","ctx","PROTOTYPE","$export","type","name","source","key","own","out","exp","IS_FORCED","F","IS_GLOBAL","G","IS_STATIC","S","IS_PROTO","P","IS_BIND","B","target","expProto","undefined","Function","call","U","W","R","isPresent","obj","isBlank","isStrictStringMap","Object","getPrototypeOf","STRING_MAP_PROTO","stringify","token","overriddenName","res","toString","newLineIndex","indexOf","substring","isJsObject","o","getSymbolIterator","_symbolIterator","globalScope","Symbol","iterator","keys","getOwnPropertyNames","Map","prototype","i","length","isPrimitive","escapeRegExp","s","replace","NumberWrapper","window","WorkerGlobalScope","self","_global","assert","condition","parseIntAutoRadix","text","result","parseInt","isNaN","Error","isNumeric","value","parseFloat","scheduleMicroTask","fn","Zone","current","getTypeNameForDebugging","looseIdentical","a","b","print","console","log",
 "warn","isObject","it","TypeError","replaceVarInExpression","varName","newValue","expression","transformer","_ReplaceVariableTransformer","visitExpression","findReadVarNames","stmts","finder","_VariableFinder","visitAllStatements","varNames","variable","ReadVarExpr","importExpr","id","typeParams","ExternalExpr","importType","typeModifiers","__WEBPACK_IMPORTED_MODULE_0__facade_lang__","ExternalType","literalArr","values","LiteralArrayExpr","literalMap","LiteralMapExpr","not","expr","NotExpr","params","body","FunctionExpr","literal","LiteralExpr","TypeModifier","Type","BuiltinTypeName","ArrayType","MapType","DYNAMIC_TYPE","BOOL_TYPE","NUMBER_TYPE","STRING_TYPE","FUNCTION_TYPE","NULL_TYPE","BinaryOperator","Expression","BuiltinVar","BuiltinMethod","FnParam","BinaryOperatorExpr","THIS_EXPR","SUPER_EXPR","NULL_EXPR","StmtModifier","Statement","DeclareVarStmt","ExpressionStatement","ReturnStatement","ClassField","ClassMethod","ClassGetter","ClassStmt","IfStmt","ExpressionTransformer","__e
 xtends","this","__","constructor","p","hasOwnProperty","create","modifiers","hasModifier","modifier","BuiltinType","_super","visitType","visitor","context","visitBuiltintType","visitExternalType","of","visitArrayType","valueType","visitMapType","Dynamic","Bool","Int","Number","String","Null","prop","ReadPropExpr","index","ReadKeyExpr","callMethod","InvokeMethodExpr","callFn","InvokeFunctionExpr","instantiate","InstantiateExpr","conditional","trueCase","falseCase","ConditionalExpr","equals","rhs","Equals","notEquals","NotEquals","identical","Identical","notIdentical","NotIdentical","minus","Minus","plus","Plus","divide","Divide","multiply","Multiply","modulo","Modulo","and","And","or","Or","lower","Lower","lowerEquals","LowerEquals","bigger","Bigger","biggerEquals","BiggerEquals","TYPED_NULL_EXPR","cast","CastExpr","toStmt","builtin","visitReadVarExpr","set","WriteVarExpr","visitWriteVarExpr","toDeclStmt","WriteKeyExpr","receiver","visitWriteKeyExpr","WritePropExpr","visitWritePropEx
 pr","method","args","visitInvokeMethodExpr","visitInvokeFunctionExpr","classExpr","visitInstantiateExpr","visitLiteralExpr","visitExternalExpr","visitConditionalExpr","visitNotExpr","visitCastExpr","statements","visitFunctionExpr","DeclareFunctionStmt","operator","lhs","visitBinaryOperatorExpr","visitReadPropExpr","visitReadKeyExpr","entries","visitLiteralArrayExpr","visitLiteralMapExpr","This","Super","CatchError","CatchStack","visitStatement","visitDeclareVarStmt","visitDeclareFunctionStmt","visitExpressionStmt","visitReturnStmt","AbstractClassPart","parent","fields","getters","constructorMethod","methods","visitDeclareClassStmt","visitIfStmt","TryCatchStmt","CommentStmt","comment","visitCommentStmt","bodyStmts","catchStmts","visitTryCatchStmt","ThrowStmt","error","visitThrowStmt","ast","visitAllExpressions","_this","map","entry","exprs","stmt","RecursiveExpressionVisitor","forEach","_varName","_newValue","apply","arguments","Set","add","exec","e","store","uid","USE_SYMBOL","$expo
 rts","assetUrl","pkg","path","resolveIdentifier","identifier","__WEBPACK_IMPORTED_MODULE_1__compile_metadata__","moduleUrl","reference","__WEBPACK_IMPORTED_MODULE_2__private_import_core__","runtime","identifierToken","resolveIdentifierToken","resolveEnumIdentifier","enumType","resolvedEnum","resolveEnum","__WEBPACK_IMPORTED_MODULE_0__angular_core__","Identifiers","APP_VIEW_MODULE_URL","VIEW_UTILS_MODULE_URL","CD_MODULE_URL","ANIMATION_STYLE_UTIL_ASSET_URL","ANALYZE_FOR_ENTRY_COMPONENTS","ViewUtils","AppView","DebugAppView","ViewContainer","ElementRef","ViewContainerRef","ChangeDetectorRef","RenderComponentType","QueryList","TemplateRef","TemplateRef_","CodegenComponentFactoryResolver","ComponentFactoryResolver","ComponentFactory","ComponentRef_","ComponentRef","NgModuleFactory","NgModuleInjector","RegisterModuleFactoryFn","ValueUnwrapper","Injector","ViewEncapsulation","ViewType","ChangeDetectionStrategy","StaticNodeDebugInfo","DebugContext","Renderer","SimpleChange","UNINITIALIZED"
 ,"ChangeDetectorStatus","checkBinding","devModeEqual","inlineInterpolate","interpolate","castByValue","EMPTY_ARRAY","EMPTY_MAP","createRenderElement","selectOrCreateRenderHostElement","pureProxies","pureProxy1","pureProxy2","pureProxy3","pureProxy4","pureProxy5","pureProxy6","pureProxy7","pureProxy8","pureProxy9","pureProxy10","SecurityContext","AnimationKeyframe","AnimationStyles","NoOpAnimationPlayer","AnimationGroupPlayer","AnimationSequencePlayer","prepareFinalAnimationStyles","balanceAnimationKeyframes","clearStyles","renderStyles","collectAndResolveStyles","LOCALE_ID","TRANSLATIONS_FORMAT","setBindingDebugInfo","setBindingDebugInfoForChanges","AnimationTransition","InlineArray","inlineArrays","InlineArray2","InlineArray4","InlineArray8","InlineArray16","EMPTY_INLINE_ARRAY","InlineArrayDynamic","subscribeToRenderElement","createRenderComponentType","noop","version","__e","Math","__g","isDefaultChangeDetectionStrategy","LifecycleHooks","LIFECYCLE_HOOKS_VALUES","ReflectorReader",
 "registerModuleFactory","view_utils","Console","reflector","Reflector","ReflectionCapabilities","ANY_STATE","DEFAULT_STATE","EMPTY_STATE","FILL_STYLE_FLAG","ComponentStillLoadingError","RenderDebugInfo","AnimationPlayer","ViewMetadata","anObject","IE8_DOM_DEFINE","toPrimitive","dP","defineProperty","f","O","Attributes","getDOM","_DOM","setRootDomAdapter","adapter","DomAdapter","resourceLoaderType","get","_attrToPropMap","enumerable","configurable","unimplemented","createHostComponentMeta","compMeta","template","__WEBPACK_IMPORTED_MODULE_3__selector__","parse","selector","getMatchingElementTemplate","CompileDirectiveMetadata","CompileTypeMetadata","isHost","CompileTemplateMetadata","encapsulation","None","templateUrl","styles","styleUrls","ngContentSelectors","animations","changeDetection","Default","inputs","outputs","host","isComponent","providers","viewProviders","queries","viewQueries","_normalizeArray","isStaticSymbol","__WEBPACK_IMPORTED_MODULE_1__facade_collection__","__WEBPAC
 K_IMPORTED_MODULE_2__facade_lang__","__WEBPACK_IMPORTED_MODULE_4__util__","CompileAnimationEntryMetadata","CompileAnimationStateDeclarationMetadata","CompileAnimationStateTransitionMetadata","CompileAnimationKeyframesSequenceMetadata","CompileAnimationStyleMetadata","CompileAnimationAnimateMetadata","CompileAnimationWithStepsMetadata","CompileAnimationSequenceMetadata","CompileAnimationGroupMetadata","CompileIdentifierMetadata","CompileDiDependencyMetadata","CompileProviderMetadata","CompileFactoryMetadata","CompileTokenMetadata","CompileQueryMetadata","CompileStylesheetMetadata","CompilePipeMetadata","CompileNgModuleMetadata","TransitiveCompileNgModuleMetadata","ProviderMeta","HOST_REG_EXP","CompileMetadataWithIdentifier","definitions","CompileAnimationStateMetadata","stateNameExpr","stateChangeExpr","steps","CompileAnimationMetadata","offset","timings","_a","_b","prefix","isAttribute","isSelf","isSkipSelf","isOptional","isValue","useClass","useValue","useExisting","useFactory","de
 ps","multi","diDeps","identifierIsInstance","lifecycleHooks","selectors","descendants","first","propertyName","read","externalStylesheets","interpolation","flatten","toSummary","isSummary","anim","exportAs","hostListeners","hostProperties","hostAttributes","entryComponents","matches","match","inputsMap","bindConfig","parts","outputsMap","pure","declaredDirectives","exportedDirectives","declaredPipes","exportedPipes","bootstrapComponents","importedModules","exportedModules","schemas","transitiveModule","directiveLoaders","toInjectorSummary","toDirectiveSummary","modules","directives","pipes","directivesSet","pipesSet","dir","pipe","dependencies","has","SRC","TO_STRING","$toString","TPL","split","inspectSource","val","safe","isFunction","join","fails","defined","quot","createHTML","string","tag","attribute","p1","NAME","test","toLowerCase","isDate","Date","valueOf","BaseError","WrappedError","message","nativeError","_nativeError","stack","originalError","toInteger","min","ParseLocatio
 n","ParseSourceFile","ParseSourceSpan","ParseErrorLevel","ParseError","file","line","col","url","content","start","end","details","span","msg","level","FATAL","ctxStart","contextStr","ctxEnd","ctxLen","ctxLines","__WEBPACK_IMPORTED_MODULE_0__di_metadata__","__WEBPACK_IMPORTED_MODULE_1__di_forward_ref__","__WEBPACK_IMPORTED_MODULE_2__di_injector__","__WEBPACK_IMPORTED_MODULE_3__di_reflective_injector__","__WEBPACK_IMPORTED_MODULE_4__di_reflective_provider__","__WEBPACK_IMPORTED_MODULE_5__di_reflective_key__","__WEBPACK_IMPORTED_MODULE_6__di_opaque_token__","NG_VALUE_ACCESSOR","arg","IObject","root_1","toSubscriber_1","observable_1","Observable","subscribe","_isScalar","_subscribe","lift","observable","observerOrNext","complete","sink","toSubscriber","syncErrorThrowable","syncErrorThrown","syncErrorValue","next","PromiseCtor","root","Rx","config","Promise","resolve","reject","subscription","err","unsubscribe","subscriber","$$observable","createDiTokenExpression","__WEBPACK_IMPORTED_MO
 DULE_2__output_output_ast__","Const","createInlineArray","__WEBPACK_IMPORTED_MODULE_1__identifiers__","log2","ceil","identifierSpec","concat","createPureProxy","argCount","pureProxyProp","builder","push","pureProxyId","ctorStmts","createEnumExpression","enumValue","enumName","find","propName","__WEBPACK_IMPORTED_MODULE_0__assertions__","InterpolationConfig","DEFAULT_INTERPOLATION_CONFIG","fromArray","markers","templateVisitAll","asts","visit","astResult","TextAst","BoundTextAst","AttrAst","BoundElementPropertyAst","BoundEventAst","ReferenceAst","VariableAst","ElementAst","EmbeddedTemplateAst","BoundDirectivePropertyAst","DirectiveAst","ProviderAst","ProviderAstType","NgContentAst","PropertyBindingType","ngContentIndex","sourceSpan","visitText","visitBoundText","visitAttr","securityContext","needsRuntimeSecurityContext","unit","visitElementProperty","Animation","phase","handler","calcFullName","visitEvent","visitReference","visitVariable","attrs","references","hasViewContainer","chil
 dren","endSourceSpan","visitElement","variables","visitEmbeddedTemplate","directiveName","templateName","visitDirectiveProperty","directive","hostEvents","visitDirective","multiProvider","eager","providerType","visitNgContent","isEmptyInputValue","_convertToPromise","__WEBPACK_IMPORTED_MODULE_4__private_import_core__","__WEBPACK_IMPORTED_MODULE_1_rxjs_operator_toPromise__","_executeValidators","control","validators","v","_executeAsyncValidators","_mergeErrors","arrayOfErrors","reduce","errors","__WEBPACK_IMPORTED_MODULE_3__facade_lang__","__WEBPACK_IMPORTED_MODULE_2__facade_collection__","merge","n","NG_VALIDATORS","NG_ASYNC_VALIDATORS","Validators","required","minLength","minlength","requiredLength","actualLength","maxLength","maxlength","pattern","nullValidator","regex","regexStr","RegExp","requiredPattern","actualValue","c","compose","presentValidators","filter","composeAsync","promises","all","then","setValueOnPath","name_1","shift","KEY","dashCaseToCamelCase","input","DASH_CASE
 _REGEXP","m","_i","toUpperCase","splitAtColon","defaultValues","_splitAt","splitAtPeriod","character","characterIndex","slice","trim","sanitizeIdentifier","visitValue","Array","isArray","visitArray","visitStringMap","visitPrimitive","visitOther","MODULE_SUFFIX","ValueTransformer","SyncAsyncResult","arr","syncResult","asyncResult","__WEBPACK_IMPORTED_MODULE_0__abstract_control_directive__","ControlContainer","toObject","toLength","asc","TYPE","$create","IS_MAP","IS_FILTER","IS_SOME","IS_EVERY","IS_FIND_INDEX","NO_HOLES","$this","callbackfn","that","createDesc","object","shared","getOrCreateMetadataMap","targetKey","targetMetadata","keyMetadata","ordinaryHasOwnMetadata","MetadataKey","metadataMap","ordinaryGetOwnMetadata","ordinaryDefineOwnMetadata","MetadataValue","ordinaryOwnMetadataKeys","_","toMetaKey","IE_PROTO","ObjectProto","g","eval","__WEBPACK_IMPORTED_MODULE_0__facade_errors__","__WEBPACK_IMPORTED_MODULE_1__facade_lang__","InvalidPipeArgumentError","visitAll","nodes","Text",
 "Expansion","ExpansionCase","Attribute","Element","Comment","switchValue","cases","switchValueSourceSpan","visitExpansion","valueSourceSpan","expSourceSpan","visitExpansionCase","valueSpan","visitAttribute","startSourceSpan","visitComment","ElementSchemaRegistry","controlPath","setUpControl","_throwError","valueAccessor","validator","__WEBPACK_IMPORTED_MODULE_1__validators__","asyncValidator","writeValue","registerOnChange","viewToModelUpdate","markAsDirty","setValue","emitModelToViewChange","registerOnTouched","markAsTouched","emitModelEvent","setDisabledState","registerOnDisabledChange","isDisabled","_rawValidators","registerOnValidatorChange","updateValueAndValidity","_rawAsyncValidators","cleanUpControl","_noControlError","_clearChangeFns","setUpFormContainer","messageEnd","composeValidators","__WEBPACK_IMPORTED_MODULE_4__normalize_validator__","composeAsyncValidators","isPropertyUpdated","changes","viewModel","change","isFirstChange","currentValue","isBuiltInAccessor","BUILTIN_
 ACCESSORS","some","selectValueAccessor","valueAccessors","defaultAccessor","builtinAccessor","customAccessor","__WEBPACK_IMPORTED_MODULE_3__default_value_accessor__","__WEBPACK_IMPORTED_MODULE_2__checkbox_value_accessor__","__WEBPACK_IMPORTED_MODULE_5__number_value_accessor__","__WEBPACK_IMPORTED_MODULE_6__radio_control_value_accessor__","__WEBPACK_IMPORTED_MODULE_7__range_value_accessor__","__WEBPACK_IMPORTED_MODULE_8__select_control_value_accessor__","__WEBPACK_IMPORTED_MODULE_9__select_multiple_control_value_accessor__","RequestMethod","ReadyState","ResponseType","ContentType","ResponseContentType","META","setDesc","isExtensible","FREEZE","preventExtensions","setMeta","w","fastKey","getWeak","onFreeze","meta","NEED","pIE","toIObject","gOPD","getOwnPropertyDescriptor","addNgDoCheckMethod","changedVar","__WEBPACK_IMPORTED_MODULE_9__output_output_ast__","CHANGED_FIELD_NAME","lifecycleStmts","genChanges","onChangesStmts","ngOnChanges","CONTEXT_FIELD_NAME","CHANGES_FIELD_NAME","compil
 erConfig","logBindingUpdate","__WEBPACK_IMPORTED_MODULE_6__identifiers__","VIEW_VAR","RENDER_EL_VAR","RESET_CHANGES_STMT","ngOnInit","ngDoCheck","THROW_ON_CHANGE_VAR","addCheckInputMethod","field","__WEBPACK_IMPORTED_MODULE_1__compiler_util_binding_util__","onChangeStatements","CURR_VALUE_VAR","methodBody","currValExpr","forceUpdate","FORCE_UPDATE_VAR","addCheckHostMethod","hostProps","methodParams","COMPONENT_VIEW_VAR","hostProp","hostPropIdx","evalResult","__WEBPACK_IMPORTED_MODULE_2__compiler_util_expression_converter__","bindingId","securityContextExpr","checkBindingStmts","isAnimation","__WEBPACK_IMPORTED_MODULE_3__compiler_util_render_util__","EVENT_HANDLER_FIELD_NAME","updateStmts","detachStmts","addHandleEventMethod","resultVar","actionStmts","hostListener","eventIdx","trueStmts","preventDefault","EVENT_NAME_VAR","fullName","event","addSubscribeMethod","dirMeta","emitterPropName","emitterIdx","eventName","paramName","subscriptionFieldName","SubscribeObservable","Bind","destr
 oyStmts","parseHostBindings","exprParser","schemaRegistry","parser","__WEBPACK_IMPORTED_MODULE_13__template_parser_binding_parser__","__WEBPACK_IMPORTED_MODULE_7__ml_parser_interpolation_config__","sourceFileName","sourceFile","__WEBPACK_IMPORTED_MODULE_10__parse_util__","parsedHostProps","createDirectiveHostPropertyAsts","parsedHostListeners","createDirectiveHostEventAsts","ParseResult","reportParseErrors","parseErrors","warnings","WARNING","_console","__WEBPACK_IMPORTED_MODULE_4__config__","__WEBPACK_IMPORTED_MODULE_5__expression_parser_parser__","__WEBPACK_IMPORTED_MODULE_8__output_class_builder__","__WEBPACK_IMPORTED_MODULE_11__private_import_core__","__WEBPACK_IMPORTED_MODULE_12__schema_element_schema_registry__","DirectiveWrapperCompiler","DirectiveWrapperExpressions","DirectiveWrapperCompileResult","dirWrapperClassVar","_exprParser","_schemaRegistry","dirWrapperClassName","compile","hostParseResult","DirectiveWrapperBuilder","inputFieldName","classStmt","build","decorators","
 ctorParameters","dirLifecycleHooks","OnChanges","OnInit","DoCheck","ngOnDestroy","OnDestroy","dirDepParamNames","Private","ctorParams","builders","depsExpr","dirWrapper","view","renderElement","throwOnChange","checkHost","componentView","runtimeSecurityContexts","ngOnDetach","renderEl","usedEvents","eventListener","needsSubscribe","eventFlags","eventUsed","handleEvent","splitNsName","elementName","colonIndex","getNsPrefix","mergeNsAndName","localName","TagContentType","NAMED_ENTITIES","Aacute","aacute","Acirc","acirc","acute","AElig","aelig","Agrave","agrave","alefsym","Alpha","alpha","amp","ang","apos","Aring","aring","asymp","Atilde","atilde","Auml","auml","bdquo","Beta","beta","brvbar","bull","cap","Ccedil","ccedil","cedil","cent","Chi","chi","circ","clubs","cong","copy","crarr","cup","curren","dagger","Dagger","darr","dArr","deg","Delta","delta","diams","Eacute","eacute","Ecirc","ecirc","Egrave","egrave","empty","emsp","ensp","Epsilon","epsilon","equiv","Eta","eta","ETH","eth","
 Euml","euml","euro","exist","fnof","forall","frac12","frac14","frac34","frasl","Gamma","gamma","ge","gt","harr","hArr","hearts","hellip","Iacute","iacute","Icirc","icirc","iexcl","Igrave","igrave","image","infin","int","Iota","iota","iquest","isin","Iuml","iuml","Kappa","kappa","Lambda","lambda","lang","laquo","larr","lArr","lceil","ldquo","le","lfloor","lowast","loz","lrm","lsaquo","lsquo","lt","macr","mdash","micro","middot","Mu","mu","nabla","nbsp","ndash","ne","ni","notin","nsub","Ntilde","ntilde","Nu","nu","Oacute","oacute","Ocirc","ocirc","OElig","oelig","Ograve","ograve","oline","Omega","omega","Omicron","omicron","oplus","ordf","ordm","Oslash","oslash","Otilde","otilde","otimes","Ouml","ouml","para","permil","perp","Phi","phi","Pi","pi","piv","plusmn","pound","prime","Prime","prod","Psi","psi","radic","rang","raquo","rarr","rArr","rceil","rdquo","real","reg","rfloor","Rho","rho","rlm","rsaquo","rsquo","sbquo","Scaron","scaron","sdot","sect","shy","Sigma","sigma","sigmaf","si
 m","spades","sub","sube","sum","sup","sup1","sup2","sup3","supe","szlig","Tau","tau","there4","Theta","theta","thetasym","thinsp","THORN","thorn","tilde","times","trade","Uacute","uacute","uarr","uArr","Ucirc","ucirc","Ugrave","ugrave","uml","upsih","Upsilon","upsilon","Uuml","uuml","weierp","Xi","xi","Yacute","yacute","yen","yuml","Yuml","Zeta","zeta","zwj","zwnj","getPropertyInView","property","callingView","definedView","viewProp","currView","declarationElement","_ReplaceViewTransformer","injectFromViewParentInjector","optional","viewExpr","viewType","__WEBPACK_IMPORTED_MODULE_3__private_import_core__","HOST","__WEBPACK_IMPORTED_MODULE_0__compiler_util_identifier_util__","getViewClassName","component","embeddedTemplateIndex","getHandleEventMethodName","elementIndex","_viewExpr","_view","_isThis","classType","NgControl","_parent","bitmap","writable","CompilerConfig","_c","renderTypes","DefaultRenderTypes","_d","defaultEncapsulation","Emulated","genDebugInfo","_e","useJit","_genDeb
 ugInfo","_logBindingUpdate","RenderTypes","renderText","renderComment","renderNode","renderEvent","StringMapWrapper","ListWrapper","m1","m2","k","k1","k2","removeAll","list","items","splice","remove","el","flat","item","flatItem","lastOnStack","element","__WEBPACK_IMPORTED_MODULE_1__parse_util__","__WEBPACK_IMPORTED_MODULE_2__ast__","__WEBPACK_IMPORTED_MODULE_3__interpolation_config__","__WEBPACK_IMPORTED_MODULE_4__lexer__","__WEBPACK_IMPORTED_MODULE_5__tags__","ParseTreeResult","Parser","TreeError","rootNodes","getTagDefinition","parseExpansionForms","interpolationConfig","tokensAndErrors","treeAndErrors","_TreeBuilder","tokens","_index","_rootNodes","_errors","_elementStack","_advance","_peek","EOF","TAG_OPEN_START","_consumeStartTag","TAG_CLOSE","_consumeEndTag","CDATA_START","_closeVoidElement","_consumeCdata","COMMENT_START","_consumeComment","TEXT","RAW_TEXT","ESCAPABLE_RAW_TEXT","_consumeText","EXPANSION_FORM_START","_consumeExpansion","prev","_advanceIf","startToken","CDATA_
 END","COMMENT_END","_addToParent","EXPANSION_CASE_VALUE","expCase","_parseExpansionCase","EXPANSION_FORM_END","EXPANSION_CASE_EXP_START","_collectExpansionExpTokens","parsedExp","expansionFormStack","EXPANSION_CASE_EXP_END","pop","parent_1","_getParentElement","ignoreFirstLf","isVoid","startTagToken","ATTR_NAME","_consumeAttr","_getElementFullName","selfClosing","TAG_OPEN_END_VOID","tagDef","canSelfClose","TAG_OPEN_END","_pushElement","_popElement","parentEl","isClosedByChild","_getParentElementSkippingContainers","container","requireExtraParent","newParent","parentToAdd","_insertBeforeContainer","endTagToken","stackIndex","closedByParent","attrName","ATTR_VALUE","valueToken","node","parentElement","implicitNamespacePrefix","extractAnnotation","annotation","applyParams","fnOrArray","annotations","annoLength","paramsAnnotations","ii","paramAnnotations","j","Reflect","defineMetadata","Class","clsDef","proto","extends","constructorName","_nextClassId","makeDecorator","props","parentCla
 ss","chainFn","DecoratorFactory","objOrType","getMetadata","metaCtor","annotationInstance","chainAnnotation","TypeDecorator","cls","getOwnMetadata","makeMetadataCtor","annotationCls","argVal","makeParamDecorator","ParamDecoratorFactory","ParamDecorator","unusedKey","parameters","bind","makePropDecorator","PropDecoratorFactory","decoratorInstance","unshift","__WEBPACK_IMPORTED_MODULE_0_rxjs_Subject__","__WEBPACK_IMPORTED_MODULE_1_rxjs_Observable__","EventEmitter","isAsync","__isAsync","emit","generatorOrNext","schedulerFn","errorFn","completeFn","setTimeout","__WEBPACK_IMPORTED_MODULE_1__dom_adapter__","EVENT_MANAGER_PLUGINS","EventManager","EventManagerPlugin","plugins","_zone","_eventNameToPlugin","manager","_plugins","reverse","addEventListener","plugin","_findPluginFor","addGlobalEventListener","getZone","plugin_1","supports","getGlobalEventTarget","aFunction","dPs","enumBugKeys","Empty","createDict","iframeDocument","iframe","style","display","appendChild","src","contentWindow",
 "document","open","write","close","Properties","$keys","floor","convertPropertyBinding","nameResolver","implicitReceiver","createCurrValueExpr","DefaultNameResolver","_AstToIrVisitor","VAL_UNWRAPPER_VAR","outputExpr","_Mode","temporaryCount","temporaryDeclaration","needsValueUnwrapper","initValueUnwrapperStmt","__WEBPACK_IMPORTED_MODULE_3__output_output_ast__","Final","ConvertPropertyBindingResult","convertActionBinding","action","flattenStatements","prependTemporaryDecls","lastIndex","preventDefaultVar","lastStatement","returnExpr","convertStmtIntoExpression","createPreventDefaultVar","ConvertActionBindingResult","createSharedBindingVariablesIfNeeded","unwrapperStmts","readVars","__WEBPACK_IMPORTED_MODULE_2__identifiers__","temporaryName","temporaryNumber","ensureStatementMode","mode","ensureExpressionMode","convertToStatementIfNeeded","output","createCachedLiteralArray","proxyExpr","proxyParams","proxyReturnEntries","__WEBPACK_IMPORTED_MODULE_4__identifier_util__","createCachedLit
 eralMap","__WEBPACK_IMPORTED_MODULE_0__expression_parser_ast__","EventHandlerVars","_builder","_nameResolver","_implicitReceiver","_valueUnwrapper","isAction","_nodeMap","_resultMap","_currentTemporary","visitBinary","op","operation","left","right","visitChain","expressions","visitConditional","trueExp","falseExp","visitPipe","callPipe","visitFunctionCall","visitImplicitReceiver","visitInterpolation","strings","visitKeyedRead","visitKeyedWrite","visitLiteralArray","visitLiteralMap","visitLiteralPrimitive","_getLocal","getLocal","visitMethodCall","leftMostSafe","leftMostSafeNode","convertSafeAccess","varExpr","visitPrefixNot","visitPropertyRead","visitPropertyWrite","visitSafePropertyRead","visitSafeMethodCall","visitQuote","temporary","guardedExpression","needsTemporary","allocateTemporary","access","delete","releaseTemporary","visitSome","tempNumber","max","_createInterpolateRegExp","__WEBPACK_IMPORTED_MODULE_1__chars__","__WEBPACK_IMPORTED_MODULE_3__ml_parser_interpolation_config_
 _","__WEBPACK_IMPORTED_MODULE_4__ast__","__WEBPACK_IMPORTED_MODULE_5__lexer__","SplitInterpolation","offsets

<TRUNCATED>

[08/49] knox git commit: Update CHANGES for v0.14.0

Posted by mo...@apache.org.
Update CHANGES for v0.14.0

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/2ff3e756
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/2ff3e756
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/2ff3e756

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 2ff3e756c6486bf34b605052df463488a5f9123e
Parents: 6474b61
Author: Larry McCay <lm...@hortonworks.com>
Authored: Tue Nov 28 23:21:06 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Tue Nov 28 23:22:02 2017 -0500

----------------------------------------------------------------------
 CHANGES | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/2ff3e756/CHANGES
----------------------------------------------------------------------
diff --git a/CHANGES b/CHANGES
index d613a78..8815d3e 100644
--- a/CHANGES
+++ b/CHANGES
@@ -75,6 +75,7 @@ Release Notes - Apache Knox - Version 0.14.0
    * [KNOX-989] - Report metrics at service level (/webhdfs/v1) instead of url with args (/webhdfs/v1/?op=LISTSTATUS) (Mohammad Kamrul Islam via Sandeep More)
    * [KNOX-993] - The samples README for knoxshell references the 0.5.0 userguide
    * [KNOX-992] - Add README for the knoxshell distribution (Colm O hEigeartaigh and lmccay via lmccay)
+   * [KNOX-1119] - Pac4J OAuth/OpenID Principal Needs to be Configurable (Andreas Hildebrandt via lmccay)
 ** Bug
    * [KNOX-1003] - Fix the rewrite rules for Zeppelin 0.7.2 UI
    * [KNOX-1004] - Failing (flaky) Knox unit tests
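
A note on the KNOX-1119 entry above: the change makes the principal that Knox derives from a Pac4J OAuth/OpenID profile configurable instead of always using the profile id. The sketch below only illustrates that selection logic under assumed names; the "pac4j.id_attribute" parameter name, the class and the method are placeholders for illustration, not the exact Knox code.

    import org.pac4j.core.profile.CommonProfile;

    /**
     * Minimal sketch of a configurable principal lookup for a Pac4J profile.
     * The attribute name would come from filter/topology configuration (an
     * assumed "pac4j.id_attribute" init parameter); when it is unset or the
     * attribute is missing, the pac4j profile id is used as before.
     */
    public final class ConfigurablePrincipalResolver {

      private final String idAttribute;

      public ConfigurablePrincipalResolver(String idAttribute) {
        this.idAttribute = idAttribute;
      }

      public String resolve(CommonProfile profile) {
        if (idAttribute != null && !idAttribute.isEmpty()) {
          final Object value = profile.getAttribute(idAttribute);
          if (value != null) {
            return value.toString();
          }
        }
        return profile.getId();
      }
    }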


[18/49] knox git commit: KNOX-1120 - Pac4J Stop Using ConfigSingleton

Posted by mo...@apache.org.
KNOX-1120 - Pac4J Stop Using ConfigSingleton

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/55be1592
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/55be1592
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/55be1592

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 55be159283d77b382fa3325df5987410548640c9
Parents: 27217ea
Author: Larry McCay <lm...@hortonworks.com>
Authored: Fri Dec 1 08:12:45 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Fri Dec 1 08:13:12 2017 -0500

----------------------------------------------------------------------
 .../gateway/pac4j/filter/Pac4jDispatcherFilter.java      | 11 ++++++-----
 .../gateway/pac4j/filter/Pac4jIdentityAdapter.java       |  6 ++++--
 .../hadoop/gateway/pac4j/MockHttpServletRequest.java     |  8 +++++++-
 3 files changed, 17 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/55be1592/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jDispatcherFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jDispatcherFilter.java b/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jDispatcherFilter.java
index fb5fa4a..ca3b26d 100644
--- a/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jDispatcherFilter.java
+++ b/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jDispatcherFilter.java
@@ -29,8 +29,6 @@ import org.apache.hadoop.gateway.services.security.CryptoService;
 import org.pac4j.config.client.PropertiesConfigFactory;
 import org.pac4j.core.client.Client;
 import org.pac4j.core.config.Config;
-import org.pac4j.core.config.ConfigSingleton;
-import org.pac4j.core.context.J2EContext;
 import org.pac4j.core.util.CommonHelper;
 import org.pac4j.http.client.indirect.IndirectBasicAuthClient;
 import org.pac4j.http.credentials.authenticator.test.SimpleTestUsernamePasswordAuthenticator;
@@ -71,6 +69,8 @@ public class Pac4jDispatcherFilter implements Filter {
 
   private static final String PAC4J_COOKIE_DOMAIN_SUFFIX_PARAM = "pac4j.cookie.domain.suffix";
 
+  private static final String PAC4J_CONFIG = "pac4j.config";
+
   private CallbackFilter callbackFilter;
 
   private SecurityFilter securityFilter;
@@ -153,13 +153,13 @@ public class Pac4jDispatcherFilter implements Filter {
     }
 
     callbackFilter = new CallbackFilter();
+    callbackFilter.setConfigOnly(config);
     securityFilter = new SecurityFilter();
     securityFilter.setClients(clientName);
-    securityFilter.setConfig(config);
+    securityFilter.setConfigOnly(config);
 
     final String domainSuffix = filterConfig.getInitParameter(PAC4J_COOKIE_DOMAIN_SUFFIX_PARAM);
     config.setSessionStore(new KnoxSessionStore(cryptoService, clusterName, domainSuffix));
-    ConfigSingleton.setConfig(config);
   }
 
   private void addDefaultConfig(String clientNameParameter, Map<String, String> properties) {
@@ -196,7 +196,8 @@ public class Pac4jDispatcherFilter implements Filter {
 
     final HttpServletRequest request = (HttpServletRequest) servletRequest;
     final HttpServletResponse response = (HttpServletResponse) servletResponse;
-    final J2EContext context = new J2EContext(request, response, ConfigSingleton.getConfig().getSessionStore());
+    request.setAttribute(PAC4J_CONFIG, securityFilter.getConfig());
+//    final J2EContext context = new J2EContext(request, response, securityFilter.getConfig().getSessionStore());
 
     // it's a callback from an identity provider
     if (request.getParameter(PAC4J_CALLBACK_PARAMETER) != null) {

http://git-wip-us.apache.org/repos/asf/knox/blob/55be1592/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java b/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
index 20084b1..d8b3489 100644
--- a/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
+++ b/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
@@ -21,7 +21,7 @@ import org.apache.hadoop.gateway.audit.api.*;
 import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
 import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
 import org.apache.hadoop.gateway.security.PrimaryPrincipal;
-import org.pac4j.core.config.ConfigSingleton;
+import org.pac4j.core.config.Config;
 import org.pac4j.core.context.J2EContext;
 import org.pac4j.core.profile.CommonProfile;
 import org.pac4j.core.profile.ProfileManager;
@@ -47,6 +47,7 @@ public class Pac4jIdentityAdapter implements Filter {
   private static final Logger logger = LoggerFactory.getLogger(Pac4jIdentityAdapter.class);
 
   public static final String PAC4J_ID_ATTRIBUTE = "pac4j.id_attribute";
+  private static final String PAC4J_CONFIG = "pac4j.config";
 
   private static AuditService auditService = AuditServiceFactory.getAuditService();
   private static Auditor auditor = auditService.getAuditor(
@@ -70,7 +71,8 @@ public class Pac4jIdentityAdapter implements Filter {
 
     final HttpServletRequest request = (HttpServletRequest) servletRequest;
     final HttpServletResponse response = (HttpServletResponse) servletResponse;
-    final J2EContext context = new J2EContext(request, response, ConfigSingleton.getConfig().getSessionStore());
+    final J2EContext context = new J2EContext(request, response,
+        ((Config)request.getAttribute(PAC4J_CONFIG)).getSessionStore());
     final ProfileManager<CommonProfile> manager = new ProfileManager<CommonProfile>(context);
     final Optional<CommonProfile> optional = manager.get(true);
     if (optional.isPresent()) {

http://git-wip-us.apache.org/repos/asf/knox/blob/55be1592/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/MockHttpServletRequest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/MockHttpServletRequest.java b/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/MockHttpServletRequest.java
index b09adc9..5d2c5b6 100644
--- a/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/MockHttpServletRequest.java
+++ b/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/MockHttpServletRequest.java
@@ -31,6 +31,7 @@ public class MockHttpServletRequest extends HttpServletRequestWrapper {
     private String serverName;
     private Map<String, String> parameters = new HashMap<>();
     private Map<String, String> headers = new HashMap<>();
+    private Map<String, Object> attributes = new HashMap<>();
 
     public MockHttpServletRequest() {
         super(mock(HttpServletRequest.class));
@@ -82,7 +83,12 @@ public class MockHttpServletRequest extends HttpServletRequestWrapper {
     }
 
     @Override
+    public void setAttribute(String name, Object value) {
+        attributes.put(name, value);
+    }
+
+    @Override
     public Object getAttribute(String name) {
-        return null;
+        return attributes.get(name);
     }
 }
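
For readers following this change: the Pac4jDispatcherFilter and Pac4jIdentityAdapter changes above now share the pac4j Config through a request attribute instead of the process-wide ConfigSingleton. A minimal standalone sketch of that hand-off follows; the "pac4j.config" attribute name and the Config/J2EContext types are taken from the diff, while the helper class itself is purely illustrative.

    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.pac4j.core.config.Config;
    import org.pac4j.core.context.J2EContext;

    final class Pac4jConfigHandOff {
        static final String PAC4J_CONFIG = "pac4j.config";

        // Upstream (Pac4jDispatcherFilter): stash the request-scoped Config.
        static void publish(HttpServletRequest request, Config config) {
            request.setAttribute(PAC4J_CONFIG, config);
        }

        // Downstream (Pac4jIdentityAdapter): rebuild the web context from the same Config.
        static J2EContext resolveContext(HttpServletRequest request, HttpServletResponse response) {
            Config config = (Config) request.getAttribute(PAC4J_CONFIG);
            return new J2EContext(request, response, config.getSessionStore());
        }
    }

Passing the Config along the filter chain this way avoids the JVM-wide shared state that ConfigSingleton implied.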


[05/49] knox git commit: KNOX-1107 - Remote Configuration Registry Client Service (Phil Zampino via lmccay)

Posted by mo...@apache.org.
KNOX-1107 - Remote Configuration Registry Client Service (Phil Zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/5af2413c
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/5af2413c
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/5af2413c

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 5af2413c4ea4593a081e4f5ad8ba6b2d3cf78e12
Parents: 24d51ad
Author: Larry McCay <lm...@hortonworks.com>
Authored: Mon Nov 27 12:46:35 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Mon Nov 27 12:46:35 2017 -0500

----------------------------------------------------------------------
 gateway-server/pom.xml                          |   9 +
 .../apache/hadoop/gateway/GatewayMessages.java  |  40 +-
 .../gateway/config/impl/GatewayConfigImpl.java  |  47 +-
 .../gateway/services/CLIGatewayServices.java    |  10 +
 .../services/DefaultGatewayServices.java        |  14 +-
 .../topology/impl/DefaultTopologyService.java   |  38 +-
 .../DefaultConfigurationMonitorProvider.java    |  31 ++
 .../DefaultRemoteConfigurationMonitor.java      | 163 +++++++
 .../RemoteConfigurationMonitorFactory.java      |  74 ++++
 .../simple/SimpleDescriptorHandler.java         |   1 +
 ...y.monitor.RemoteConfigurationMonitorProvider |  19 +
 .../config/impl/GatewayConfigImplTest.java      |  28 ++
 .../topology/DefaultTopologyServiceTest.java    |  10 +-
 .../ZooKeeperConfigurationMonitorTest.java      | 355 ++++++++++++++++
 .../apache/hadoop/gateway/util/KnoxCLITest.java |  26 +-
 .../hadoop/gateway/websockets/BadUrlTest.java   |  11 +
 .../gateway/websockets/WebsocketEchoTest.java   |  11 +
 .../WebsocketMultipleConnectionTest.java        |  11 +
 gateway-service-remoteconfig/pom.xml            |  89 ++++
 .../remote/RemoteConfigurationMessages.java     |  46 ++
 ...nfigurationRegistryClientServiceFactory.java |  41 ++
 ...figurationRegistryClientServiceProvider.java |  27 ++
 .../RemoteConfigurationRegistryConfig.java      |  43 ++
 .../DefaultRemoteConfigurationRegistries.java   | 104 +++++
 .../config/RemoteConfigurationRegistries.java   |  33 ++
 .../RemoteConfigurationRegistriesAccessor.java  |  60 +++
 .../RemoteConfigurationRegistriesParser.java    |  48 +++
 .../config/RemoteConfigurationRegistry.java     | 139 ++++++
 .../config/remote/zk/CuratorClientService.java  | 423 ++++++++++++++++++
 .../RemoteConfigurationRegistryJAASConfig.java  | 169 ++++++++
 .../remote/zk/ZooKeeperClientService.java       |  25 ++
 .../zk/ZooKeeperClientServiceProvider.java      |  34 ++
 ...teConfigurationRegistryClientServiceProvider |  19 +
 ...efaultRemoteConfigurationRegistriesTest.java | 184 ++++++++
 ...teConfigurationRegistryConfigParserTest.java | 108 +++++
 .../util/RemoteRegistryConfigTestUtils.java     | 117 +++++
 ...eConfigurationRegistryClientServiceTest.java | 424 +++++++++++++++++++
 ...moteConfigurationRegistryJAASConfigTest.java | 255 +++++++++++
 .../hadoop/gateway/config/GatewayConfig.java    |  34 ++
 .../gateway/services/GatewayServices.java       |   2 +
 .../RemoteConfigurationRegistryClient.java      |  74 ++++
 ...emoteConfigurationRegistryClientService.java |  28 ++
 .../monitor/RemoteConfigurationMonitor.java     |  24 ++
 .../RemoteConfigurationMonitorProvider.java     |  34 ++
 .../hadoop/gateway/GatewayTestConfig.java       |  26 ++
 .../java/org/apache/hadoop/test/TestUtils.java  |   2 +-
 gateway-test/pom.xml                            |   6 +
 .../monitor/RemoteConfigurationMonitorTest.java | 397 +++++++++++++++++
 pom.xml                                         |  18 +-
 49 files changed, 3918 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-server/pom.xml b/gateway-server/pom.xml
index 0c05625..0a43584 100644
--- a/gateway-server/pom.xml
+++ b/gateway-server/pom.xml
@@ -188,6 +188,10 @@
             <artifactId>gateway-server-xforwarded-filter</artifactId>
         </dependency>
         <dependency>
+            <groupId>org.apache.knox</groupId>
+            <artifactId>gateway-service-remoteconfig</artifactId>
+        </dependency>
+        <dependency>
             <groupId>net.lingala.zip4j</groupId>
             <artifactId>zip4j</artifactId>
         </dependency>
@@ -316,6 +320,11 @@
             <artifactId>metrics-servlets</artifactId>
         </dependency>
 
+        <dependency>
+            <groupId>org.apache.curator</groupId>
+            <artifactId>curator-test</artifactId>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
 </project>

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
index 4cb4c40..d78ef71 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -514,19 +514,57 @@ public interface GatewayMessages {
   void topologyPortMappingCannotFindTopology(final String topology, final int port);
 
 
+  @Message( level = MessageLevel.WARN, text = "There is no registry client defined for remote configuration monitoring." )
+  void missingClientConfigurationForRemoteMonitoring();
+
+  @Message( level = MessageLevel.WARN, text = "Could not resolve a remote configuration registry client for {0}." )
+  void unresolvedClientConfigurationForRemoteMonitoring(final String clientName);
+
   @Message( level = MessageLevel.INFO, text = "Monitoring simple descriptors in directory: {0}" )
   void monitoringDescriptorChangesInDirectory(String descriptorsDir);
 
-
   @Message( level = MessageLevel.INFO, text = "Monitoring shared provider configurations in directory: {0}" )
   void monitoringProviderConfigChangesInDirectory(String sharedProviderDir);
 
+  @Message( level = MessageLevel.ERROR, text = "Error registering listener for remote configuration path {0} : {1}" )
+  void errorAddingRemoteConfigurationListenerForPath(final String path,
+                                                     @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+  @Message( level = MessageLevel.ERROR, text = "Error unregistering listener for remote configuration path {0} : {1}" )
+  void errorRemovingRemoteConfigurationListenerForPath(final String path,
+                                                       @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+  @Message( level = MessageLevel.ERROR, text = "Error downloading remote configuration {0} : {1}" )
+  void errorDownloadingRemoteConfiguration(final String path,
+                                           @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
   @Message( level = MessageLevel.INFO, text = "Prevented deletion of shared provider configuration because there are referencing descriptors: {0}" )
   void preventedDeletionOfSharedProviderConfiguration(String providerConfigurationPath);
 
   @Message( level = MessageLevel.INFO, text = "Generated topology {0} because the associated descriptor {1} changed." )
   void generatedTopologyForDescriptorChange(String topologyName, String descriptorName);
 
+  @Message( level = MessageLevel.WARN, text = "An error occurred while attempting to initialize the remote configuration monitor: {0}" )
+  void remoteConfigurationMonitorInitFailure(final String errorMessage,
+                                             @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+  @Message( level = MessageLevel.WARN, text = "An error occurred while attempting to start the remote configuration monitor {0} : {1}" )
+  void remoteConfigurationMonitorStartFailure(final String monitorType,
+                                              final String errorMessage,
+                                              @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+  @Message( level = MessageLevel.INFO, text = "Starting remote configuration monitor for source {0} ..." )
+  void startingRemoteConfigurationMonitor(final String address);
+
+  @Message( level = MessageLevel.INFO, text = "Monitoring remote configuration source {0}" )
+  void monitoringRemoteConfigurationSource(final String address);
+
+  @Message( level = MessageLevel.INFO, text = "Remote configuration monitor downloaded {0} configuration file {1}" )
+  void downloadedRemoteConfigFile(final String type, final String configFileName);
+
+  @Message( level = MessageLevel.INFO, text = "Remote configuration monitor deleted {0} configuration file {1} based on remote change." )
+  void deletedRemoteConfigFile(final String type, final String configFileName);
+
   @Message( level = MessageLevel.ERROR, text = "An error occurred while processing {0} : {1}" )
   void simpleDescriptorHandlingError(final String simpleDesc,
                                      @StackTrace(level = MessageLevel.DEBUG) Exception e);

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
index 4202a18..17c2552 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
@@ -184,6 +184,8 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   public static final String DEFAULT_DEPLOYMENT_DIR = "deployments";
   public static final String DEFAULT_SECURITY_DIR = "security";
   public static final String DEFAULT_DATA_DIR = "data";
+  private static final String PROVIDERCONFIG_DIR_NAME = "shared-providers";
+  private static final String DESCRIPTORS_DIR_NAME = "descriptors";
 
   /* Websocket defaults */
   public static final boolean DEFAULT_WEBSOCKET_FEATURE_ENABLED = false;
@@ -214,6 +216,10 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   private static final String CRYPTO_KEY_LENGTH = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.key.length";
   public static final String SERVER_HEADER_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".server.header.enabled";
 
+  /* @since 0.15 Remote configuration monitoring */
+  static final String CONFIG_REGISTRY_PREFIX = GATEWAY_CONFIG_FILE_PREFIX + ".remote.config.registry";
+  static final String REMOTE_CONFIG_MONITOR_CLIENT_NAME = GATEWAY_CONFIG_FILE_PREFIX + ".remote.config.monitor.client";
+
   private static List<String> DEFAULT_GLOBAL_RULES_SERVICES;
 
 
@@ -264,7 +270,7 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
     } else {
       dataDir = get(DATA_DIR, getGatewayHomeDir() + File.separator + DEFAULT_DATA_DIR);
     }
-    return dataDir;
+    return FilenameUtils.normalize(dataDir);
   }
 
   @Override
@@ -412,6 +418,16 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   }
 
   @Override
+  public String getGatewayProvidersConfigDir() {
+    return getGatewayConfDir() + File.separator + PROVIDERCONFIG_DIR_NAME;
+  }
+
+  @Override
+  public String getGatewayDescriptorsDir() {
+    return getGatewayConfDir() + File.separator + DESCRIPTORS_DIR_NAME;
+  }
+
+  @Override
   public String getGatewayTopologyDir() {
     return getGatewayConfDir() + File.separator + "topologies";
   }
@@ -923,4 +939,33 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   public boolean isGatewayServerHeaderEnabled() {
     return Boolean.parseBoolean(getVar(SERVER_HEADER_ENABLED, "true"));
   }
+
+  @Override
+  public List<String> getRemoteRegistryConfigurationNames() {
+    List<String> result = new ArrayList<>();
+
+    // Iterate over all the properties in this configuration
+    for (Map.Entry<String, String> entry : this) {
+      String propertyName = entry.getKey();
+
+      // Search for all the remote config registry properties
+      if (propertyName.startsWith(CONFIG_REGISTRY_PREFIX)) {
+        String registryName = propertyName.substring(CONFIG_REGISTRY_PREFIX.length() + 1);
+        result.add(registryName);
+      }
+    }
+
+    return result;
+  }
+
+  @Override
+  public String getRemoteRegistryConfiguration(String name) {
+    return get(CONFIG_REGISTRY_PREFIX + "." + name );
+  }
+
+  @Override
+  public String getRemoteConfigurationMonitorClientName() {
+    return get(REMOTE_CONFIG_MONITOR_CLIENT_NAME);
+  }
+
 }
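
The new getters above key off two property families: gateway.remote.config.registry.<name> for registry definitions and gateway.remote.config.monitor.client for selecting which registry the monitor uses. A small illustrative sketch follows; the registry name "sandbox-zk" is made up, and the value format mirrors the unit test further down in this commit.

    import java.util.List;
    import org.apache.hadoop.gateway.config.impl.GatewayConfigImpl;

    public class RemoteRegistryConfigExample {
        public static void main(String[] args) {
            GatewayConfigImpl config = new GatewayConfigImpl();

            // Define one named registry and point the monitor at it ("sandbox-zk" is hypothetical).
            config.set("gateway.remote.config.registry.sandbox-zk",
                       "type=ZooKeeper;address=host1:2181;authType=digest;principal=itsme;credentialAlias=testAlias");
            config.set("gateway.remote.config.monitor.client", "sandbox-zk");

            // The registry name is the suffix after the prefix and dot; the raw definition is retrievable by name.
            List<String> names = config.getRemoteRegistryConfigurationNames();         // ["sandbox-zk"]
            String definition  = config.getRemoteRegistryConfiguration("sandbox-zk");   // the value set above
            String monitorName = config.getRemoteConfigurationMonitorClientName();      // "sandbox-zk"
            System.out.println(names + " -> " + monitorName + " : " + definition);
        }
    }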

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/main/java/org/apache/hadoop/gateway/services/CLIGatewayServices.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/CLIGatewayServices.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/CLIGatewayServices.java
index 114aa83..74dc4d3 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/CLIGatewayServices.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/CLIGatewayServices.java
@@ -23,6 +23,8 @@ import org.apache.hadoop.gateway.deploy.DeploymentContext;
 import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
 import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceFactory;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 import org.apache.hadoop.gateway.services.topology.impl.DefaultTopologyService;
 import org.apache.hadoop.gateway.services.security.impl.DefaultAliasService;
 import org.apache.hadoop.gateway.services.security.impl.DefaultCryptoService;
@@ -71,6 +73,12 @@ public class CLIGatewayServices implements GatewayServices {
     DefaultTopologyService tops = new DefaultTopologyService();
     tops.init(  config, options  );
     services.put(TOPOLOGY_SERVICE, tops);
+
+    RemoteConfigurationRegistryClientService registryClientService =
+                                                    RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
+    registryClientService.setAliasService(alias);
+    registryClientService.init(config, options);
+    services.put(REMOTE_REGISTRY_CLIENT_SERVICE, registryClientService);
   }
   
   public void start() throws ServiceLifecycleException {
@@ -83,6 +91,8 @@ public class CLIGatewayServices implements GatewayServices {
 
     DefaultTopologyService tops = (DefaultTopologyService)services.get(TOPOLOGY_SERVICE);
     tops.start();
+
+    (services.get(REMOTE_REGISTRY_CLIENT_SERVICE)).start();
   }
 
   public void stop() throws ServiceLifecycleException {

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
index 02ac154..9dca344 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
@@ -23,6 +23,8 @@ import org.apache.hadoop.gateway.deploy.DeploymentContext;
 import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
 import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceFactory;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 import org.apache.hadoop.gateway.services.registry.impl.DefaultServiceDefinitionRegistry;
 import org.apache.hadoop.gateway.services.metrics.impl.DefaultMetricsService;
 import org.apache.hadoop.gateway.services.topology.impl.DefaultTopologyService;
@@ -104,6 +106,12 @@ public class DefaultGatewayServices implements GatewayServices {
     sis.init( config, options );
     services.put( SERVER_INFO_SERVICE, sis );
 
+    RemoteConfigurationRegistryClientService registryClientService =
+                                                    RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
+    registryClientService.setAliasService(alias);
+    registryClientService.init(config, options);
+    services.put(REMOTE_REGISTRY_CLIENT_SERVICE, registryClientService);
+
     DefaultTopologyService tops = new DefaultTopologyService();
     tops.setAliasService(alias);
     tops.init(  config, options  );
@@ -117,7 +125,7 @@ public class DefaultGatewayServices implements GatewayServices {
     metricsService.init( config, options );
     services.put( METRICS_SERVICE, metricsService );
   }
-  
+
   public void start() throws ServiceLifecycleException {
     ms.start();
 
@@ -132,6 +140,10 @@ public class DefaultGatewayServices implements GatewayServices {
     ServerInfoService sis = (ServerInfoService) services.get(SERVER_INFO_SERVICE);
     sis.start();
 
+    RemoteConfigurationRegistryClientService clientService =
+                            (RemoteConfigurationRegistryClientService)services.get(REMOTE_REGISTRY_CLIENT_SERVICE);
+    clientService.start();
+
     DefaultTopologyService tops = (DefaultTopologyService)services.get(TOPOLOGY_SERVICE);
     tops.start();
 

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
index 39e8029..5fc3620 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
@@ -46,6 +46,8 @@ import org.apache.hadoop.gateway.topology.TopologyListener;
 import org.apache.hadoop.gateway.topology.TopologyMonitor;
 import org.apache.hadoop.gateway.topology.TopologyProvider;
 import org.apache.hadoop.gateway.topology.builder.TopologyBuilder;
+import org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitor;
+import org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorFactory;
 import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorHandler;
 import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
 import org.apache.hadoop.gateway.topology.xml.AmbariFormatXmlTopologyRules;
@@ -101,6 +103,7 @@ public class DefaultTopologyService
   private volatile Map<File, Topology> topologies;
   private AliasService aliasService;
 
+  private RemoteConfigurationMonitor remoteMonitor = null;
 
   private Topology loadTopology(File file) throws IOException, SAXException, URISyntaxException, InterruptedException {
     final long TIMEOUT = 250; //ms
@@ -214,6 +217,16 @@ public class DefaultTopologyService
     return events;
   }
 
+  private File calculateAbsoluteProvidersConfigDir(GatewayConfig config) {
+    File pcDir = new File(config.getGatewayProvidersConfigDir());
+    return pcDir.getAbsoluteFile();
+  }
+
+  private File calculateAbsoluteDescriptorsDir(GatewayConfig config) {
+    File descDir = new File(config.getGatewayDescriptorsDir());
+    return descDir.getAbsoluteFile();
+  }
+
   private File calculateAbsoluteTopologiesDir(GatewayConfig config) {
     File topoDir = new File(config.getGatewayTopologyDir());
     topoDir = topoDir.getAbsoluteFile();
@@ -221,7 +234,7 @@ public class DefaultTopologyService
   }
 
   private File calculateAbsoluteConfigDir(GatewayConfig config) {
-    File configDir = null;
+    File configDir;
 
     String path = config.getGatewayConfDir();
     configDir = (path != null) ? new File(path) : (new File(config.getGatewayTopologyDir())).getParentFile();
@@ -468,16 +481,32 @@ public class DefaultTopologyService
 
   @Override
   public void startMonitor() throws Exception {
+    // Start the local configuration monitors
     for (FileAlterationMonitor monitor : monitors) {
       monitor.start();
     }
+
+    // Start the remote configuration monitor, if it has been initialized
+    if (remoteMonitor != null) {
+      try {
+        remoteMonitor.start();
+      } catch (Exception e) {
+        log.remoteConfigurationMonitorStartFailure(remoteMonitor.getClass().getTypeName(), e.getLocalizedMessage(), e);
+      }
+    }
   }
 
   @Override
   public void stopMonitor() throws Exception {
+    // Stop the local configuration monitors
     for (FileAlterationMonitor monitor : monitors) {
       monitor.stop();
     }
+
+    // Stop the remote configuration monitor, if it has been initialized
+    if (remoteMonitor != null) {
+      remoteMonitor.stop();
+    }
   }
 
   @Override
@@ -532,7 +561,7 @@ public class DefaultTopologyService
   public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
 
     try {
-      listeners = new HashSet<>();
+      listeners  = new HashSet<>();
       topologies = new HashMap<>();
 
       topologiesDirectory = calculateAbsoluteTopologiesDir(config);
@@ -567,6 +596,9 @@ public class DefaultTopologyService
           }
       }
 
+      // Initialize the remote configuration monitor, if it has been configured
+      remoteMonitor = RemoteConfigurationMonitorFactory.get(config);
+
     } catch (IOException | SAXException io) {
       throw new ServiceLifecycleException(io.getMessage());
     }
@@ -582,7 +614,7 @@ public class DefaultTopologyService
    * @return A List of the Files on the directory.
    */
   private static List<File> listFiles(File directory) {
-    List<File> result = null;
+    List<File> result;
     File[] files = directory.listFiles();
     if (files != null) {
       result = Arrays.asList(files);

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java
new file mode 100644
index 0000000..7b34e3d
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultConfigurationMonitorProvider.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.monitor;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+
+public class DefaultConfigurationMonitorProvider implements RemoteConfigurationMonitorProvider {
+
+    @Override
+    public RemoteConfigurationMonitor newInstance(final GatewayConfig                            config,
+                                                  final RemoteConfigurationRegistryClientService clientService) {
+        return new DefaultRemoteConfigurationMonitor(config, clientService);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
new file mode 100644
index 0000000..1dd71ac
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/DefaultRemoteConfigurationMonitor.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.monitor;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.gateway.GatewayMessages;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.ChildEntryListener;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.EntryListener;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+
+class DefaultRemoteConfigurationMonitor implements RemoteConfigurationMonitor {
+
+    private static final String NODE_KNOX = "/knox";
+    private static final String NODE_KNOX_CONFIG = NODE_KNOX + "/config";
+    private static final String NODE_KNOX_PROVIDERS = NODE_KNOX_CONFIG + "/shared-providers";
+    private static final String NODE_KNOX_DESCRIPTORS = NODE_KNOX_CONFIG + "/descriptors";
+
+    private static GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
+
+    private RemoteConfigurationRegistryClient client = null;
+
+    private File providersDir;
+    private File descriptorsDir;
+
+    /**
+     * @param config                The gateway configuration
+     * @param registryClientService The service from which the remote registry client should be acquired.
+     */
+    DefaultRemoteConfigurationMonitor(GatewayConfig                            config,
+                                      RemoteConfigurationRegistryClientService registryClientService) {
+        this.providersDir   = new File(config.getGatewayProvidersConfigDir());
+        this.descriptorsDir = new File(config.getGatewayDescriptorsDir());
+
+        if (registryClientService != null) {
+            String clientName = config.getRemoteConfigurationMonitorClientName();
+            if (clientName != null) {
+                this.client = registryClientService.get(clientName);
+                if (this.client == null) {
+                    log.unresolvedClientConfigurationForRemoteMonitoring(clientName);
+                }
+            } else {
+                log.missingClientConfigurationForRemoteMonitoring();
+            }
+        }
+    }
+
+    @Override
+    public void start() throws Exception {
+        if (client == null) {
+            throw new IllegalStateException("Failed to acquire a remote configuration registry client.");
+        }
+
+        final String monitorSource = client.getAddress();
+        log.startingRemoteConfigurationMonitor(monitorSource);
+
+        // Confirm access to the remote provider configs directory znode
+        List<String> providerConfigs = client.listChildEntries(NODE_KNOX_PROVIDERS);
+        if (providerConfigs == null) {
+            // Either the ZNode does not exist, or there is an authentication problem
+            throw new IllegalStateException("Unable to access remote path: " + NODE_KNOX_PROVIDERS);
+        }
+
+        // Confirm access to the remote descriptors directory znode
+        List<String> descriptors = client.listChildEntries(NODE_KNOX_DESCRIPTORS);
+        if (descriptors == null) {
+            // Either the ZNode does not exist, or there is an authentication problem
+            throw new IllegalStateException("Unable to access remote path: " + NODE_KNOX_DESCRIPTORS);
+        }
+
+        // Register a listener for provider config znode additions/removals
+        client.addChildEntryListener(NODE_KNOX_PROVIDERS, new ConfigDirChildEntryListener(providersDir));
+
+        // Register a listener for descriptor znode additions/removals
+        client.addChildEntryListener(NODE_KNOX_DESCRIPTORS, new ConfigDirChildEntryListener(descriptorsDir));
+
+        log.monitoringRemoteConfigurationSource(monitorSource);
+    }
+
+
+    @Override
+    public void stop() throws Exception {
+    }
+
+
+    private static class ConfigDirChildEntryListener implements ChildEntryListener {
+        File localDir;
+
+        ConfigDirChildEntryListener(File localDir) {
+            this.localDir = localDir;
+        }
+
+        @Override
+        public void childEvent(RemoteConfigurationRegistryClient client, Type type, String path) {
+            File localFile = new File(localDir, path.substring(path.lastIndexOf("/") + 1));
+
+            switch (type) {
+                case REMOVED:
+                    FileUtils.deleteQuietly(localFile);
+                    log.deletedRemoteConfigFile(localDir.getName(), localFile.getName());
+                    try {
+                        client.removeEntryListener(path);
+                    } catch (Exception e) {
+                        log.errorRemovingRemoteConfigurationListenerForPath(path, e);
+                    }
+                    break;
+                case ADDED:
+                    try {
+                        client.addEntryListener(path, new ConfigEntryListener(localDir));
+                    } catch (Exception e) {
+                        log.errorAddingRemoteConfigurationListenerForPath(path, e);
+                    }
+                    break;
+            }
+        }
+    }
+
+    private static class ConfigEntryListener implements EntryListener {
+        private File localDir;
+
+        ConfigEntryListener(File localDir) {
+            this.localDir = localDir;
+        }
+
+        @Override
+        public void entryChanged(RemoteConfigurationRegistryClient client, String path, byte[] data) {
+            File localFile = new File(localDir, path.substring(path.lastIndexOf("/")));
+            if (data != null) {
+                try {
+                    FileUtils.writeByteArrayToFile(localFile, data);
+                    log.downloadedRemoteConfigFile(localDir.getName(), localFile.getName());
+                } catch (IOException e) {
+                    log.errorDownloadingRemoteConfiguration(path, e);
+                }
+            } else {
+                FileUtils.deleteQuietly(localFile);
+                log.deletedRemoteConfigFile(localDir.getName(), localFile.getName());
+            }
+        }
+    }
+
+}
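
As the start() method above shows, the monitor fails fast unless the /knox/config/shared-providers and /knox/config/descriptors znodes already exist and are readable. A minimal Curator sketch for pre-creating them is below; the connect string and retry policy are placeholders, and the ACL/SASL handling exercised by the tests in this commit is deliberately omitted.

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.retry.ExponentialBackoffRetry;
    import org.apache.zookeeper.CreateMode;

    public class KnoxZNodeBootstrap {
        public static void main(String[] args) throws Exception {
            // Placeholder connect string; use the ensemble the registry client is configured for.
            CuratorFramework client = CuratorFrameworkFactory.builder()
                                                             .connectString("localhost:2181")
                                                             .retryPolicy(new ExponentialBackoffRetry(100, 3))
                                                             .build();
            client.start();

            // Create the znodes the monitor watches, if they are not already present.
            for (String path : new String[]{"/knox/config/shared-providers", "/knox/config/descriptors"}) {
                if (client.checkExists().forPath(path) == null) {
                    client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(path);
                }
            }

            client.close();
        }
    }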

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java
new file mode 100644
index 0000000..4d2df45
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorFactory.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.monitor;
+
+import org.apache.hadoop.gateway.GatewayMessages;
+import org.apache.hadoop.gateway.GatewayServer;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.GatewayServices;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+import java.util.ServiceLoader;
+
+public class RemoteConfigurationMonitorFactory {
+
+    private static final GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
+
+    private static RemoteConfigurationRegistryClientService remoteConfigRegistryClientService = null;
+
+    public static void setClientService(RemoteConfigurationRegistryClientService clientService) {
+        remoteConfigRegistryClientService = clientService;
+    }
+
+    private static RemoteConfigurationRegistryClientService getClientService() {
+        if (remoteConfigRegistryClientService == null) {
+            GatewayServices services = GatewayServer.getGatewayServices();
+            if (services != null) {
+                remoteConfigRegistryClientService = services.getService(GatewayServices.REMOTE_REGISTRY_CLIENT_SERVICE);
+            }
+        }
+
+        return remoteConfigRegistryClientService;
+    }
+
+    /**
+     *
+     * @param config The GatewayConfig
+     *
+     * @return The first RemoteConfigurationMonitor extension that is found.
+     */
+    public static RemoteConfigurationMonitor get(GatewayConfig config) {
+        RemoteConfigurationMonitor rcm = null;
+
+        ServiceLoader<RemoteConfigurationMonitorProvider> providers =
+                                                 ServiceLoader.load(RemoteConfigurationMonitorProvider.class);
+        for (RemoteConfigurationMonitorProvider provider : providers) {
+            try {
+                rcm = provider.newInstance(config, getClientService());
+                if (rcm != null) {
+                    break;
+                }
+            } catch (Exception e) {
+                log.remoteConfigurationMonitorInitFailure(e.getLocalizedMessage(), e);
+            }
+        }
+
+        return rcm;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
index 089925d..d1dc11d 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -185,6 +185,7 @@ public class SimpleDescriptorHandler {
 
             // Write the service declarations
             for (String serviceName : serviceNames) {
+                fw.write("\n");
                 fw.write("    <service>\n");
                 fw.write("        <role>" + serviceName + "</role>\n");
 

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider b/gateway-server/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider
new file mode 100644
index 0000000..bd4023e
--- /dev/null
+++ b/gateway-server/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.monitor.DefaultConfigurationMonitorProvider
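
Tying the last two files together: RemoteConfigurationMonitorFactory discovers monitor implementations through java.util.ServiceLoader, and this services file is what registers the default provider. A hypothetical alternative provider would follow the same shape; the class and package names below are invented for illustration, and returning null simply lets the factory move on to the next registered provider.

    package org.example.knox.monitor;  // hypothetical package

    import org.apache.hadoop.gateway.config.GatewayConfig;
    import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
    import org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitor;
    import org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider;

    public class CustomConfigurationMonitorProvider implements RemoteConfigurationMonitorProvider {

        @Override
        public RemoteConfigurationMonitor newInstance(GatewayConfig config,
                                                      RemoteConfigurationRegistryClientService clientService) {
            // Return a custom RemoteConfigurationMonitor here, or null to defer to other providers.
            return null;
        }
    }

    // Registered by listing the fully-qualified class name in
    // META-INF/services/org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider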

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
index a9347f4..cd56f11 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
@@ -11,6 +11,10 @@ import static org.hamcrest.CoreMatchers.notNullValue;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.hasItems;
 import static org.hamcrest.Matchers.nullValue;
+import static org.junit.Assert.assertTrue;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertFalse;
+import static org.testng.Assert.assertNotNull;
 
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -217,4 +221,28 @@ public class GatewayConfigImplTest {
     assertThat( serverHeaderEnabled, is(false));
   }
 
+
+  @Test
+  public void testGetRemoteConfigurationRegistryNames() {
+    GatewayConfigImpl config = new GatewayConfigImpl();
+
+    List<String> registryNames = config.getRemoteRegistryConfigurationNames();
+    assertNotNull(registryNames);
+    assertTrue(registryNames.isEmpty());
+
+    config.set(GatewayConfigImpl.CONFIG_REGISTRY_PREFIX + ".test1",
+               "type=ZooKeeper;address=host1:2181;authType=digest;principal=itsme;credentialAlias=testAlias");
+    registryNames = config.getRemoteRegistryConfigurationNames();
+    assertNotNull(registryNames);
+    assertFalse(registryNames.isEmpty());
+    assertEquals(1, registryNames.size());
+
+    config.set(GatewayConfigImpl.CONFIG_REGISTRY_PREFIX + ".test2",
+               "type=ZooKeeper;address=host2:2181,host3:2181,host4:2181");
+    registryNames = config.getRemoteRegistryConfigurationNames();
+    assertNotNull(registryNames);
+    assertFalse(registryNames.isEmpty());
+    assertEquals(registryNames.size(), 2);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
index 2357ad6..e4a0eba 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
@@ -96,6 +96,12 @@ public class DefaultTopologyServiceTest {
     File dir = createDir();
     File topologyDir = new File(dir, "topologies");
 
+    File descriptorsDir = new File(dir, "descriptors");
+    descriptorsDir.mkdirs();
+
+    File sharedProvidersDir = new File(dir, "shared-providers");
+    sharedProvidersDir.mkdirs();
+
     long time = topologyDir.lastModified();
     try {
       createFile(topologyDir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time);
@@ -108,7 +114,9 @@ public class DefaultTopologyServiceTest {
 
       GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
       EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
-      EasyMock.expect(config.getGatewayConfDir()).andReturn(topologyDir.getParentFile().getAbsolutePath()).anyTimes();
+      EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
+      EasyMock.expect(config.getGatewayProvidersConfigDir()).andReturn(sharedProvidersDir.getAbsolutePath()).anyTimes();
+      EasyMock.expect(config.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
       EasyMock.replay(config);
 
       provider.init(config, c);

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java
new file mode 100644
index 0000000..1c4ed6e
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java
@@ -0,0 +1,355 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.monitor;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.curator.test.InstanceSpec;
+import org.apache.curator.test.TestingCluster;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.service.config.remote.zk.ZooKeeperClientService;
+import org.apache.hadoop.gateway.service.config.remote.zk.ZooKeeperClientServiceProvider;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.test.TestUtils;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.data.ACL;
+import org.easymock.EasyMock;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test the ZooKeeperConfigMonitor WITHOUT SASL configured or znode ACLs applied.
+ * The implementation of the monitor is the same regardless, since the ACLs are defined by the ZooKeeper znode
+ * creator, and the SASL config is purely JAAS (and external to the implementation).
+ */
+public class ZooKeeperConfigurationMonitorTest {
+
+    private static final String PATH_KNOX = "/knox";
+    private static final String PATH_KNOX_CONFIG = PATH_KNOX + "/config";
+    private static final String PATH_KNOX_PROVIDERS = PATH_KNOX_CONFIG + "/shared-providers";
+    private static final String PATH_KNOX_DESCRIPTORS = PATH_KNOX_CONFIG + "/descriptors";
+
+    private static File testTmp;
+    private static File providersDir;
+    private static File descriptorsDir;
+
+    private static TestingCluster zkCluster;
+
+    private static CuratorFramework client;
+
+    private GatewayConfig gc;
+
+
+    @BeforeClass
+    public static void setupSuite() throws Exception {
+        testTmp = TestUtils.createTempDir(ZooKeeperConfigurationMonitorTest.class.getName());
+        File confDir   = TestUtils.createTempDir(testTmp + "/conf");
+        providersDir   = TestUtils.createTempDir(confDir + "/shared-providers");
+        descriptorsDir = TestUtils.createTempDir(confDir + "/descriptors");
+
+        configureAndStartZKCluster();
+    }
+
+    private static void configureAndStartZKCluster() throws Exception {
+        // Configure security for the ZK cluster instances
+        Map<String, Object> customInstanceSpecProps = new HashMap<>();
+        customInstanceSpecProps.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
+        customInstanceSpecProps.put("requireClientAuthScheme", "sasl");
+
+        // Define the test cluster
+        List<InstanceSpec> instanceSpecs = new ArrayList<>();
+        for (int i = 0 ; i < 3 ; i++) {
+            InstanceSpec is = new InstanceSpec(null, -1, -1, -1, false, (i+1), -1, -1, customInstanceSpecProps);
+            instanceSpecs.add(is);
+        }
+        zkCluster = new TestingCluster(instanceSpecs);
+
+        // Start the cluster
+        zkCluster.start();
+
+        // Create the client for the test cluster
+        client = CuratorFrameworkFactory.builder()
+                                        .connectString(zkCluster.getConnectString())
+                                        .retryPolicy(new ExponentialBackoffRetry(100, 3))
+                                        .build();
+        assertNotNull(client);
+        client.start();
+
+        // Create the knox config paths with an open ACL (ANYONE_ID_UNSAFE), since no SASL user is configured for this test's client
+        List<ACL> acls = new ArrayList<>();
+        acls.add(new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
+        assertNotNull("Failed to create node:" + PATH_KNOX_DESCRIPTORS,
+                client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
+        assertNotNull("Failed to create node:" + PATH_KNOX_PROVIDERS,
+                client.checkExists().forPath(PATH_KNOX_PROVIDERS));
+    }
+
+    @AfterClass
+    public static void tearDownSuite() throws Exception {
+        // Clean up the ZK nodes, and close the client
+        if (client != null) {
+            client.delete().deletingChildrenIfNeeded().forPath(PATH_KNOX);
+            client.close();
+        }
+
+        // Shutdown the ZK cluster
+        zkCluster.close();
+
+        // Delete the working dir
+        testTmp.delete();
+    }
+
+    @Test
+    public void testZooKeeperConfigMonitor() throws Exception {
+        String configMonitorName = "remoteConfigMonitorClient";
+
+        // Setup the base GatewayConfig mock
+        gc = EasyMock.createNiceMock(GatewayConfig.class);
+        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
+                .andReturn(Collections.singletonList(configMonitorName))
+                .anyTimes();
+        final String registryConfig =
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
+        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
+                .andReturn(registryConfig)
+                .anyTimes();
+        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
+        EasyMock.replay(gc);
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.replay(aliasService);
+
+        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
+        clientService.setAliasService(aliasService);
+        clientService.init(gc, Collections.emptyMap());
+        clientService.start();
+
+        DefaultRemoteConfigurationMonitor cm = new DefaultRemoteConfigurationMonitor(gc, clientService);
+
+        try {
+            cm.start();
+        } catch (Exception e) {
+            fail("Failed to start monitor: " + e.getMessage());
+        }
+
+        try {
+            final String pc_one_znode = getProviderPath("providers-config1.xml");
+            final File pc_one         = new File(providersDir, "providers-config1.xml");
+            final String pc_two_znode = getProviderPath("providers-config2.xml");
+            final File pc_two         = new File(providersDir, "providers-config2.xml");
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(pc_one_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_one.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_one));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(pc_two_znode, TEST_PROVIDERS_CONFIG_2.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_two.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_2, FileUtils.readFileToString(pc_two));
+
+            client.setData().forPath(pc_two_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_two.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_two));
+
+            client.delete().forPath(pc_two_znode);
+            Thread.sleep(100);
+            assertFalse(pc_two.exists());
+
+            client.delete().forPath(pc_one_znode);
+            Thread.sleep(100);
+            assertFalse(pc_one.exists());
+
+            final String desc_one_znode   = getDescriptorPath("test1.json");
+            final String desc_two_znode   = getDescriptorPath("test2.json");
+            final String desc_three_znode = getDescriptorPath("test3.json");
+            final File desc_one           = new File(descriptorsDir, "test1.json");
+            final File desc_two           = new File(descriptorsDir, "test2.json");
+            final File desc_three         = new File(descriptorsDir, "test3.json");
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_one_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_one.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_one));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_two_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_two.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_two));
+
+            client.setData().forPath(desc_two_znode, TEST_DESCRIPTOR_2.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_two.exists());
+            assertEquals(TEST_DESCRIPTOR_2, FileUtils.readFileToString(desc_two));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_three_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_three.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_three));
+
+            client.delete().forPath(desc_two_znode);
+            Thread.sleep(100);
+            assertFalse("Expected test2.json to have been deleted.", desc_two.exists());
+
+            client.delete().forPath(desc_three_znode);
+            Thread.sleep(100);
+            assertFalse(desc_three.exists());
+
+            client.delete().forPath(desc_one_znode);
+            Thread.sleep(100);
+            assertFalse(desc_one.exists());
+        } finally {
+            cm.stop();
+        }
+    }
+
+    private static String getDescriptorPath(String descriptorName) {
+        return PATH_KNOX_DESCRIPTORS + "/" + descriptorName;
+    }
+
+    private static String getProviderPath(String providerConfigName) {
+        return PATH_KNOX_PROVIDERS + "/" + providerConfigName;
+    }
+
+
+    private static final String TEST_PROVIDERS_CONFIG_1 =
+            "<gateway>\n" +
+            "    <provider>\n" +
+            "        <role>identity-assertion</role>\n" +
+            "        <name>Default</name>\n" +
+            "        <enabled>true</enabled>\n" +
+            "    </provider>\n" +
+            "    <provider>\n" +
+            "        <role>hostmap</role>\n" +
+            "        <name>static</name>\n" +
+            "        <enabled>true</enabled>\n" +
+            "        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+            "    </provider>\n" +
+            "</gateway>\n";
+
+    private static final String TEST_PROVIDERS_CONFIG_2 =
+            "<gateway>\n" +
+            "    <provider>\n" +
+            "        <role>authentication</role>\n" +
+            "        <name>ShiroProvider</name>\n" +
+            "        <enabled>true</enabled>\n" +
+            "        <param>\n" +
+            "            <name>sessionTimeout</name>\n" +
+            "            <value>30</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapRealm</name>\n" +
+            "            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapContextFactory</name>\n" +
+            "            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapRealm.contextFactory</name>\n" +
+            "            <value>$ldapContextFactory</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapRealm.userDnTemplate</name>\n" +
+            "            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapRealm.contextFactory.url</name>\n" +
+            "            <value>ldap://localhost:33389</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+            "            <value>simple</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>urls./**</name>\n" +
+            "            <value>authcBasic</value>\n" +
+            "        </param>\n" +
+            "    </provider>\n" +
+            "</gateway>\n";
+
+    private static final String TEST_DESCRIPTOR_1 =
+            "{\n" +
+            "  \"discovery-type\":\"AMBARI\",\n" +
+            "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
+            "  \"discovery-user\":\"maria_dev\",\n" +
+            "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
+            "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
+            "  \"cluster\":\"Sandbox\",\n" +
+            "  \"services\":[\n" +
+            "    {\"name\":\"NODEUI\"},\n" +
+            "    {\"name\":\"YARNUI\"},\n" +
+            "    {\"name\":\"HDFSUI\"},\n" +
+            "    {\"name\":\"OOZIEUI\"},\n" +
+            "    {\"name\":\"HBASEUI\"},\n" +
+            "    {\"name\":\"NAMENODE\"},\n" +
+            "    {\"name\":\"JOBTRACKER\"},\n" +
+            "    {\"name\":\"WEBHDFS\"},\n" +
+            "    {\"name\":\"WEBHCAT\"},\n" +
+            "    {\"name\":\"OOZIE\"},\n" +
+            "    {\"name\":\"WEBHBASE\"},\n" +
+            "    {\"name\":\"RESOURCEMANAGER\"},\n" +
+            "    {\"name\":\"AMBARI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]},\n" +
+            "    {\"name\":\"AMBARIUI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]}\n" +
+            "  ]\n" +
+            "}\n";
+
+    private static final String TEST_DESCRIPTOR_2 =
+            "{\n" +
+            "  \"discovery-type\":\"AMBARI\",\n" +
+            "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
+            "  \"discovery-user\":\"maria_dev\",\n" +
+            "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
+            "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
+            "  \"cluster\":\"Sandbox\",\n" +
+            "  \"services\":[\n" +
+            "    {\"name\":\"NAMENODE\"},\n" +
+            "    {\"name\":\"JOBTRACKER\"},\n" +
+            "    {\"name\":\"WEBHDFS\"},\n" +
+            "    {\"name\":\"WEBHCAT\"},\n" +
+            "    {\"name\":\"OOZIE\"},\n" +
+            "    {\"name\":\"WEBHBASE\"},\n" +
+            "    {\"name\":\"RESOURCEMANAGER\"}\n" +
+            "  ]\n" +
+            "}\n";
+
+}
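
The registry configuration string mocked above ("type=ZooKeeper;address=...") is the same pattern a deployment
would supply through gateway configuration properties. A minimal sketch, assuming the
"gateway.remote.config.registry.<name>" prefix used in the KnoxCLI test diff that follows, and assuming
"gateway.remote.config.monitor.client" as the property that names the monitor's client:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.gateway.config.impl.GatewayConfigImpl;

    // Sketch only: configuration equivalent to what the EasyMock GatewayConfig returns in this test.
    Configuration config = new GatewayConfigImpl();
    // Define a remote registry client named "remoteConfigMonitorClient".
    config.set("gateway.remote.config.registry.remoteConfigMonitorClient",
               "type=ZooKeeper;address=localhost:2181");
    // Tell the gateway which client the remote configuration monitor should use (property name assumed).
    config.set("gateway.remote.config.monitor.client", "remoteConfigMonitorClient");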

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/test/java/org/apache/hadoop/gateway/util/KnoxCLITest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/util/KnoxCLITest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/util/KnoxCLITest.java
index 0352fa3..838f114 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/util/KnoxCLITest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/util/KnoxCLITest.java
@@ -20,8 +20,11 @@ package org.apache.hadoop.gateway.util;
 import com.mycila.xmltool.XMLDoc;
 import com.mycila.xmltool.XMLTag;
 import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.gateway.config.impl.GatewayConfigImpl;
 import org.apache.hadoop.gateway.services.GatewayServices;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 import org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.security.MasterService;
 import org.junit.Before;
@@ -42,6 +45,7 @@ import static org.hamcrest.CoreMatchers.notNullValue;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 
@@ -60,7 +64,27 @@ public class KnoxCLITest {
   }
 
   @Test
-  public void testSuccessfulAlaisLifecycle() throws Exception {
+  public void testRemoteConfigurationRegistryClientService() throws Exception {
+    outContent.reset();
+    KnoxCLI cli = new KnoxCLI();
+    Configuration config = new GatewayConfigImpl();
+    config.set("gateway.remote.config.registry.test_client", "type=ZooKeeper;address=localhost:2181");
+    cli.setConf(config);
+
+    // This is only to get the gateway services initialized
+    cli.run(new String[]{"version"});
+
+    RemoteConfigurationRegistryClientService service =
+                                   cli.getGatewayServices().getService(GatewayServices.REMOTE_REGISTRY_CLIENT_SERVICE);
+    assertNotNull(service);
+    RemoteConfigurationRegistryClient client = service.get("test_client");
+    assertNotNull(client);
+
+    assertNull(service.get("bogus"));
+  }
+
+  @Test
+  public void testSuccessfulAliasLifecycle() throws Exception {
     outContent.reset();
     String[] args1 = {"create-alias", "alias1", "--value", "testvalue1", "--master", "master"};
     int rc = 0;

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/BadUrlTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/BadUrlTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/BadUrlTest.java
index 559b2a1..c465585 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/BadUrlTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/BadUrlTest.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.net.URI;
 import java.net.URL;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -205,6 +206,12 @@ public class BadUrlTest {
     EasyMock.expect(gatewayConfig.getGatewayTopologyDir())
         .andReturn(topoDir.toString()).anyTimes();
 
+    EasyMock.expect(gatewayConfig.getGatewayProvidersConfigDir())
+            .andReturn(topoDir.getAbsolutePath() + "/shared-providers").anyTimes();
+
+    EasyMock.expect(gatewayConfig.getGatewayDescriptorsDir())
+            .andReturn(topoDir.getAbsolutePath() + "/descriptors").anyTimes();
+
     EasyMock.expect(gatewayConfig.getGatewayServicesDir())
         .andReturn(serviceUrl.getFile()).anyTimes();
 
@@ -247,6 +254,10 @@ public class BadUrlTest {
     EasyMock.expect(gatewayConfig.getWebsocketIdleTimeout())
         .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_IDLE_TIMEOUT).anyTimes();
 
+    EasyMock.expect(gatewayConfig.getRemoteRegistryConfigurationNames())
+            .andReturn(Collections.emptyList())
+            .anyTimes();
+
     EasyMock.replay(gatewayConfig);
 
     try {

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/WebsocketEchoTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/WebsocketEchoTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/WebsocketEchoTest.java
index 4b0fe08..5d5f280 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/WebsocketEchoTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/WebsocketEchoTest.java
@@ -26,6 +26,7 @@ import java.io.IOException;
 import java.net.URI;
 import java.net.URL;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -290,6 +291,12 @@ public class WebsocketEchoTest {
     EasyMock.expect(gatewayConfig.getGatewayTopologyDir())
         .andReturn(topoDir.toString()).anyTimes();
 
+    EasyMock.expect(gatewayConfig.getGatewayProvidersConfigDir())
+            .andReturn(topoDir.getAbsolutePath() + "/shared-providers").anyTimes();
+
+    EasyMock.expect(gatewayConfig.getGatewayDescriptorsDir())
+            .andReturn(topoDir.getAbsolutePath() + "/descriptors").anyTimes();
+
     EasyMock.expect(gatewayConfig.getGatewayServicesDir())
         .andReturn(serviceUrl.getFile()).anyTimes();
 
@@ -332,6 +339,10 @@ public class WebsocketEchoTest {
     EasyMock.expect(gatewayConfig.getWebsocketIdleTimeout())
         .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_IDLE_TIMEOUT).anyTimes();
 
+    EasyMock.expect(gatewayConfig.getRemoteRegistryConfigurationNames())
+            .andReturn(Collections.emptyList())
+            .anyTimes();
+
     EasyMock.replay(gatewayConfig);
 
     try {

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/WebsocketMultipleConnectionTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/WebsocketMultipleConnectionTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/WebsocketMultipleConnectionTest.java
index 676c98c..7ddada2 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/WebsocketMultipleConnectionTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/WebsocketMultipleConnectionTest.java
@@ -25,6 +25,7 @@ import java.lang.management.MemoryMXBean;
 import java.net.URI;
 import java.net.URL;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
@@ -283,6 +284,12 @@ public class WebsocketMultipleConnectionTest {
     EasyMock.expect(gatewayConfig.getGatewayTopologyDir())
         .andReturn(topoDir.toString()).anyTimes();
 
+    EasyMock.expect(gatewayConfig.getGatewayProvidersConfigDir())
+            .andReturn(topoDir.getAbsolutePath() + "/shared-providers").anyTimes();
+
+    EasyMock.expect(gatewayConfig.getGatewayDescriptorsDir())
+            .andReturn(topoDir.getAbsolutePath() + "/descriptors").anyTimes();
+
     EasyMock.expect(gatewayConfig.getGatewayServicesDir())
         .andReturn(serviceUrl.getFile()).anyTimes();
 
@@ -325,6 +332,10 @@ public class WebsocketMultipleConnectionTest {
     EasyMock.expect(gatewayConfig.getWebsocketIdleTimeout())
         .andReturn(GatewayConfigImpl.DEFAULT_WEBSOCKET_IDLE_TIMEOUT).anyTimes();
 
+    EasyMock.expect(gatewayConfig.getRemoteRegistryConfigurationNames())
+            .andReturn(Collections.emptyList())
+            .anyTimes();
+
     EasyMock.replay(gatewayConfig);
 
     try {

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/pom.xml b/gateway-service-remoteconfig/pom.xml
new file mode 100644
index 0000000..8d06360
--- /dev/null
+++ b/gateway-service-remoteconfig/pom.xml
@@ -0,0 +1,89 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.knox</groupId>
+        <artifactId>gateway</artifactId>
+        <version>1.0.0-SNAPSHOT</version>
+    </parent>
+    <artifactId>gateway-service-remoteconfig</artifactId>
+
+    <name>gateway-service-remoteconfig</name>
+    <description>The gateway service for interacting with remote configuration registries.</description>
+
+    <licenses>
+        <license>
+            <name>The Apache Software License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <dependencies>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-spi</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.zookeeper</groupId>
+            <artifactId>zookeeper</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.curator</groupId>
+            <artifactId>curator-framework</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.curator</groupId>
+            <artifactId>curator-client</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.curator</groupId>
+            <artifactId>curator-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java
new file mode 100644
index 0000000..22e622d
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+
+/**
+ *
+ */
+@Messages(logger="org.apache.hadoop.gateway.service.config.remote")
+public interface RemoteConfigurationMessages {
+
+    @Message(level = MessageLevel.WARN,
+             text = "Multiple remote configuration registries are not currently supported if any of them requires authentication.")
+    void multipleRemoteRegistryConfigurations();
+
+    @Message(level = MessageLevel.ERROR, text = "Failed to resolve the credential alias {0}")
+    void unresolvedCredentialAlias(final String alias);
+
+    @Message(level = MessageLevel.ERROR, text = "An error occurred interacting with the remote configuration registry : {0}")
+    void errorInteractingWithRemoteConfigRegistry(@StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+    @Message(level = MessageLevel.ERROR, text = "An error occurred handling the ACL for remote configuration {0} : {1}")
+    void errorHandlingRemoteConfigACL(final String path,
+                                      @StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java
new file mode 100644
index 0000000..cd58e22
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+import java.util.ServiceLoader;
+
+public class RemoteConfigurationRegistryClientServiceFactory {
+
+    public static RemoteConfigurationRegistryClientService newInstance(GatewayConfig config) {
+        RemoteConfigurationRegistryClientService rcs = null;
+
+        ServiceLoader<RemoteConfigurationRegistryClientServiceProvider> providers =
+                                             ServiceLoader.load(RemoteConfigurationRegistryClientServiceProvider.class);
+        for (RemoteConfigurationRegistryClientServiceProvider provider : providers) {
+            rcs = provider.newInstance();
+            if (rcs != null) {
+                break;
+            }
+        }
+
+        return rcs;
+    }
+
+}
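
The factory above discovers provider implementations through the JDK ServiceLoader, so a provider jar has to
register its implementation class under META-INF/services using the
RemoteConfigurationRegistryClientServiceProvider interface name. A minimal usage sketch of the factory, following
the client-service lifecycle exercised by the tests earlier in this message (gatewayConfig and aliasService are
assumed to be available in the caller):

    // Sketch: obtain, configure and start the registry client service, then look up a named client.
    RemoteConfigurationRegistryClientService clientService =
        RemoteConfigurationRegistryClientServiceFactory.newInstance(gatewayConfig);
    clientService.setAliasService(aliasService);              // supplies credentials for secure registries
    clientService.init(gatewayConfig, Collections.emptyMap());
    clientService.start();
    RemoteConfigurationRegistryClient client = clientService.get("remoteConfigMonitorClient");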

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java
new file mode 100644
index 0000000..ddfc392
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote;
+
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+public interface RemoteConfigurationRegistryClientServiceProvider {
+
+    String getType();
+
+    RemoteConfigurationRegistryClientService newInstance();
+
+}
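
A provider implementing this SPI reports the registry type it handles and constructs the matching client service.
The ZooKeeper provider referenced by the tests is the concrete example in this change set; its body is not shown
here, so the following is only an illustrative sketch of the expected shape:

    public class ZooKeeperClientServiceProvider implements RemoteConfigurationRegistryClientServiceProvider {
        @Override
        public String getType() {
            return ZooKeeperClientService.TYPE; // the "ZooKeeper" registry type used by the tests
        }

        @Override
        public RemoteConfigurationRegistryClientService newInstance() {
            return new CuratorClientService();  // Curator-based ZooKeeperClientService implementation
        }
    }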

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java
new file mode 100644
index 0000000..6409250
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote;
+
+public interface RemoteConfigurationRegistryConfig {
+
+    String getName();
+
+    String getRegistryType();
+
+    String getConnectionString();
+
+    String getNamespace();
+
+    boolean isSecureRegistry();
+
+    String getAuthType(); // digest, kerberos, etc...
+
+    String getPrincipal();
+
+    String getCredentialAlias();
+
+    String getKeytab();
+
+    boolean isUseTicketCache();
+
+    boolean isUseKeyTab();
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java
new file mode 100644
index 0000000..ebcae1b
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.config;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A set of RemoteConfigurationRegistry configurations based on a set of property name-value pairs.
+ */
+class DefaultRemoteConfigurationRegistries extends RemoteConfigurationRegistries {
+
+    private static final String PROPERTY_DELIM       = ";";
+    private static final String PROPERTY_VALUE_DELIM = "=";
+
+    private List<RemoteConfigurationRegistry> configuredRegistries = new ArrayList<>();
+
+    /**
+     * Derive the remote registry configurations from the specified GatewayConfig.
+     *
+     * @param gc The source GatewayConfig
+     */
+    DefaultRemoteConfigurationRegistries(GatewayConfig gc) {
+        List<String> configRegistryNames = gc.getRemoteRegistryConfigurationNames();
+        for (String configRegistryName : configRegistryNames) {
+            configuredRegistries.add(extractConfigForRegistry(gc, configRegistryName));
+        }
+    }
+
+    /**
+     * Extract the configuration for the specified registry configuration name.
+     *
+     * @param gc           The GatewayConfig from which to extract the registry config.
+     * @param registryName The name of the registry config.
+     *
+     * @return The resulting RemoteConfigurationRegistry object, or null.
+     */
+    private static RemoteConfigurationRegistry extractConfigForRegistry(GatewayConfig gc, String registryName) {
+        RemoteConfigurationRegistry result = new RemoteConfigurationRegistry();
+
+        result.setName(registryName);
+
+        Map<String, String> properties = parsePropertyValue(gc.getRemoteRegistryConfiguration(registryName));
+
+        result.setRegistryType(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE));
+        result.setConnectionString(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS));
+        result.setNamespace(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_NAMESPACE));
+        result.setAuthType(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE));
+        result.setPrincipal(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL));
+        result.setCredentialAlias(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS));
+        result.setKeytab(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB));
+        result.setUseKeytab(Boolean.valueOf(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB)));
+        result.setUseTicketCache(Boolean.valueOf(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE)));
+
+        return result;
+    }
+
+    /**
+     * Parse the specified registry config properties String.
+     *
+     * @param value The property value content from GatewayConfig.
+     *
+     * @return A Map of the parsed properties and their respective values.
+     */
+    private static Map<String, String> parsePropertyValue(final String value) {
+        Map<String, String> result = new HashMap<>();
+
+        if (value != null) {
+            String[] props = value.split(PROPERTY_DELIM);
+            for (String prop : props) {
+                String[] split = prop.split(PROPERTY_VALUE_DELIM);
+                String propName  = split[0];
+                String propValue = (split.length > 1) ? split[1] : null;
+                result.put(propName, propValue);
+            }
+        }
+
+        return result;
+    }
+
+    @Override
+    List<RemoteConfigurationRegistry> getRegistryConfigurations() {
+        return configuredRegistries;
+    }
+
+}
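
To make the parsing above concrete: a registry configuration value is a semicolon-delimited list of name=value
pairs. Only the "type" and "address" keys appear elsewhere in this message, so the remaining key names below are
illustrative assumptions for a secure registry:

    // Sketch of what parsePropertyValue(...) yields for a typical configuration value.
    // "type" and "address" are taken from the tests; the auth-related keys are assumed for illustration.
    String value = "type=ZooKeeper;address=host1:2181,host2:2181;authType=Digest;principal=knox;credentialAlias=zkCredential";
    Map<String, String> parsed = parsePropertyValue(value);
    // parsed: {type=ZooKeeper, address=host1:2181,host2:2181, authType=Digest, principal=knox, credentialAlias=zkCredential}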

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistries.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistries.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistries.java
new file mode 100644
index 0000000..fa045c0
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistries.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.config;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.util.ArrayList;
+import java.util.List;
+
+@XmlRootElement(name="remote-configuration-registries")
+class RemoteConfigurationRegistries {
+
+    private List<RemoteConfigurationRegistry> registryConfigurations = new ArrayList<>();
+
+    @XmlElement(name="remote-configuration-registry")
+    List<RemoteConfigurationRegistry> getRegistryConfigurations() {
+        return registryConfigurations;
+    }
+}
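
The JAXB annotations above imply an XML form for externally defined registries. A minimal sketch, using the
element names from @XmlRootElement/@XmlElement in this class and in RemoteConfigurationRegistry (shown later in
this message); the name and address values are illustrative:

    <remote-configuration-registries>
        <remote-configuration-registry>
            <name>sandbox-zookeeper-client</name>
            <type>ZooKeeper</type>
            <address>localhost:2181</address>
        </remote-configuration-registry>
    </remote-configuration-registries>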


[07/49] knox git commit: KNOX-1119 - Pac4J OAuth/OpenID Principal Needs to be Configurable (Andreas Hildebrandt via lmccay)

Posted by mo...@apache.org.
KNOX-1119 - Pac4J OAuth/OpenID Principal Needs to be Configurable (Andreas Hildebrandt via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/6474b61b
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/6474b61b
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/6474b61b

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 6474b61be2a106f0debd4bd274782d10bbb298e2
Parents: eb7d142
Author: Larry McCay <lm...@hortonworks.com>
Authored: Tue Nov 28 23:16:26 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Tue Nov 28 23:16:26 2017 -0500

----------------------------------------------------------------------
 .../gateway/pac4j/filter/Pac4jIdentityAdapter.java | 17 ++++++++++++++++-
 .../hadoop/gateway/pac4j/Pac4jProviderTest.java    |  2 +-
 2 files changed, 17 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/6474b61b/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java b/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
index dfbd8ca..1ec0491 100644
--- a/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
+++ b/gateway-provider-security-pac4j/src/main/java/org/apache/hadoop/gateway/pac4j/filter/Pac4jIdentityAdapter.java
@@ -46,6 +46,8 @@ public class Pac4jIdentityAdapter implements Filter {
 
   private static final Logger logger = LoggerFactory.getLogger(Pac4jIdentityAdapter.class);
 
+  public static final String PAC4J_ID_ATTRIBUTE = "pac4j.id_attribute";
+
   private static AuditService auditService = AuditServiceFactory.getAuditService();
   private static Auditor auditor = auditService.getAuditor(
       AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
@@ -53,8 +55,11 @@ public class Pac4jIdentityAdapter implements Filter {
 
   private String testIdentifier;
 
+  private String idAttribute;
+
   @Override
   public void init( FilterConfig filterConfig ) throws ServletException {
+    idAttribute = filterConfig.getInitParameter(PAC4J_ID_ATTRIBUTE);
   }
 
   public void destroy() {
@@ -72,7 +77,17 @@ public class Pac4jIdentityAdapter implements Filter {
       CommonProfile profile = optional.get();
       logger.debug("User authenticated as: {}", profile);
       manager.remove(true);
-      final String id = profile.getId();
+      String id = null;
+      if (idAttribute != null) {
+        Object attribute = profile.getAttribute(idAttribute);
+        id = (attribute == null) ? null : attribute.toString();
+        if (id == null) {
+          logger.error("Invalid attribute_id: {} configured to be used as principal; falling back to default id", idAttribute);
+        }
+      }
+      if (id == null) {
+        id = profile.getId();
+      }
       testIdentifier = id;
       PrimaryPrincipal pp = new PrimaryPrincipal(id);
       Subject subject = new Subject();

http://git-wip-us.apache.org/repos/asf/knox/blob/6474b61b/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/Pac4jProviderTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/Pac4jProviderTest.java b/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/Pac4jProviderTest.java
index bc33e33..0da156f 100644
--- a/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/Pac4jProviderTest.java
+++ b/gateway-provider-security-pac4j/src/test/java/org/apache/hadoop/gateway/pac4j/Pac4jProviderTest.java
@@ -37,7 +37,6 @@ import javax.servlet.http.*;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import static org.mockito.Mockito.*;
 import static org.junit.Assert.*;
 
@@ -77,6 +76,7 @@ public class Pac4jProviderTest {
         when(config.getServletContext()).thenReturn(context);
         when(config.getInitParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_URL)).thenReturn(PAC4J_CALLBACK_URL);
         when(config.getInitParameter("clientName")).thenReturn(Pac4jDispatcherFilter.TEST_BASIC_AUTH);
+        when(config.getInitParameter(Pac4jIdentityAdapter.PAC4J_ID_ATTRIBUTE)).thenReturn("username");
 
         final Pac4jDispatcherFilter dispatcher = new Pac4jDispatcherFilter();
         dispatcher.init(config);
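
The new pac4j.id_attribute init parameter is what deployers set to choose which profile attribute becomes the
Knox principal; the test above wires it to "username". As a topology sketch, assuming the usual federation role
and pac4j provider name (only the new parameter is shown, alongside whatever other pac4j parameters the
deployment already uses):

    <provider>
        <role>federation</role>
        <name>pac4j</name>
        <enabled>true</enabled>
        <param><name>pac4j.id_attribute</name><value>username</value></param>
    </provider>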


[27/49] knox git commit: KNOX-1083 - HttpClient default timeout will be 20 sec

Posted by mo...@apache.org.
KNOX-1083 - HttpClient default timeout will be 20 sec


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/13287d2c
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/13287d2c
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/13287d2c

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 13287d2c186079aedc2dbaa7bef299845aff0b2a
Parents: 6eacf68
Author: Sandeep More <mo...@apache.org>
Authored: Tue Dec 5 14:41:00 2017 -0500
Committer: Sandeep More <mo...@apache.org>
Committed: Tue Dec 5 14:41:00 2017 -0500

----------------------------------------------------------------------
 .../gateway/config/impl/GatewayConfigImpl.java       |  5 +++--
 .../gateway/config/impl/GatewayConfigImplTest.java   | 15 +++++++++++++++
 2 files changed, 18 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/13287d2c/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
index 17c2552..f6bb9b0 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
@@ -39,6 +39,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
 
 /**
  * The configuration for the Gateway.
@@ -619,7 +620,7 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   @Override
   public int getHttpClientConnectionTimeout() {
     int t = -1;
-    String s = get( HTTP_CLIENT_CONNECTION_TIMEOUT, null );
+    String s = get( HTTP_CLIENT_CONNECTION_TIMEOUT, String.valueOf(TimeUnit.SECONDS.toMillis(20)));
     if ( s != null ) {
       try {
         t = (int)parseNetworkTimeout( s );
@@ -633,7 +634,7 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   @Override
   public int getHttpClientSocketTimeout() {
     int t = -1;
-    String s = get( HTTP_CLIENT_SOCKET_TIMEOUT, null );
+    String s = get( HTTP_CLIENT_SOCKET_TIMEOUT, String.valueOf(TimeUnit.SECONDS.toMillis(20)) );
     if ( s != null ) {
       try {
         t = (int)parseNetworkTimeout( s );

http://git-wip-us.apache.org/repos/asf/knox/blob/13287d2c/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
index cd56f11..2c0b47e 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
@@ -5,12 +5,14 @@ import org.hamcrest.CoreMatchers;
 import org.junit.Test;
 
 import java.util.List;
+import java.util.concurrent.TimeUnit;
 
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.notNullValue;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.hasItems;
 import static org.hamcrest.Matchers.nullValue;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.assertFalse;
@@ -245,4 +247,17 @@ public class GatewayConfigImplTest {
     assertEquals(registryNames.size(), 2);
   }
 
+
+  @Test
+  public void testHTTPDefaultTimeouts() {
+    final GatewayConfigImpl config = new GatewayConfigImpl();
+
+    assertNotEquals(config.getHttpClientConnectionTimeout(), -1);
+    assertNotEquals(config.getHttpClientSocketTimeout(), -1);
+
+    assertEquals(TimeUnit.SECONDS.toMillis(20), config.getHttpClientConnectionTimeout());
+    assertEquals(TimeUnit.SECONDS.toMillis(20), config.getHttpClientSocketTimeout());
+
+  }
+
 }
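
With this change the two HttpClient timeouts default to 20 seconds rather than remaining at -1 when unset;
deployments can still override them explicitly. A sketch, assuming the property names behind
HTTP_CLIENT_CONNECTION_TIMEOUT and HTTP_CLIENT_SOCKET_TIMEOUT (the constants' values are not shown in this diff):

    // Sketch only: overriding the new 20-second defaults (property names are assumptions).
    GatewayConfigImpl config = new GatewayConfigImpl();
    config.set("gateway.httpclient.connectionTimeout", "5000");   // milliseconds
    config.set("gateway.httpclient.socketTimeout", "60000");      // milliseconds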


[46/49] knox git commit: KNOX-998 - Merge from trunk 0.14.0 code

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistry.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistry.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistry.java
deleted file mode 100644
index f3e7dbd..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistry.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.config;
-
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
-
-import javax.xml.bind.annotation.XmlElement;
-
-class RemoteConfigurationRegistry implements RemoteConfigurationRegistryConfig {
-
-    private String name;
-    private String type;
-    private String connectionString;
-    private String namespace;
-    private String authType;
-    private String principal;
-    private String credentialAlias;
-    private String keyTab;
-    private boolean useKeyTab;
-    private boolean useTicketCache;
-
-    RemoteConfigurationRegistry() {
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public void setRegistryType(String type) {
-        this.type = type;
-    }
-
-    public void setConnectionString(String connectionString) {
-        this.connectionString = connectionString;
-    }
-
-    public void setNamespace(String namespace) {
-        this.namespace = namespace;
-    }
-
-    public void setAuthType(String authType) {
-        this.authType = authType;
-    }
-
-    public void setPrincipal(String principal) {
-        this.principal = principal;
-    }
-
-    public void setCredentialAlias(String alias) {
-        this.credentialAlias = alias;
-    }
-
-    public void setUseTicketCache(boolean useTicketCache) {
-        this.useTicketCache = useTicketCache;
-    }
-
-    public void setUseKeytab(boolean useKeytab) {
-        this.useKeyTab = useKeytab;
-    }
-
-    public void setKeytab(String keytab) {
-        this.keyTab = keytab;
-    }
-
-    @XmlElement(name="name")
-    public String getName() {
-        return name;
-    }
-
-    @XmlElement(name="type")
-    public String getRegistryType() {
-        return type;
-    }
-
-    @XmlElement(name="auth-type")
-    public String getAuthType() {
-        return authType;
-    }
-
-    @XmlElement(name="principal")
-    public String getPrincipal() {
-        return principal;
-    }
-
-    @XmlElement(name="credential-alias")
-    public String getCredentialAlias() {
-        return credentialAlias;
-    }
-
-    @Override
-    @XmlElement(name="address")
-    public String getConnectionString() {
-        return connectionString;
-    }
-
-    @Override
-    @XmlElement(name="namespace")
-    public String getNamespace() {
-        return namespace;
-    }
-
-    @Override
-    @XmlElement(name="use-ticket-cache")
-    public boolean isUseTicketCache() {
-        return useTicketCache;
-    }
-
-    @Override
-    @XmlElement(name="use-key-tab")
-    public boolean isUseKeyTab() {
-        return useKeyTab;
-    }
-
-    @Override
-    @XmlElement(name="keytab")
-    public String getKeytab() {
-        return keyTab;
-    }
-
-    @Override
-    public boolean isSecureRegistry() {
-        return (getAuthType() != null);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
deleted file mode 100644
index f9b5ab3..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/CuratorClientService.java
+++ /dev/null
@@ -1,464 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.zk;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.framework.api.ACLProvider;
-import org.apache.curator.framework.imps.DefaultACLProvider;
-import org.apache.curator.framework.recipes.cache.ChildData;
-import org.apache.curator.framework.recipes.cache.NodeCache;
-import org.apache.curator.framework.recipes.cache.NodeCacheListener;
-import org.apache.curator.framework.recipes.cache.PathChildrenCache;
-import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
-import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationMessages;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.ChildEntryListener;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient.EntryListener;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
-import org.apache.hadoop.gateway.service.config.remote.config.RemoteConfigurationRegistriesAccessor;
-import org.apache.hadoop.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.client.ZooKeeperSaslClient;
-import org.apache.zookeeper.data.ACL;
-import org.apache.zookeeper.data.Id;
-import org.apache.zookeeper.data.Stat;
-
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * RemoteConfigurationRegistryClientService implementation that employs the Curator ZooKeeper client framework.
- */
-class CuratorClientService implements ZooKeeperClientService {
-
-    private static final String LOGIN_CONTEXT_NAME_PROPERTY = ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY;
-
-    private static final String DEFAULT_LOGIN_CONTEXT_NAME = "Client";
-
-    private static final RemoteConfigurationMessages log =
-                                                        MessagesFactory.get(RemoteConfigurationMessages.class);
-
-    private Map<String, RemoteConfigurationRegistryClient> clients = new HashMap<>();
-
-    private AliasService aliasService = null;
-
-
-    @Override
-    public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
-
-        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
-
-        // Load the remote registry configurations
-        registryConfigs.addAll(RemoteConfigurationRegistriesAccessor.getRemoteRegistryConfigurations(config));
-
-        // Configure registry authentication
-        RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, aliasService);
-
-        if (registryConfigs.size() > 1) {
-            // Warn about current limit on number of supported client configurations
-            log.multipleRemoteRegistryConfigurations();
-        }
-
-        // Create the clients
-        for (RemoteConfigurationRegistryConfig registryConfig : registryConfigs) {
-            if (TYPE.equalsIgnoreCase(registryConfig.getRegistryType())) {
-                RemoteConfigurationRegistryClient registryClient = createClient(registryConfig);
-                clients.put(registryConfig.getName(), registryClient);
-            }
-        }
-    }
-
-    @Override
-    public void setAliasService(AliasService aliasService) {
-        this.aliasService = aliasService;
-    }
-
-    @Override
-    public void start() throws ServiceLifecycleException {
-    }
-
-    @Override
-    public void stop() throws ServiceLifecycleException {
-    }
-
-    @Override
-    public RemoteConfigurationRegistryClient get(String name) {
-        return clients.get(name);
-    }
-
-
-    private RemoteConfigurationRegistryClient createClient(RemoteConfigurationRegistryConfig config) {
-        ACLProvider aclProvider;
-        if (config.isSecureRegistry()) {
-            configureSasl(config);
-            aclProvider = new SASLOwnerACLProvider();
-        } else {
-            // Clear SASL system property
-            System.clearProperty(LOGIN_CONTEXT_NAME_PROPERTY);
-            aclProvider = new DefaultACLProvider();
-        }
-
-        CuratorFramework client = CuratorFrameworkFactory.builder()
-                                                         .connectString(config.getConnectionString())
-                                                         .retryPolicy(new ExponentialBackoffRetry(1000, 3))
-                                                         .aclProvider(aclProvider)
-                                                         .build();
-        client.start();
-
-        return (new ClientAdapter(client, config));
-    }
-
-
-    private void configureSasl(RemoteConfigurationRegistryConfig config) {
-        String registryName = config.getName();
-        if (registryName == null) {
-            registryName = DEFAULT_LOGIN_CONTEXT_NAME;
-        }
-        System.setProperty(LOGIN_CONTEXT_NAME_PROPERTY, registryName);
-    }
-
-
-    private static final class ClientAdapter implements RemoteConfigurationRegistryClient {
-
-        private static final String DEFAULT_ENCODING = "UTF-8";
-
-        private CuratorFramework delegate;
-
-        private RemoteConfigurationRegistryConfig config;
-
-        private Map<String, NodeCache> entryNodeCaches = new HashMap<>();
-
-        ClientAdapter(CuratorFramework delegate, RemoteConfigurationRegistryConfig config) {
-            this.delegate = delegate;
-            this.config = config;
-        }
-
-        @Override
-        public String getAddress() {
-            return config.getConnectionString();
-        }
-
-        @Override
-        public boolean isAuthenticationConfigured() {
-            return config.isSecureRegistry();
-        }
-
-        @Override
-        public boolean entryExists(String path) {
-            Stat s = null;
-            try {
-                s = delegate.checkExists().forPath(path);
-            } catch (Exception e) {
-                // Ignore
-            }
-            return (s != null);
-        }
-
-        @Override
-        public List<RemoteConfigurationRegistryClient.EntryACL> getACL(String path) {
-            List<RemoteConfigurationRegistryClient.EntryACL> acl = new ArrayList<>();
-            try {
-                List<ACL> zkACL = delegate.getACL().forPath(path);
-                if (zkACL != null) {
-                    for (ACL aclEntry : zkACL) {
-                        RemoteConfigurationRegistryClient.EntryACL entryACL = new ZooKeeperACLAdapter(aclEntry);
-                        acl.add(entryACL);
-                    }
-                }
-            } catch (Exception e) {
-                log.errorHandlingRemoteConfigACL(path, e);
-            }
-            return acl;
-        }
-
-        @Override
-        public void setACL(String path, List<EntryACL> entryACLs) {
-            // Translate the abstract ACLs into ZooKeeper ACLs
-            List<ACL> delegateACLs = new ArrayList<>();
-            for (EntryACL entryACL : entryACLs) {
-                String scheme = entryACL.getType();
-                String id = entryACL.getId();
-                int permissions = 0;
-                if (entryACL.canWrite()) {
-                    permissions = ZooDefs.Perms.ALL;
-                } else if (entryACL.canRead()){
-                    permissions = ZooDefs.Perms.READ;
-                }
-                delegateACLs.add(new ACL(permissions, new Id(scheme, id)));
-            }
-
-            try {
-                // Set the ACLs for the path
-                delegate.setACL().withACL(delegateACLs).forPath(path);
-            } catch (Exception e) {
-                log.errorSettingEntryACL(path, e);
-            }
-        }
-
-        @Override
-        public List<String> listChildEntries(String path) {
-            List<String> result = null;
-            try {
-                result = delegate.getChildren().forPath(path);
-            } catch (Exception e) {
-                log.errorInteractingWithRemoteConfigRegistry(e);
-            }
-            return result;
-        }
-
-        @Override
-        public void addChildEntryListener(String path, ChildEntryListener listener) throws Exception {
-            PathChildrenCache childCache = new PathChildrenCache(delegate, path, false);
-            childCache.getListenable().addListener(new ChildEntryListenerAdapter(this, listener));
-            childCache.start();
-        }
-
-        @Override
-        public void addEntryListener(String path, EntryListener listener) throws Exception {
-            NodeCache nodeCache = new NodeCache(delegate, path);
-            nodeCache.getListenable().addListener(new EntryListenerAdapter(this, nodeCache, listener));
-            nodeCache.start();
-            entryNodeCaches.put(path, nodeCache);
-        }
-
-        @Override
-        public void removeEntryListener(String path) throws Exception {
-            NodeCache nodeCache = entryNodeCaches.remove(path);
-            if (nodeCache != null) {
-                nodeCache.close();
-            }
-        }
-
-        @Override
-        public String getEntryData(String path) {
-            return getEntryData(path, DEFAULT_ENCODING);
-        }
-
-        @Override
-        public String getEntryData(String path, String encoding) {
-            String result = null;
-            try {
-                byte[] data = delegate.getData().forPath(path);
-                if (data != null) {
-                    result = new String(data, Charset.forName(encoding));
-                }
-            } catch (Exception e) {
-                log.errorInteractingWithRemoteConfigRegistry(e);
-            }
-            return result;
-        }
-
-        @Override
-        public void createEntry(String path) {
-            try {
-                if (delegate.checkExists().forPath(path) == null) {
-                    delegate.create().forPath(path);
-                }
-            } catch (Exception e) {
-                log.errorInteractingWithRemoteConfigRegistry(e);
-            }
-        }
-
-        @Override
-        public void createEntry(String path, String data) {
-            createEntry(path, data, DEFAULT_ENCODING);
-        }
-
-        @Override
-        public void createEntry(String path, String data, String encoding) {
-            try {
-                createEntry(path);
-                setEntryData(path, data, encoding);
-            } catch (Exception e) {
-                log.errorInteractingWithRemoteConfigRegistry(e);
-            }
-        }
-
-        @Override
-        public int setEntryData(String path, String data) {
-            return setEntryData(path, data, DEFAULT_ENCODING);
-        }
-
-        @Override
-        public int setEntryData(String path, String data, String encoding) {
-            int version = 0;
-            try {
-                Stat s = delegate.setData().forPath(path, data.getBytes(Charset.forName(encoding)));
-                if (s != null) {
-                    version = s.getVersion();
-                }
-            } catch (Exception e) {
-                log.errorInteractingWithRemoteConfigRegistry(e);
-            }
-            return version;
-        }
-
-        @Override
-        public void deleteEntry(String path) {
-            try {
-                delegate.delete().forPath(path);
-            } catch (Exception e) {
-                log.errorInteractingWithRemoteConfigRegistry(e);
-            }
-        }
-    }
-
-    /**
-     * SASL ACLProvider
-     */
-    private static class SASLOwnerACLProvider implements ACLProvider {
-
-        private final List<ACL> saslACL;
-
-        private SASLOwnerACLProvider() {
-            this.saslACL = ZooDefs.Ids.CREATOR_ALL_ACL; // All permissions for any authenticated user
-        }
-
-        @Override
-        public List<ACL> getDefaultAcl() {
-            return saslACL;
-        }
-
-        @Override
-        public List<ACL> getAclForPath(String path) {
-            return getDefaultAcl();
-        }
-    }
-
-
-    private static final class ChildEntryListenerAdapter implements PathChildrenCacheListener {
-
-        private RemoteConfigurationRegistryClient client;
-        private ChildEntryListener delegate;
-
-        ChildEntryListenerAdapter(RemoteConfigurationRegistryClient client, ChildEntryListener delegate) {
-            this.client = client;
-            this.delegate = delegate;
-        }
-
-        @Override
-        public void childEvent(CuratorFramework curatorFramework, PathChildrenCacheEvent pathChildrenCacheEvent)
-                throws Exception {
-            ChildData childData = pathChildrenCacheEvent.getData();
-            if (childData != null) {
-                ChildEntryListener.Type eventType = adaptType(pathChildrenCacheEvent.getType());
-                if (eventType != null) {
-                    delegate.childEvent(client, eventType, childData.getPath());
-                }
-            }
-        }
-
-        private ChildEntryListener.Type adaptType(PathChildrenCacheEvent.Type type) {
-            ChildEntryListener.Type adapted = null;
-
-            switch(type) {
-                case CHILD_ADDED:
-                    adapted = ChildEntryListener.Type.ADDED;
-                    break;
-                case CHILD_REMOVED:
-                    adapted = ChildEntryListener.Type.REMOVED;
-                    break;
-                case CHILD_UPDATED:
-                    adapted = ChildEntryListener.Type.UPDATED;
-                    break;
-            }
-
-            return adapted;
-        }
-    }
-
-    private static final class EntryListenerAdapter implements NodeCacheListener {
-
-        private RemoteConfigurationRegistryClient client;
-        private EntryListener delegate;
-        private NodeCache nodeCache;
-
-        EntryListenerAdapter(RemoteConfigurationRegistryClient client, NodeCache nodeCache, EntryListener delegate) {
-            this.client = client;
-            this.nodeCache = nodeCache;
-            this.delegate = delegate;
-        }
-
-        @Override
-        public void nodeChanged() throws Exception {
-            String path = null;
-            byte[] data = null;
-
-            ChildData cd = nodeCache.getCurrentData();
-            if (cd != null) {
-                path = cd.getPath();
-                data = cd.getData();
-            }
-
-            if (path != null) {
-                delegate.entryChanged(client, path, data);
-            }
-        }
-    }
-
-    /**
-     * ACL adapter
-     */
-    private static final class ZooKeeperACLAdapter implements RemoteConfigurationRegistryClient.EntryACL {
-        private String type;
-        private String id;
-        private int permissions;
-
-        ZooKeeperACLAdapter(ACL acl) {
-            this.permissions = acl.getPerms();
-            this.type = acl.getId().getScheme();
-            this.id = acl.getId().getId();
-        }
-
-        @Override
-        public String getId() {
-            return id;
-        }
-
-        @Override
-        public String getType() {
-            return type;
-        }
-
-        @Override
-        public Object getPermissions() {
-            return permissions;
-        }
-
-        @Override
-        public boolean canRead() {
-            return (permissions >= ZooDefs.Perms.READ);
-        }
-
-        @Override
-        public boolean canWrite() {
-            return (permissions >= ZooDefs.Perms.WRITE);
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
deleted file mode 100644
index 0b5a693..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.zk;
-
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationMessages;
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.AliasServiceException;
-
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Configuration decorator that adds SASL JAAS configuration to whatever JAAS config is already applied.
- */
-class RemoteConfigurationRegistryJAASConfig extends Configuration {
-
-    // Underlying SASL mechanisms supported
-    enum SASLMechanism {
-        Unsupported,
-        Kerberos,
-        Digest
-    }
-
-    static final Map<String, String> digestLoginModules = new HashMap<>();
-    static {
-        digestLoginModules.put("ZOOKEEPER", "org.apache.zookeeper.server.auth.DigestLoginModule");
-    }
-
-    private static final RemoteConfigurationMessages log = MessagesFactory.get(RemoteConfigurationMessages.class);
-
-    // Cache the current JAAS configuration
-    private Configuration delegate = Configuration.getConfiguration();
-
-    private AliasService aliasService;
-
-    private Map<String, AppConfigurationEntry[]> contextEntries =  new HashMap<>();
-
-    static RemoteConfigurationRegistryJAASConfig configure(List<RemoteConfigurationRegistryConfig> configs, AliasService aliasService) {
-        return new RemoteConfigurationRegistryJAASConfig(configs, aliasService);
-    }
-
-    private RemoteConfigurationRegistryJAASConfig(List<RemoteConfigurationRegistryConfig> configs, AliasService aliasService) {
-        this.aliasService = aliasService;
-
-        // Populate context entries
-        List<AppConfigurationEntry> appConfigEntries = new ArrayList<>();
-        for (RemoteConfigurationRegistryConfig config : configs) {
-            if (config.isSecureRegistry()) {
-                contextEntries.put(config.getName(), createEntries(config));
-            }
-        }
-
-        // If there is at least one context entry, then set this as the client configuration
-        if (!contextEntries.isEmpty()) {
-            // TODO: PJZ: ZooKeeper 3.6.0 will have per-client JAAS Configuration support; Upgrade ASAP!!
-            // For now, set this as the static JAAS configuration
-            Configuration.setConfiguration(this);
-        }
-    }
-
-    @Override
-    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-        AppConfigurationEntry[] result = null;
-
-        // First, try the delegate's context entries
-        result = delegate.getAppConfigurationEntry(name);
-        if (result == null || result.length < 1) {
-            // Try our additional context entries
-            result = contextEntries.get(name);
-        }
-
-        return result;
-    }
-
-    private AppConfigurationEntry[] createEntries(RemoteConfigurationRegistryConfig config) {
-        AppConfigurationEntry[] result = null;
-
-        AppConfigurationEntry entry = createEntry(config);
-        if (entry != null) {
-            // Only supporting a single app config entry per configuration/context
-            result = new AppConfigurationEntry[1];
-            result[0] = createEntry(config);
-        } else {
-            result = new AppConfigurationEntry[0];
-        }
-        return result;
-    }
-
-    private AppConfigurationEntry createEntry(RemoteConfigurationRegistryConfig config) {
-        AppConfigurationEntry entry = null;
-
-        Map<String, String> opts = new HashMap<>();
-        SASLMechanism saslMechanism = getSASLMechanism(config.getAuthType());
-        switch (saslMechanism) {
-            case Digest:
-                // Digest auth options
-                opts.put("username", config.getPrincipal());
-
-                char[] credential = null;
-                if (aliasService != null) {
-                    try {
-                        credential = aliasService.getPasswordFromAliasForGateway(config.getCredentialAlias());
-                    } catch (AliasServiceException e) {
-                        log.unresolvedCredentialAlias(config.getCredentialAlias());
-                    }
-                } else {
-                    throw new IllegalArgumentException("The AliasService is required to resolve credential aliases.");
-                }
-
-                if (credential != null) {
-                    opts.put("password", new String(credential));
-                }
-                break;
-            case Kerberos:
-                opts.put("isUseTicketCache", String.valueOf(config.isUseTicketCache()));
-                opts.put("isUseKeyTab", String.valueOf(config.isUseKeyTab()));
-                opts.put("keyTab", config.getKeytab());
-                opts.put("principal", config.getPrincipal());
-        }
-
-        if (!opts.isEmpty()) {
-            entry = new AppConfigurationEntry(getLoginModuleName(config.getRegistryType(), saslMechanism),
-                                              AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                                              opts);
-        }
-
-        return entry;
-    }
-
-    private static String getLoginModuleName(String registryType, SASLMechanism saslMechanism) {
-        String loginModuleName = null;
-
-        switch (saslMechanism) {
-            case Kerberos:
-                if (System.getProperty("java.vendor").contains("IBM")) {
-                    loginModuleName = "com.ibm.security.auth.module.Krb5LoginModule";
-                } else {
-                    loginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
-                }
-                break;
-            case Digest:
-                loginModuleName = digestLoginModules.get(registryType.toUpperCase());
-        }
-        return loginModuleName;
-    }
-
-    private static SASLMechanism getSASLMechanism(String authType) {
-        SASLMechanism result = SASLMechanism.Unsupported;
-        for (SASLMechanism at : SASLMechanism.values()) {
-            if (at.name().equalsIgnoreCase(authType)) {
-                result = at;
-                break;
-            }
-        }
-        return result;
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientService.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientService.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientService.java
deleted file mode 100644
index c4add4a..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientService.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.zk;
-
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-
-public interface ZooKeeperClientService extends RemoteConfigurationRegistryClientService {
-
-    String TYPE = "ZooKeeper";
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java
deleted file mode 100644
index f30d3da..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/zk/ZooKeeperClientServiceProvider.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.zk;
-
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider;
-
-
-public class ZooKeeperClientServiceProvider implements RemoteConfigurationRegistryClientServiceProvider {
-
-    @Override
-    public String getType() {
-        return ZooKeeperClientService.TYPE;
-    }
-
-    @Override
-    public ZooKeeperClientService newInstance() {
-        return new CuratorClientService();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationMessages.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationMessages.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationMessages.java
new file mode 100644
index 0000000..057c8c5
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationMessages.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote;
+
+import org.apache.knox.gateway.i18n.messages.Message;
+import org.apache.knox.gateway.i18n.messages.MessageLevel;
+import org.apache.knox.gateway.i18n.messages.Messages;
+import org.apache.knox.gateway.i18n.messages.StackTrace;
+
+
+/**
+ *
+ */
+@Messages(logger="org.apache.knox.gateway.service.config.remote")
+public interface RemoteConfigurationMessages {
+
+    @Message(level = MessageLevel.WARN,
+             text = "Multiple remote configuration registries are not currently supported if any of them requires authentication.")
+    void multipleRemoteRegistryConfigurations();
+
+    @Message(level = MessageLevel.ERROR, text = "Failed to resolve the credential alias {0}")
+    void unresolvedCredentialAlias(final String alias);
+
+    @Message(level = MessageLevel.ERROR, text = "An error occurred interacting with the remote configuration registry : {0}")
+    void errorInteractingWithRemoteConfigRegistry(@StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+    @Message(level = MessageLevel.ERROR, text = "An error occurred handling the ACL for remote configuration {0} : {1}")
+    void errorHandlingRemoteConfigACL(final String path,
+                                      @StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+    @Message(level = MessageLevel.ERROR, text = "An error occurred setting the ACL for remote configuration {0} : {1}")
+    void errorSettingEntryACL(final String path,
+                              @StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+}
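
Note (illustrative only, not part of this patch): the @Messages/@Message annotations above drive Knox's generated, i18n-aware logging. A minimal consumption sketch, mirroring how CuratorClientService obtains and uses this interface later in this change; the class name LoggingExample is hypothetical.

    import org.apache.knox.gateway.i18n.messages.MessagesFactory;
    import org.apache.knox.gateway.service.config.remote.RemoteConfigurationMessages;

    class LoggingExample {
        // MessagesFactory supplies a generated implementation whose methods log
        // the @Message text at the declared MessageLevel.
        private static final RemoteConfigurationMessages log =
                MessagesFactory.get(RemoteConfigurationMessages.class);

        void warnAboutAlias(String alias) {
            log.unresolvedCredentialAlias(alias);
        }
    }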

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java
new file mode 100644
index 0000000..f1719b6
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+import java.util.ServiceLoader;
+
+public class RemoteConfigurationRegistryClientServiceFactory {
+
+    public static RemoteConfigurationRegistryClientService newInstance(GatewayConfig config) {
+        RemoteConfigurationRegistryClientService rcs = null;
+
+        ServiceLoader<RemoteConfigurationRegistryClientServiceProvider> providers =
+                                             ServiceLoader.load(RemoteConfigurationRegistryClientServiceProvider.class);
+        for (RemoteConfigurationRegistryClientServiceProvider provider : providers) {
+            rcs = provider.newInstance();
+            if (rcs != null) {
+                break;
+            }
+        }
+
+        return rcs;
+    }
+
+}
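
Note (illustrative only, not part of this patch): the factory above relies on the standard java.util.ServiceLoader mechanism, so a concrete provider such as ZooKeeperClientServiceProvider is expected to be registered through a provider-configuration resource on the classpath (META-INF/services/org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider); that resource is not shown in this excerpt. A hedged usage sketch; the class name RemoteConfigBootstrapExample is hypothetical.

    import org.apache.knox.gateway.config.GatewayConfig;
    import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceFactory;
    import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;

    class RemoteConfigBootstrapExample {
        // Returns the first client service implementation discovered on the
        // classpath, or null when no provider is registered.
        static RemoteConfigurationRegistryClientService create(GatewayConfig config) {
            return RemoteConfigurationRegistryClientServiceFactory.newInstance(config);
        }
    }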

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java
new file mode 100644
index 0000000..8f69e47
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote;
+
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+public interface RemoteConfigurationRegistryClientServiceProvider {
+
+    String getType();
+
+    RemoteConfigurationRegistryClientService newInstance();
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java
new file mode 100644
index 0000000..cbebad7
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote;
+
+public interface RemoteConfigurationRegistryConfig {
+
+    String getName();
+
+    String getRegistryType();
+
+    String getConnectionString();
+
+    String getNamespace();
+
+    boolean isSecureRegistry();
+
+    String getAuthType(); // digest, kerberos, etc...
+
+    String getPrincipal();
+
+    String getCredentialAlias();
+
+    String getKeytab();
+
+    boolean isUseTicketCache();
+
+    boolean isUseKeyTab();
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java
new file mode 100644
index 0000000..0b2f248
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.config;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A set of RemoteConfigurationRegistry configurations based on a set of property name-value pairs.
+ */
+class DefaultRemoteConfigurationRegistries extends RemoteConfigurationRegistries {
+
+    private static final String PROPERTY_DELIM       = ";";
+    private static final String PROPERTY_VALUE_DELIM = "=";
+
+    private List<RemoteConfigurationRegistry> configuredRegistries = new ArrayList<>();
+
+    /**
+     * Derive the remote registry configurations from the specified GatewayConfig.
+     *
+     * @param gc The source GatewayConfig
+     */
+    DefaultRemoteConfigurationRegistries(GatewayConfig gc) {
+        List<String> configRegistryNames = gc.getRemoteRegistryConfigurationNames();
+        for (String configRegistryName : configRegistryNames) {
+            configuredRegistries.add(extractConfigForRegistry(gc, configRegistryName));
+        }
+    }
+
+    /**
+     * Extract the configuration for the specified registry configuration name.
+     *
+     * @param gc           The GatewayConfig from which to extract the registry config.
+     * @param registryName The name of the registry config.
+     *
+     * @return The resulting RemoteConfigurationRegistry object, or null.
+     */
+    private static RemoteConfigurationRegistry extractConfigForRegistry(GatewayConfig gc, String registryName) {
+        RemoteConfigurationRegistry result = new RemoteConfigurationRegistry();
+
+        result.setName(registryName);
+
+        Map<String, String> properties = parsePropertyValue(gc.getRemoteRegistryConfiguration(registryName));
+
+        result.setRegistryType(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE));
+        result.setConnectionString(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS));
+        result.setNamespace(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_NAMESPACE));
+        result.setAuthType(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE));
+        result.setPrincipal(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL));
+        result.setCredentialAlias(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS));
+        result.setKeytab(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB));
+        result.setUseKeytab(Boolean.valueOf(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB)));
+        result.setUseTicketCache(Boolean.valueOf(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE)));
+
+        return result;
+    }
+
+    /**
+     * Parse the specified registry config properties String.
+     *
+     * @param value The property value content from GatewayConfig.
+     *
+     * @return A Map of the parsed properties and their respective values.
+     */
+    private static Map<String, String> parsePropertyValue(final String value) {
+        Map<String, String> result = new HashMap<>();
+
+        if (value != null) {
+            String[] props = value.split(PROPERTY_DELIM);
+            for (String prop : props) {
+                String[] split = prop.split(PROPERTY_VALUE_DELIM);
+                String propName  = split[0];
+                String propValue = (split.length > 1) ? split[1] : null;
+                result.put(propName, propValue);
+            }
+        }
+
+        return result;
+    }
+
+    @Override
+    List<RemoteConfigurationRegistry> getRegistryConfigurations() {
+        return configuredRegistries;
+    }
+
+}
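
Note (illustrative only, not part of this patch): each registry definition is read from a GatewayConfig property whose value is a semicolon-delimited list of name=value pairs, matching the delimiters above. A small sketch of that tokenization; the key names and values are placeholders for the actual GatewayConfig.REMOTE_CONFIG_REGISTRY_* constant values, which are not visible in this excerpt.

    import java.util.HashMap;
    import java.util.Map;

    class RegistryPropertyParseExample {
        public static void main(String[] args) {
            String value = "type=ZooKeeper;address=host1:2181,host2:2181;authType=Digest;"
                         + "principal=knox;credentialAlias=zk-pass";
            Map<String, String> props = new HashMap<>();
            for (String pair : value.split(";")) {
                String[] nv = pair.split("=");
                props.put(nv[0], (nv.length > 1) ? nv[1] : null);
            }
            // e.g. props.get("address") -> "host1:2181,host2:2181"
            System.out.println(props);
        }
    }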

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistries.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistries.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistries.java
new file mode 100644
index 0000000..16434aa
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistries.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.config;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.util.ArrayList;
+import java.util.List;
+
+@XmlRootElement(name="remote-configuration-registries")
+class RemoteConfigurationRegistries {
+
+    private List<RemoteConfigurationRegistry> registryConfigurations = new ArrayList<>();
+
+    @XmlElement(name="remote-configuration-registry")
+    List<RemoteConfigurationRegistry> getRegistryConfigurations() {
+        return registryConfigurations;
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java
new file mode 100644
index 0000000..c32816e
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.config;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+public class RemoteConfigurationRegistriesAccessor {
+
+    // System property for specifying a reference to an XML configuration external to the gateway config
+    private static final String XML_CONFIG_REFERENCE_SYSTEM_PROPERTY_NAME =
+                                                                "org.apache.knox.gateway.remote.registry.config.file";
+
+
+    public static List<RemoteConfigurationRegistryConfig> getRemoteRegistryConfigurations(GatewayConfig gatewayConfig) {
+        List<RemoteConfigurationRegistryConfig> result = new ArrayList<>();
+
+        boolean useReferencedFile = false;
+
+        // First check for the system property pointing to a valid XML config for the remote registries
+        String remoteConfigRegistryConfigFilename = System.getProperty(XML_CONFIG_REFERENCE_SYSTEM_PROPERTY_NAME);
+        if (remoteConfigRegistryConfigFilename != null) {
+            File remoteConfigRegistryConfigFile = new File(remoteConfigRegistryConfigFilename);
+            if (remoteConfigRegistryConfigFile.exists()) {
+                useReferencedFile = true;
+                // Parse the file, and build the registry config set
+                result.addAll(RemoteConfigurationRegistriesParser.getConfig(remoteConfigRegistryConfigFilename));
+            }
+        }
+
+        // If the system property was not set to a valid reference to another config file, then try to derive the
+        // registry configurations from the gateway config.
+        if (!useReferencedFile) {
+            RemoteConfigurationRegistries remoteConfigRegistries =
+                                                            new DefaultRemoteConfigurationRegistries(gatewayConfig);
+            result.addAll(remoteConfigRegistries.getRegistryConfigurations());
+        }
+
+        return result;
+    }
+
+}
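
Note (illustrative only, not part of this patch): the accessor above prefers an external XML definition whenever the org.apache.knox.gateway.remote.registry.config.file system property points at an existing file, and otherwise derives the registry configurations from GatewayConfig. A brief sketch; the file path is hypothetical.

    class ExternalRegistryConfigExample {
        public static void main(String[] args) {
            // With this property set to a readable file, getRemoteRegistryConfigurations(...)
            // parses that file instead of consulting the gateway configuration properties.
            System.setProperty("org.apache.knox.gateway.remote.registry.config.file",
                               "/etc/knox/conf/remote-registries.xml");
        }
    }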

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java
new file mode 100644
index 0000000..f6347f8
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.config;
+
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+class RemoteConfigurationRegistriesParser {
+
+    static List<RemoteConfigurationRegistryConfig> getConfig(String configFilename) {
+        List<RemoteConfigurationRegistryConfig> result = new ArrayList<>();
+
+        File file = new File(configFilename);
+
+        try {
+            JAXBContext jaxbContext = JAXBContext.newInstance(RemoteConfigurationRegistries.class);
+            Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
+            RemoteConfigurationRegistries parsedContent = (RemoteConfigurationRegistries) jaxbUnmarshaller.unmarshal(file);
+            if (parsedContent != null) {
+                result.addAll(parsedContent.getRegistryConfigurations());
+            }
+        } catch (JAXBException e) {
+            e.printStackTrace();
+        }
+
+        return result;
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistry.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistry.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistry.java
new file mode 100644
index 0000000..1fdbd9e
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/config/RemoteConfigurationRegistry.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.config;
+
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+
+import javax.xml.bind.annotation.XmlElement;
+
+class RemoteConfigurationRegistry implements RemoteConfigurationRegistryConfig {
+
+    private String name;
+    private String type;
+    private String connectionString;
+    private String namespace;
+    private String authType;
+    private String principal;
+    private String credentialAlias;
+    private String keyTab;
+    private boolean useKeyTab;
+    private boolean useTicketCache;
+
+    RemoteConfigurationRegistry() {
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public void setRegistryType(String type) {
+        this.type = type;
+    }
+
+    public void setConnectionString(String connectionString) {
+        this.connectionString = connectionString;
+    }
+
+    public void setNamespace(String namespace) {
+        this.namespace = namespace;
+    }
+
+    public void setAuthType(String authType) {
+        this.authType = authType;
+    }
+
+    public void setPrincipal(String principal) {
+        this.principal = principal;
+    }
+
+    public void setCredentialAlias(String alias) {
+        this.credentialAlias = alias;
+    }
+
+    public void setUseTicketCache(boolean useTicketCache) {
+        this.useTicketCache = useTicketCache;
+    }
+
+    public void setUseKeytab(boolean useKeytab) {
+        this.useKeyTab = useKeytab;
+    }
+
+    public void setKeytab(String keytab) {
+        this.keyTab = keytab;
+    }
+
+    @XmlElement(name="name")
+    public String getName() {
+        return name;
+    }
+
+    @XmlElement(name="type")
+    public String getRegistryType() {
+        return type;
+    }
+
+    @XmlElement(name="auth-type")
+    public String getAuthType() {
+        return authType;
+    }
+
+    @XmlElement(name="principal")
+    public String getPrincipal() {
+        return principal;
+    }
+
+    @XmlElement(name="credential-alias")
+    public String getCredentialAlias() {
+        return credentialAlias;
+    }
+
+    @Override
+    @XmlElement(name="address")
+    public String getConnectionString() {
+        return connectionString;
+    }
+
+    @Override
+    @XmlElement(name="namespace")
+    public String getNamespace() {
+        return namespace;
+    }
+
+    @Override
+    @XmlElement(name="use-ticket-cache")
+    public boolean isUseTicketCache() {
+        return useTicketCache;
+    }
+
+    @Override
+    @XmlElement(name="use-key-tab")
+    public boolean isUseKeyTab() {
+        return useKeyTab;
+    }
+
+    @Override
+    @XmlElement(name="keytab")
+    public String getKeytab() {
+        return keyTab;
+    }
+
+    @Override
+    public boolean isSecureRegistry() {
+        return (getAuthType() != null);
+    }
+
+}
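
Note (illustrative only, not part of this patch): combining the JAXB annotations on RemoteConfigurationRegistries and RemoteConfigurationRegistry, an external registries file referenced by the system property above would be expected to look roughly like the sketch below; all values are placeholders, and only the digest-related elements are shown (keytab, use-ticket-cache and use-key-tab apply to the Kerberos case).

    <remote-configuration-registries>
      <remote-configuration-registry>
        <name>sandbox-zookeeper-client</name>
        <type>ZooKeeper</type>
        <address>host1:2181,host2:2181</address>
        <namespace>/knox/config</namespace>
        <auth-type>Digest</auth-type>
        <principal>knox</principal>
        <credential-alias>zookeeper-credential</credential-alias>
      </remote-configuration-registry>
    </remote-configuration-registries>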

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/CuratorClientService.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/CuratorClientService.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/CuratorClientService.java
new file mode 100644
index 0000000..b97a2c6
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/CuratorClientService.java
@@ -0,0 +1,464 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.zk;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.framework.api.ACLProvider;
+import org.apache.curator.framework.imps.DefaultACLProvider;
+import org.apache.curator.framework.recipes.cache.ChildData;
+import org.apache.curator.framework.recipes.cache.NodeCache;
+import org.apache.curator.framework.recipes.cache.NodeCacheListener;
+import org.apache.curator.framework.recipes.cache.PathChildrenCache;
+import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
+import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationMessages;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient.ChildEntryListener;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient.EntryListener;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient;
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+import org.apache.knox.gateway.service.config.remote.config.RemoteConfigurationRegistriesAccessor;
+import org.apache.knox.gateway.services.ServiceLifecycleException;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.client.ZooKeeperSaslClient;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+import org.apache.zookeeper.data.Stat;
+
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * RemoteConfigurationRegistryClientService implementation that employs the Curator ZooKeeper client framework.
+ */
+class CuratorClientService implements ZooKeeperClientService {
+
+    private static final String LOGIN_CONTEXT_NAME_PROPERTY = ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY;
+
+    private static final String DEFAULT_LOGIN_CONTEXT_NAME = "Client";
+
+    private static final RemoteConfigurationMessages log =
+                                                        MessagesFactory.get(RemoteConfigurationMessages.class);
+
+    private Map<String, RemoteConfigurationRegistryClient> clients = new HashMap<>();
+
+    private AliasService aliasService = null;
+
+
+    @Override
+    public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
+
+        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
+
+        // Load the remote registry configurations
+        registryConfigs.addAll(RemoteConfigurationRegistriesAccessor.getRemoteRegistryConfigurations(config));
+
+        // Configure registry authentication
+        RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, aliasService);
+
+        if (registryConfigs.size() > 1) {
+            // Warn about current limit on number of supported client configurations
+            log.multipleRemoteRegistryConfigurations();
+        }
+
+        // Create the clients
+        for (RemoteConfigurationRegistryConfig registryConfig : registryConfigs) {
+            if (TYPE.equalsIgnoreCase(registryConfig.getRegistryType())) {
+                RemoteConfigurationRegistryClient registryClient = createClient(registryConfig);
+                clients.put(registryConfig.getName(), registryClient);
+            }
+        }
+    }
+
+    @Override
+    public void setAliasService(AliasService aliasService) {
+        this.aliasService = aliasService;
+    }
+
+    @Override
+    public void start() throws ServiceLifecycleException {
+    }
+
+    @Override
+    public void stop() throws ServiceLifecycleException {
+    }
+
+    @Override
+    public RemoteConfigurationRegistryClient get(String name) {
+        return clients.get(name);
+    }
+
+
+    private RemoteConfigurationRegistryClient createClient(RemoteConfigurationRegistryConfig config) {
+        ACLProvider aclProvider;
+        if (config.isSecureRegistry()) {
+            configureSasl(config);
+            aclProvider = new SASLOwnerACLProvider();
+        } else {
+            // Clear SASL system property
+            System.clearProperty(LOGIN_CONTEXT_NAME_PROPERTY);
+            aclProvider = new DefaultACLProvider();
+        }
+
+        CuratorFramework client = CuratorFrameworkFactory.builder()
+                                                         .connectString(config.getConnectionString())
+                                                         .retryPolicy(new ExponentialBackoffRetry(1000, 3))
+                                                         .aclProvider(aclProvider)
+                                                         .build();
+        client.start();
+
+        return (new ClientAdapter(client, config));
+    }
+
+
+    private void configureSasl(RemoteConfigurationRegistryConfig config) {
+        String registryName = config.getName();
+        if (registryName == null) {
+            registryName = DEFAULT_LOGIN_CONTEXT_NAME;
+        }
+        System.setProperty(LOGIN_CONTEXT_NAME_PROPERTY, registryName);
+    }
+
+
+    private static final class ClientAdapter implements RemoteConfigurationRegistryClient {
+
+        private static final String DEFAULT_ENCODING = "UTF-8";
+
+        private CuratorFramework delegate;
+
+        private RemoteConfigurationRegistryConfig config;
+
+        private Map<String, NodeCache> entryNodeCaches = new HashMap<>();
+
+        ClientAdapter(CuratorFramework delegate, RemoteConfigurationRegistryConfig config) {
+            this.delegate = delegate;
+            this.config = config;
+        }
+
+        @Override
+        public String getAddress() {
+            return config.getConnectionString();
+        }
+
+        @Override
+        public boolean isAuthenticationConfigured() {
+            return config.isSecureRegistry();
+        }
+
+        @Override
+        public boolean entryExists(String path) {
+            Stat s = null;
+            try {
+                s = delegate.checkExists().forPath(path);
+            } catch (Exception e) {
+                // Ignore
+            }
+            return (s != null);
+        }
+
+        @Override
+        public List<RemoteConfigurationRegistryClient.EntryACL> getACL(String path) {
+            List<RemoteConfigurationRegistryClient.EntryACL> acl = new ArrayList<>();
+            try {
+                List<ACL> zkACL = delegate.getACL().forPath(path);
+                if (zkACL != null) {
+                    for (ACL aclEntry : zkACL) {
+                        RemoteConfigurationRegistryClient.EntryACL entryACL = new ZooKeeperACLAdapter(aclEntry);
+                        acl.add(entryACL);
+                    }
+                }
+            } catch (Exception e) {
+                log.errorHandlingRemoteConfigACL(path, e);
+            }
+            return acl;
+        }
+
+        @Override
+        public void setACL(String path, List<EntryACL> entryACLs) {
+            // Translate the abstract ACLs into ZooKeeper ACLs
+            List<ACL> delegateACLs = new ArrayList<>();
+            for (EntryACL entryACL : entryACLs) {
+                String scheme = entryACL.getType();
+                String id = entryACL.getId();
+                int permissions = 0;
+                if (entryACL.canWrite()) {
+                    permissions = ZooDefs.Perms.ALL;
+                } else if (entryACL.canRead()){
+                    permissions = ZooDefs.Perms.READ;
+                }
+                delegateACLs.add(new ACL(permissions, new Id(scheme, id)));
+            }
+
+            try {
+                // Set the ACLs for the path
+                delegate.setACL().withACL(delegateACLs).forPath(path);
+            } catch (Exception e) {
+                log.errorSettingEntryACL(path, e);
+            }
+        }
+
+        @Override
+        public List<String> listChildEntries(String path) {
+            List<String> result = null;
+            try {
+                result = delegate.getChildren().forPath(path);
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+            return result;
+        }
+
+        @Override
+        public void addChildEntryListener(String path, ChildEntryListener listener) throws Exception {
+            PathChildrenCache childCache = new PathChildrenCache(delegate, path, false);
+            childCache.getListenable().addListener(new ChildEntryListenerAdapter(this, listener));
+            childCache.start();
+        }
+
+        @Override
+        public void addEntryListener(String path, EntryListener listener) throws Exception {
+            NodeCache nodeCache = new NodeCache(delegate, path);
+            nodeCache.getListenable().addListener(new EntryListenerAdapter(this, nodeCache, listener));
+            nodeCache.start();
+            entryNodeCaches.put(path, nodeCache);
+        }
+
+        @Override
+        public void removeEntryListener(String path) throws Exception {
+            NodeCache nodeCache = entryNodeCaches.remove(path);
+            if (nodeCache != null) {
+                nodeCache.close();
+            }
+        }
+
+        @Override
+        public String getEntryData(String path) {
+            return getEntryData(path, DEFAULT_ENCODING);
+        }
+
+        @Override
+        public String getEntryData(String path, String encoding) {
+            String result = null;
+            try {
+                byte[] data = delegate.getData().forPath(path);
+                if (data != null) {
+                    result = new String(data, Charset.forName(encoding));
+                }
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+            return result;
+        }
+
+        @Override
+        public void createEntry(String path) {
+            try {
+                if (delegate.checkExists().forPath(path) == null) {
+                    delegate.create().forPath(path);
+                }
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+        }
+
+        @Override
+        public void createEntry(String path, String data) {
+            createEntry(path, data, DEFAULT_ENCODING);
+        }
+
+        @Override
+        public void createEntry(String path, String data, String encoding) {
+            try {
+                createEntry(path);
+                setEntryData(path, data, encoding);
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+        }
+
+        @Override
+        public int setEntryData(String path, String data) {
+            return setEntryData(path, data, DEFAULT_ENCODING);
+        }
+
+        @Override
+        public int setEntryData(String path, String data, String encoding) {
+            int version = 0;
+            try {
+                Stat s = delegate.setData().forPath(path, data.getBytes(Charset.forName(encoding)));
+                if (s != null) {
+                    version = s.getVersion();
+                }
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+            return version;
+        }
+
+        @Override
+        public void deleteEntry(String path) {
+            try {
+                delegate.delete().forPath(path);
+            } catch (Exception e) {
+                log.errorInteractingWithRemoteConfigRegistry(e);
+            }
+        }
+    }
+
+    /**
+     * SASL ACLProvider
+     */
+    private static class SASLOwnerACLProvider implements ACLProvider {
+
+        private final List<ACL> saslACL;
+
+        private SASLOwnerACLProvider() {
+            this.saslACL = ZooDefs.Ids.CREATOR_ALL_ACL; // All permissions for the node creator's authenticated id
+        }
+
+        @Override
+        public List<ACL> getDefaultAcl() {
+            return saslACL;
+        }
+
+        @Override
+        public List<ACL> getAclForPath(String path) {
+            return getDefaultAcl();
+        }
+    }
+
+
+    private static final class ChildEntryListenerAdapter implements PathChildrenCacheListener {
+
+        private RemoteConfigurationRegistryClient client;
+        private ChildEntryListener delegate;
+
+        ChildEntryListenerAdapter(RemoteConfigurationRegistryClient client, ChildEntryListener delegate) {
+            this.client = client;
+            this.delegate = delegate;
+        }
+
+        @Override
+        public void childEvent(CuratorFramework curatorFramework, PathChildrenCacheEvent pathChildrenCacheEvent)
+                throws Exception {
+            ChildData childData = pathChildrenCacheEvent.getData();
+            if (childData != null) {
+                ChildEntryListener.Type eventType = adaptType(pathChildrenCacheEvent.getType());
+                if (eventType != null) {
+                    delegate.childEvent(client, eventType, childData.getPath());
+                }
+            }
+        }
+
+        private ChildEntryListener.Type adaptType(PathChildrenCacheEvent.Type type) {
+            ChildEntryListener.Type adapted = null;
+
+            switch(type) {
+                case CHILD_ADDED:
+                    adapted = ChildEntryListener.Type.ADDED;
+                    break;
+                case CHILD_REMOVED:
+                    adapted = ChildEntryListener.Type.REMOVED;
+                    break;
+                case CHILD_UPDATED:
+                    adapted = ChildEntryListener.Type.UPDATED;
+                    break;
+            }
+
+            return adapted;
+        }
+    }
+
+    private static final class EntryListenerAdapter implements NodeCacheListener {
+
+        private RemoteConfigurationRegistryClient client;
+        private EntryListener delegate;
+        private NodeCache nodeCache;
+
+        EntryListenerAdapter(RemoteConfigurationRegistryClient client, NodeCache nodeCache, EntryListener delegate) {
+            this.client = client;
+            this.nodeCache = nodeCache;
+            this.delegate = delegate;
+        }
+
+        @Override
+        public void nodeChanged() throws Exception {
+            String path = null;
+            byte[] data = null;
+
+            ChildData cd = nodeCache.getCurrentData();
+            if (cd != null) {
+                path = cd.getPath();
+                data = cd.getData();
+            }
+
+            if (path != null) {
+                delegate.entryChanged(client, path, data);
+            }
+        }
+    }
+
+    /**
+     * ACL adapter
+     */
+    private static final class ZooKeeperACLAdapter implements RemoteConfigurationRegistryClient.EntryACL {
+        private String type;
+        private String id;
+        private int permissions;
+
+        ZooKeeperACLAdapter(ACL acl) {
+            this.permissions = acl.getPerms();
+            this.type = acl.getId().getScheme();
+            this.id = acl.getId().getId();
+        }
+
+        @Override
+        public String getId() {
+            return id;
+        }
+
+        @Override
+        public String getType() {
+            return type;
+        }
+
+        @Override
+        public Object getPermissions() {
+            return permissions;
+        }
+
+        @Override
+        public boolean canRead() {
+            return ((permissions & ZooDefs.Perms.READ) != 0);
+        }
+
+        @Override
+        public boolean canWrite() {
+            return ((permissions & ZooDefs.Perms.WRITE) != 0);
+        }
+    }
+
+}
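
To make the adapter's surface concrete, here is a small usage sketch against the RemoteConfigurationRegistryClient methods implemented above. The registry name and znode paths are hypothetical, and the EntryListener callback signature is inferred from EntryListenerAdapter (assuming entryChanged is its only abstract method); everything else mirrors calls that appear in this file.

    // Sketch only: assumes an already init()'d ZooKeeperClientService.
    import org.apache.knox.gateway.service.config.remote.zk.ZooKeeperClientService;
    import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient;
    import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient.EntryListener;

    import java.nio.charset.StandardCharsets;

    public class RemoteRegistryClientSketch {

        static void demo(ZooKeeperClientService service) throws Exception {
            // Clients are keyed by the registry name from gateway configuration (see init() above).
            RemoteConfigurationRegistryClient client = service.get("sandbox-zk");

            String path = "/knox/config/shared/sandbox";
            if (!client.entryExists(path)) {
                client.createEntry(path, "initial-content");             // create the znode, then write UTF-8 data
            }
            int version = client.setEntryData(path, "updated-content");  // returns the resulting Stat version
            String data = client.getEntryData(path);                     // read back as UTF-8
            System.out.println("v" + version + ": " + data);

            // React to later changes; the callback is driven by the NodeCache in EntryListenerAdapter.
            client.addEntryListener(path, new EntryListener() {
                @Override
                public void entryChanged(RemoteConfigurationRegistryClient c, String p, byte[] d) {
                    System.out.println(p + " changed to " + (d == null ? "<empty>" : new String(d, StandardCharsets.UTF_8)));
                }
            });
        }
    }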

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
new file mode 100644
index 0000000..f75634b
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfig.java
@@ -0,0 +1,179 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.zk;
+
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationMessages;
+import org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.AliasServiceException;
+
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Configuration decorator that adds SASL JAAS configuration to whatever JAAS config is already applied.
+ */
+class RemoteConfigurationRegistryJAASConfig extends Configuration {
+
+    // Underlying SASL mechanisms supported
+    enum SASLMechanism {
+        Unsupported,
+        Kerberos,
+        Digest
+    }
+
+    static final Map<String, String> digestLoginModules = new HashMap<>();
+    static {
+        digestLoginModules.put("ZOOKEEPER", "org.apache.zookeeper.server.auth.DigestLoginModule");
+    }
+
+    private static final RemoteConfigurationMessages log = MessagesFactory.get(RemoteConfigurationMessages.class);
+
+    // Cache the current JAAS configuration
+    private Configuration delegate = Configuration.getConfiguration();
+
+    private AliasService aliasService;
+
+    private Map<String, AppConfigurationEntry[]> contextEntries =  new HashMap<>();
+
+    static RemoteConfigurationRegistryJAASConfig configure(List<RemoteConfigurationRegistryConfig> configs, AliasService aliasService) {
+        return new RemoteConfigurationRegistryJAASConfig(configs, aliasService);
+    }
+
+    private RemoteConfigurationRegistryJAASConfig(List<RemoteConfigurationRegistryConfig> configs, AliasService aliasService) {
+        this.aliasService = aliasService;
+
+        // Populate context entries
+        for (RemoteConfigurationRegistryConfig config : configs) {
+            if (config.isSecureRegistry()) {
+                contextEntries.put(config.getName(), createEntries(config));
+            }
+        }
+
+        // If there is at least one context entry, then set this as the client configuration
+        if (!contextEntries.isEmpty()) {
+            // TODO: PJZ: ZooKeeper 3.6.0 will have per-client JAAS Configuration support; Upgrade ASAP!!
+            // For now, set this as the static JAAS configuration
+            Configuration.setConfiguration(this);
+        }
+    }
+
+    @Override
+    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+        AppConfigurationEntry[] result = null;
+
+        // First, try the delegate's context entries
+        result = delegate.getAppConfigurationEntry(name);
+        if (result == null || result.length < 1) {
+            // Try our additional context entries
+            result = contextEntries.get(name);
+        }
+
+        return result;
+    }
+
+    private AppConfigurationEntry[] createEntries(RemoteConfigurationRegistryConfig config) {
+        AppConfigurationEntry[] result = null;
+
+        AppConfigurationEntry entry = createEntry(config);
+        if (entry != null) {
+            // Only supporting a single app config entry per configuration/context
+            result = new AppConfigurationEntry[1];
+            result[0] = entry;
+        } else {
+            result = new AppConfigurationEntry[0];
+        }
+        return result;
+    }
+
+    private AppConfigurationEntry createEntry(RemoteConfigurationRegistryConfig config) {
+        AppConfigurationEntry entry = null;
+
+        Map<String, String> opts = new HashMap<>();
+        SASLMechanism saslMechanism = getSASLMechanism(config.getAuthType());
+        switch (saslMechanism) {
+            case Digest:
+                // Digest auth options
+                opts.put("username", config.getPrincipal());
+
+                char[] credential = null;
+                if (aliasService != null) {
+                    try {
+                        credential = aliasService.getPasswordFromAliasForGateway(config.getCredentialAlias());
+                    } catch (AliasServiceException e) {
+                        log.unresolvedCredentialAlias(config.getCredentialAlias());
+                    }
+                } else {
+                    throw new IllegalArgumentException("The AliasService is required to resolve credential aliases.");
+                }
+
+                if (credential != null) {
+                    opts.put("password", new String(credential));
+                }
+                break;
+            case Kerberos:
+                opts.put("isUseTicketCache", String.valueOf(config.isUseTicketCache()));
+                opts.put("isUseKeyTab", String.valueOf(config.isUseKeyTab()));
+                opts.put("keyTab", config.getKeytab());
+                opts.put("principal", config.getPrincipal());
+        }
+
+        if (!opts.isEmpty()) {
+            entry = new AppConfigurationEntry(getLoginModuleName(config.getRegistryType(), saslMechanism),
+                                              AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                                              opts);
+        }
+
+        return entry;
+    }
+
+    private static String getLoginModuleName(String registryType, SASLMechanism saslMechanism) {
+        String loginModuleName = null;
+
+        switch (saslMechanism) {
+            case Kerberos:
+                if (System.getProperty("java.vendor").contains("IBM")) {
+                    loginModuleName = "com.ibm.security.auth.module.Krb5LoginModule";
+                } else {
+                    loginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
+                }
+                break;
+            case Digest:
+                loginModuleName = digestLoginModules.get(registryType.toUpperCase());
+        }
+        return loginModuleName;
+    }
+
+    private static SASLMechanism getSASLMechanism(String authType) {
+        SASLMechanism result = SASLMechanism.Unsupported;
+        for (SASLMechanism at : SASLMechanism.values()) {
+            if (at.name().equalsIgnoreCase(authType)) {
+                result = at;
+                break;
+            }
+        }
+        return result;
+    }
+
+
+}
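
Because this decorator installs itself as the process-wide JAAS Configuration (until ZooKeeper supports per-client JAAS configs, per the TODO above), the entry it builds for a secured registry can be inspected through the standard JAAS API. The sketch below is a minimal illustration assuming a hypothetical registry/context name; for a Digest-secured ZooKeeper registry it would print org.apache.zookeeper.server.auth.DigestLoginModule together with the username and resolved password options assembled in createEntry() above.

    // Sketch only: run after RemoteConfigurationRegistryJAASConfig.configure(...) has executed.
    import javax.security.auth.login.AppConfigurationEntry;
    import javax.security.auth.login.Configuration;

    public class JaasEntrySketch {
        public static void main(String[] args) {
            // CuratorClientService.configureSasl(...) points the ZooKeeper client at this
            // context name via ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY.
            String contextName = "sandbox-zk";   // hypothetical registry name

            AppConfigurationEntry[] entries =
                Configuration.getConfiguration().getAppConfigurationEntry(contextName);
            if (entries != null) {
                for (AppConfigurationEntry entry : entries) {
                    // Prints the resolved login module and its option map (includes credentials).
                    System.out.println(entry.getLoginModuleName() + " " + entry.getOptions());
                }
            }
        }
    }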

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/ZooKeeperClientService.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/ZooKeeperClientService.java b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/ZooKeeperClientService.java
new file mode 100644
index 0000000..17c93e0
--- /dev/null
+++ b/gateway-service-remoteconfig/src/main/java/org/apache/knox/gateway/service/config/remote/zk/ZooKeeperClientService.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote.zk;
+
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+public interface ZooKeeperClientService extends RemoteConfigurationRegistryClientService {
+
+    String TYPE = "ZooKeeper";
+
+}


[15/49] knox git commit: KNOX-1128 - Readonly protection for generated topologies in Knox Admin UI

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.map
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.map b/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.map
deleted file mode 100644
index 0cc247a..0000000
--- a/gateway-applications/src/main/resources/applications/admin-ui/app/main.806d67070af66e18c2fc.bundle.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"sources":["webpack:///main.806d67070af66e18c2fc.bundle.js","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/topology.service.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/app.component.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/gateway-version.service.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/utils/tab.component.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src async","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/main.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/app.module.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/gateway-version.component.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/index.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/topology-detail.component.ts","webpack:////Users/sumit.gupta/Projects/
 knox/gateway-admin-ui/src/app/topology.component.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/utils/json-pretty.pipe.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/utils/tabs.component.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/utils/xml.pipe.ts","webpack:////Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/environments/environment.prod.ts"],"names":["webpackJsonp","122","module","exports","__webpack_require__","__WEBPACK_IMPORTED_MODULE_0__angular_core__","__WEBPACK_IMPORTED_MODULE_1__angular_http__","__WEBPACK_IMPORTED_MODULE_2_rxjs_add_operator_toPromise__","__WEBPACK_IMPORTED_MODULE_3_rxjs_Subject__","n","d","TopologyService","__decorate","this","decorators","target","key","desc","c","arguments","length","r","Object","getOwnPropertyDescriptor","Reflect","decorate","i","defineProperty","__metadata","k","v","metadata","http","apiUrl","topologiesUrl","selectedTopologySource","selectedTopology
 $","asObservable","changedTopologySource","changedTopology$","prototype","getTopologies","headers","addJsonHeaders","get","toPromise","then","response","json","topologies","topology","catch","handleError","getTopology","href","addXmlHeaders","text","saveTopology","url","xml","xheaders","addCsrfHeaders","put","createTopology","name","deleteTopology","delete","append","selectedTopology","value","next","changedTopology","error","console","Promise","reject","message","_a","293","__WEBPACK_IMPORTED_MODULE_1__topology_service__","AppComponent","topologyService","selector","template","providers","294","GatewayVersionService","getVersion","addHeaders","ServerVersion","295","TabComponent","active","String","styles","344","webpackEmptyContext","req","Error","keys","resolve","id","345","__WEBPACK_IMPORTED_MODULE_0__polyfills_ts__","__WEBPACK_IMPORTED_MODULE_1__angular_platform_browser_dynamic__","__WEBPACK_IMPORTED_MODULE_2__angular_core__","__WEBPACK_IMPORTED_MODULE_3__environments_environmen
 t__","__WEBPACK_IMPORTED_MODULE_4__app___","production","bootstrapModule","445","__WEBPACK_IMPORTED_MODULE_1__angular_platform_browser__","__WEBPACK_IMPORTED_MODULE_2__angular_http__","__WEBPACK_IMPORTED_MODULE_3__angular_forms__","__WEBPACK_IMPORTED_MODULE_4__app_component__","__WEBPACK_IMPORTED_MODULE_5__topology_service__","__WEBPACK_IMPORTED_MODULE_6__gateway_version_service__","__WEBPACK_IMPORTED_MODULE_7__gateway_version_component__","__WEBPACK_IMPORTED_MODULE_8__topology_component__","__WEBPACK_IMPORTED_MODULE_9__topology_detail_component__","__WEBPACK_IMPORTED_MODULE_10__utils_xml_pipe__","__WEBPACK_IMPORTED_MODULE_11__utils_json_pretty_pipe__","__WEBPACK_IMPORTED_MODULE_12__utils_tab_component__","__WEBPACK_IMPORTED_MODULE_13__utils_tabs_component__","__WEBPACK_IMPORTED_MODULE_14_ng2_ace_editor__","__WEBPACK_IMPORTED_MODULE_15_ng2_bs3_modal_ng2_bs3_modal__","AppModule","imports","declarations","bootstrap","446","__WEBPACK_IMPORTED_MODULE_1__gateway_version_service__","Gatew
 ayVersionComponent","gatewayVersionService","_this","gatewayVersion","ngOnInit","447","__WEBPACK_IMPORTED_MODULE_1__app_module__","448","__WEBPACK_IMPORTED_MODULE_2_ng2_bs3_modal_ng2_bs3_modal__","TopologyDetailComponent","title","options","useWorker","printMargin","subscribe","populateContent","setTitle","titleSuffix","onChange","code","newTopologyName","topologyContent","content","_b","_c","449","TopologyComponent","onSelect","450","JsonPrettyPipe","transform","vkbeautify","451","__WEBPACK_IMPORTED_MODULE_1__tab_component__","TabsComponent","ngAfterContentInit","activeTabs","tabs","filter","tab","selectTab","first","toArray","forEach","452","XmlPipe","453","environment","454","__WEBPACK_IMPORTED_MODULE_0_core_js_es6_symbol__","__WEBPACK_IMPORTED_MODULE_1_core_js_es6_object__","__WEBPACK_IMPORTED_MODULE_2_core_js_es6_function__","__WEBPACK_IMPORTED_MODULE_3_core_js_es6_parse_int__","__WEBPACK_IMPORTED_MODULE_4_core_js_es6_parse_float__","__WEBPACK_IMPORTED_MODULE_5_core_js_es6_numb
 er__","__WEBPACK_IMPORTED_MODULE_6_core_js_es6_math__","__WEBPACK_IMPORTED_MODULE_7_core_js_es6_string__","__WEBPACK_IMPORTED_MODULE_8_core_js_es6_date__","__WEBPACK_IMPORTED_MODULE_9_core_js_es6_array__","__WEBPACK_IMPORTED_MODULE_10_core_js_es6_regexp__","__WEBPACK_IMPORTED_MODULE_11_core_js_es6_map__","__WEBPACK_IMPORTED_MODULE_12_core_js_es6_set__","__WEBPACK_IMPORTED_MODULE_13_core_js_es6_reflect__","__WEBPACK_IMPORTED_MODULE_14_core_js_es7_reflect__","__WEBPACK_IMPORTED_MODULE_15_zone_js_dist_zone__","629"],"mappings":"AAAAA,cAAc,EAAE,IAEVC,IACA,SAASC,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,GAClEE,EAA8CF,EAAoB,KAClEG,EAA4DH,EAAoB,KAEhFI,GADoEJ,EAAoBK,EAAEF,GAC7CH,EAAoB,IACZA,GAAoBK,EAAED,EACrEJ,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAOQ,IACvF,IAAIC,GAAcC,MAAQA,KAAKD,YAAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OAC
 pH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,EAChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,ICM1GnB,EAAA,WASI,QAAAA,GAAoBqB,GAAAnB,KAAAmB,OAPpBnB,KAAAoB,OAAS,2BACTpB,KAAAqB,cAAgBrB,KAAKoB,OAAS,aAC9BpB,KAAAsB,uBAAyB,GAAI3B,GAAA,QAC7BK,KAAAuB,kBAAoBvB,KAAKsB,uBAAuBE,eAChDxB,KAAAyB,sBAAwB,GAAI9B,GAAA,QAC5BK,KAAA0B,iBAAmB1B,KAAKyB,sBAAsBD,eA0FlD,MAtFI1B,GAAA6B,UAAAC,cAAA,WACI,GAAIC,GAAU,GAAIpC,GAAA,CAElB,OADAO,MAAK8B,eAAeD,GACb7B,KAAKmB,KAAKY,IAAI/B,KAAKqB,eACtBQ,QAASA,IAERG,YACAC,KAAK,SAAAC,GAAY,MAAAA,GAASC,OAAOC,WAAWC,WAC5CC,MAAMtC,KAAKuC,cAGpBzC,EAAA6B,UAAAa,YAAA,SAAYC,GACR,GAAIZ,GAAU,GAAIpC,GAAA,CAElB,OADAO,MAAK0C,cAAcb,GACZ7B,KAAKmB,KAAKY,IAAIU,GACjBZ,QAASA,IAERG,YACAC,KAAK,SAAAC,GAAY,MAAAA,GAASS,SAC1BL,MAAMtC,KAAKuC,cAIpBzC,EAAA6B,UAAAiB,aAAA,SAAaC,EAA
 aC,GACtB,GAAIC,GAAW,GAAItD,GAAA,CAGnB,OAFAO,MAAK0C,cAAcK,GACnB/C,KAAKgD,eAAeD,GACb/C,KAAKmB,KACP8B,IAAIJ,EAAKC,GAAMjB,QAASkB,IACxBf,YACAC,KAAK,WAAM,MAAAa,KACXR,MAAMtC,KAAKuC,cAIpBzC,EAAA6B,UAAAuB,eAAA,SAAeC,EAAcL,GACzB,GAAIC,GAAW,GAAItD,GAAA,CACnBO,MAAK0C,cAAcK,GACnB/C,KAAKgD,eAAeD,EACpB,IAAIF,GAAM7C,KAAKqB,cAAgB,IAAM8B,CACrC,OAAOnD,MAAKmB,KACP8B,IAAIJ,EAAKC,GAAMjB,QAASkB,IACxBf,YACAC,KAAK,WAAM,MAAAa,KACXR,MAAMtC,KAAKuC,cAGpBzC,EAAA6B,UAAAyB,eAAA,SAAeX,GACX,GAAIZ,GAAU,GAAIpC,GAAA,CAGlB,OAFAO,MAAK8B,eAAeD,GACpB7B,KAAKgD,eAAenB,GACb7B,KAAKmB,KAAKkC,OAAOZ,GACpBZ,QAASA,IAERG,YACAC,KAAK,SAAAC,GAAY,MAAAA,GAASS,SAC1BL,MAAMtC,KAAKuC,cAGpBzC,EAAA6B,UAAAG,eAAA,SAAeD,GACXA,EAAQyB,OAAO,SAAU,oBACzBzB,EAAQyB,OAAO,eAAgB,qBAGnCxD,EAAA6B,UAAAe,cAAA,SAAcb,GACVA,EAAQyB,OAAO,SAAU,mBACzBzB,EAAQyB,OAAO,eAAgB,oBAGnCxD,EAAA6B,UAAAqB,eAAA,SAAenB,GACXA,EAAQyB,OAAO,gBAAiB,aAGpCxD,EAAA6B,UAAA4B,iBAAA,SAAiBC,GACbxD,KAAKsB,uBAAuBmC,KAAKD,IAGrC1D,EAAA6B,UAAA+B,gBAAA,SAAgBF,GACZxD,KAAKyB,sBAAsBgC,KAAKD,IAI5B1D,EAA
 A6B,UAAAY,YAAR,SAAoBoB,GAEhB,MADAC,SAAQD,MAAM,oBAAqBA,GAC5BE,QAAQC,OAAOH,EAAMI,SAAWJ,IAhG/C7D,EAAAC,GAACR,EAAAsB,EAAArB,EAAA,cDsFOuB,EAAW,qBAA2L,mBAA5JiD,EAA4E,mBAAhEvE,GAA0D,GAAqBA,EAA0D,IAAqBuE,GAAOvD,UAC5NX,EAEH,IAAIkE,OAMFC,IACA,SAAS5E,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,GAClE2E,EAAkD3E,EAAoB,IAChEA,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAO6E,IACvF,IAAIpE,GAAcC,MAAQA,KAAKD,YAAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OACpH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,EAChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,IElG1GkD,EAAA,WACI,QAAAA,GAAoBC,GAAApE,KAAAoE,kBAExB,MApBAD,GAAApE,GAACR,E
 AAAsB,EAAArB,EAAA,YACG6E,SAAU,sBACVC,SAAU,0RAYVC,WAAYL,EAAA,KFmHRnD,EAAW,qBAAyN,mBAA1LiD,EAA2F,mBAA/EE,GAAyE,GAAqBA,EAAyE,IAAqBF,GAAOvD,UAC1P0D,EAEH,IAAIH,OAMFQ,IACA,SAASnF,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,GAClEE,EAA8CF,EAAoB,KAClEG,EAA4DH,EAAoB,IACZA,GAAoBK,EAAEF,EACpFH,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAOmF,IACvF,IAAI1E,GAAcC,MAAQA,KAAKD,YAAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OACpH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,EAChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,IGrJ1GwD,EAAA,WAII,QAAAA,GAAoBtD,GAAAnB,KAAAmB,OAFZnB,KAAAoB,OAAS,kCAwBrB,MApBIqD,GAAA9C,UAAA+C,WAAA,WAC
 I,GAAI7C,GAAU,GAAIpC,GAAA,CAElB,OADAO,MAAK2E,WAAW9C,GACT7B,KAAKmB,KAAKY,IAAI/B,KAAKoB,QACtBS,QAASA,IAERG,YACAC,KAAK,SAAAC,GAAY,MAAAA,GAASC,OAAOyC,gBACjCtC,MAAMtC,KAAKuC,cAGpBkC,EAAA9C,UAAAgD,WAAA,SAAW9C,GACPA,EAAQyB,OAAO,SAAU,oBACzBzB,EAAQyB,OAAO,eAAgB,qBAG3BmB,EAAA9C,UAAAY,YAAR,SAAoBoB,GAEhB,MADAC,SAAQD,MAAM,oBAAqBA,GAC5BE,QAAQC,OAAOH,EAAMI,SAAWJ,IAzB/Cc,EAAA1E,GAACR,EAAAsB,EAAArB,EAAA,cHoLOuB,EAAW,qBAA2L,mBAA5JiD,EAA4E,mBAAhEvE,GAA0D,GAAqBA,EAA0D,IAAqBuE,GAAOvD,UAC5NgE,EAEH,IAAIT,OAMFa,IACA,SAASxF,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,EAC5DA,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAOwF,IACvF,IAAI/E,GAAcC,MAAQA,KAAKD,YAAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OACpH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,E
 AChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,IInM1G6D,EAAA,mBAAAA,KAEW9E,KAAA+E,QAAS,EACpB,MAFEhF,IAACR,EAAAsB,EAAArB,EAAA,OAAM,YJ2MDuB,EAAW,cAAeiE,SAC3BF,EAAanD,UAAW,QAAS,QI3MtC5B,GAACR,EAAAsB,EAAArB,EAAA,SJ8MKuB,EAAW,cAAeN,SAC3BqE,EAAanD,UAAW,SAAU,QI7NzCmD,EAAA/E,GAACR,EAAAsB,EAAArB,EAAA,YACC6E,SAAU,MACVY,QAAS,gDAKTX,SAAU,gEJ6NJvD,EAAW,yBACZ+D,OAODI,IACA,SAAS7F,EAAQC,GK/PvB,QAAA6F,GAAAC,GACA,SAAAC,OAAA,uBAAAD,EAAA,MAEAD,EAAAG,KAAA,WAAuC,UACvCH,EAAAI,QAAAJ,EACA9F,EAAAC,QAAA6F,EACAA,EAAAK,GAAA,KLsQMC,IACA,SAASpG,EAAQC,EAASC,GAEhC,YACqB,IAAImG,GAA8CnG,EAAoB,KAElEoG,GADsDpG,EAAoBK,EAAE8F,GACVnG,EAAoB,MACtFqG,EAA8CrG,EAAoB,GAClEsG,EAA0DtG,EAAoB,KAC9EuG,EAAsCvG,EAAoB,IM9P/EsG,GAAA,EAAYE,YACdxG,EAAAsB,EAAA+E,EAAA,kBAGFrG,EAAAsB,EAAA8E,EAAA,KAAyBK,gBAAgBF,EAAA,INwQnCG,IACA,SAAS5G,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,GAClE2G,EAA0D3G,EAAoB,KAC9E4G,EAA8C5G,EAAoB,K
 AClE6G,EAA+C7G,EAAoB,KACnE8G,EAA+C9G,EAAoB,KACnE+G,EAAkD/G,EAAoB,KACtEgH,EAAyDhH,EAAoB,KAC7EiH,EAA2DjH,EAAoB,KAC/EkH,EAAoDlH,EAAoB,KACxEmH,EAA2DnH,EAAoB,KAC/EoH,EAAiDpH,EAAoB,KACrEqH,EAAyDrH,EAAoB,KAC7EsH,EAAsDtH,EAAoB,KAC1EuH,EAAuDvH,EAAoB,KAC3EwH,EAAgDxH,EAAoB,KAEpEyH,GADwDzH,EAAoBK,EAAEmH,GACjBxH,EAAoB,KACZA,GAAoBK,EAAEoH,EACrFzH,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAO2H,IACvF,IAAIlH,GAAcC,MAAQA,KAAKD,YAAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OACpH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,EAChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,IO1Q1GgG,EAAA,mBAAAA,MAAyB,MApBzBA,GAAAlH,GAACR,EAA
 AsB,EAAArB,EAAA,WACC0H,SAAWhB,EAAA,EACTC,EAAA,EACAC,EAAA,EACAY,EAAA,mBAEFG,cAAgBd,EAAA,EACdI,EAAA,EACEC,EAAA,EACFF,EAAA,EACAO,EAAA,mBACAJ,EAAA,EACAC,EAAA,EACAE,EAAA,EACAD,EAAA,GACFtC,WAAa+B,EAAA,EACXC,EAAA,GACFa,WAAaf,EAAA,EACXG,EAAA,KPsTIzF,EAAW,yBACZkG,OAODI,IACA,SAAShI,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,GAClE+H,EAAyD/H,EAAoB,IACvEA,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAOiI,IACvF,IAAIxH,GAAcC,MAAQA,KAAKD,YAAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OACpH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,EAChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,IQlW1GsG,EAAA,WAII,QAAAA,GAAoBC,GAAAxH,KA
 AAwH,wBAYxB,MATID,GAAA5F,UAAA+C,WAAA,cAAA+C,GAAAzH,IACIA,MAAKwH,sBAAsB9C,aAAazC,KAAM,SAAAyF,GAAkB,MAAAD,GAAKC,eAAiBA,KAG1FH,EAAA5F,UAAAgG,SAAA,WACI3H,KAAK0E,cAtBb6C,EAAAxH,GAACR,EAAAsB,EAAArB,EAAA,YACG6E,SAAU,kBACVC,SAAU,kPAKVC,WAAY+C,EAAA,KR0XRvG,EAAW,qBAAmP,mBAApNiD,EAAwG,mBAA5FsD,GAAsF,GAAqBA,EAAsF,IAAqBtD,GAAOvD,UACpR8G,EAEH,IAAIvD,OAMF4D,IACA,SAASvI,EAAQC,EAASC,GAEhC,YACqB,IACIsI,IAD+CtI,EAAoB,KACvBA,EAAoB,KSpazFA,GAAAM,EAAAP,EAAA,qBAAAuI,GAAA,KT6bMC,IACA,SAASzI,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,GAClE2E,EAAkD3E,EAAoB,KACtEwI,EAA4DxI,EAAoB,IACZA,GAAoBK,EAAEmI,EACpFxI,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAO0I,IACvF,IAAIjI,GAAcC,MAAQA,KAAKD,YAAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OACpH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC
 ,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,EAChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,IUhY1G+G,EAAA,WAgBI,QAAAA,GAAoB5D,GAAApE,KAAAoE,kBAdpBpE,KAAAiI,MAAQ,kBAMRjI,KAAAkI,SAAeC,WAAW,EAAOC,aAAa,GAqDlD,MA1CIJ,GAAArG,UAAAgG,SAAA,cAAAF,GAAAzH,IACIA,MAAKoE,gBAAgB7C,kBAAkB8G,UAAU,SAAA7E,GAAS,MAAAiE,GAAKa,gBAAgB9E,MAGnFwE,EAAArG,UAAA4G,SAAA,SAAS/E,GACLxD,KAAKwI,YAAchF,GAGvBwE,EAAArG,UAAA8G,SAAA,SAASC,GACL1I,KAAK0D,gBAAkBgF,GAG3BV,EAAArG,UAAAiB,aAAA,cAAA6E,GAAAzH,IACIA,MAAKoE,gBAAgBxB,aAAa5C,KAAKqC,SAASI,KAAMzC,KAAK0D,iBAC1DzB,KAAK,SAAAuB,GAAS,MAAAiE,GAAKrD,gBAAgBV,gBAAgB+D,EAAKpF,SAASc,SAGtE6E,EAAArG,UAAAuB,eAAA,cAAAuE,GAAAzH,IACQA,MAAK0D,gBACL1D,KAAKoE,gBAAgBlB,eAAelD,KAAK2I,gBAAiB3I,KAAK0D,iBAC9DzB,KAAK,SAAAuB,GAAS,MAAAiE,GAAKrD,gBAAgBV,gBAAgB+D,EAAKkB,mBAEzD3I,KAAKoE,gBAAgBlB,eAAelD,KAAK2I,gBAAiB3I,KAAK4I,iBAC9D3G,KAAK,SAAAuB,GAAS,MAAAiE,GAAKrD,gBAAgBV,gBAAgB+D,EAAKkB,oBAIjE
 X,EAAArG,UAAAyB,eAAA,cAAAqE,GAAAzH,IACIA,MAAKoE,gBAAgBhB,eAAepD,KAAKqC,SAASI,MAAMR,KAAK,SAAAuB,GAAS,MAAAiE,GAAKrD,gBAAgBV,gBAAgB+D,EAAKpF,SAASc,SAG7H6E,EAAArG,UAAA2G,gBAAA,SAAgBjG,GAAhB,GAAAoF,GAAAzH,IACIA,MAAKqC,SAAWA,EAChBrC,KAAKuI,SAASlG,EAASc,MACnBnD,KAAKqC,UACDrC,KAAKqC,SAASI,MACdzC,KAAKoE,gBAAgB5B,YAAYxC,KAAKqC,SAASI,MAAMR,KAAM,SAAA4G,GAAW,MAAApB,GAAKmB,gBAAkBC,KA7CzG9I,GAACR,EAAAsB,EAAArB,EAAA,WAAU,kBV2aPuB,EAAW,cAAoN,mBAA5LiD,EAA4F,mBAAhF+D,GAA0E,gBAAqBA,EAA0E,iBAAqB/D,GAAOvD,SACrPuH,EAAwBrG,UAAW,iBAAkB,QUzaxD5B,GAACR,EAAAsB,EAAArB,EAAA,WAAU,sBV4aPuB,EAAW,cAAoN,mBAA5L+H,EAA4F,mBAAhFf,GAA0E,gBAAqBA,EAA0E,iBAAqBe,GAAOrI,SACrPuH,EAAwBrG,UAAW,qBAAsB,QUlfhEqG,EAAAjI,GAACR,EAAAsB,EAAArB,EAAA,YACG6E,SAAU,kBACVC,SAAU,wqFVsfNvD,EAAW,qBAAyN,mBAA1LgI,EAA2F,mBAA/E7E,GAAyE,GAAqBA,EAAyE,IAAqB6E,GAAOtI,UAC1PuH,EAEH,IAAIhE,GAAI8E,EAAIC,MAMVC,IACA,SAAS3J,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,GAClE2E,EAAkD3E,EAAoB,IAChEA,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAO2J,IACvF,IAAIlJ,GAAcC,MAAQA,KAAKD,Y
 AAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OACpH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,EAChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,IWvf1GgI,EAAA,WAOI,QAAAA,GAAoB7E,GAAApE,KAAAoE,kBAgBxB,MAbI6E,GAAAtH,UAAAC,cAAA,cAAA6F,GAAAzH,IACIA,MAAKoE,gBAAgBxC,gBAAgBK,KAAK,SAAAG,GAAc,MAAAqF,GAAKrF,WAAaA,KAG9E6G,EAAAtH,UAAAgG,SAAA,cAAAF,GAAAzH,IACIA,MAAK4B,gBACL5B,KAAKoE,gBAAgB1C,iBAAiB2G,UAAU,SAAA7E,GAAS,MAAAiE,GAAK7F,mBAGlEqH,EAAAtH,UAAAuH,SAAA,SAAS7G,GACLrC,KAAKuD,iBAAmBlB,EACxBrC,KAAKoE,gBAAgBb,iBAAiBlB,IA5C9C4G,EAAAlJ,GAACR,EAAAsB,EAAArB,EAAA,YACG6E,SAAU,WACVC,SAAU,sqBXsiBNvD,EAAW,qBAAyN
 ,mBAA1LiD,EAA2F,mBAA/EE,GAAyE,GAAqBA,EAAyE,IAAqBF,GAAOvD,UAC1PwI,EAEH,IAAIjF,OAMFmF,IACA,SAAS9J,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,EAC5DA,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAO8J,IACvF,IAAIrJ,GAAcC,MAAQA,KAAKD,YAAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OACpH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,EAChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,IY/jB1GmI,EAAA,mBAAAA,MAKA,MAHEA,GAAAzH,UAAA0H,UAAA,SAAU7F,GACR,MAAO8F,YAAWnH,KAAKqB,IAJ3B4F,EAAArJ,GAACR,EAAAsB,EAAArB,EAAA,OAAM2D,KAAM,eZ2kBLpC,EAAW,yBACZqI,OAODG,IACA,SAASlK,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,GAClEiK,EAA+CjK,EAAo
 B,IAC7DA,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAOmK,IACvF,IAAI1J,GAAcC,MAAQA,KAAKD,YAAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OACpH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,EAChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,IavlB1GwI,EAAA,mBAAAA,MAuBA,MAlBEA,GAAA9H,UAAA+H,mBAAA,WAEE,GAAIC,GAAa3J,KAAK4J,KAAKC,OAAO,SAACC,GAAM,MAAAA,GAAI/E,QAGpB,KAAtB4E,EAAWpJ,QACZP,KAAK+J,UAAU/J,KAAK4J,KAAKI,QAI7BP,EAAA9H,UAAAoI,UAAA,SAAUD,GAER9J,KAAK4J,KAAKK,UAAUC,QAAQ,SAAAJ,GAAO,MAAAA,GAAI/E,QAAS,IAGhD+E,EAAI/E,QAAS,GAlBfhF,GAACR,EAAAsB,EAAArB,EAAA,iBAAgBgK,EAAA,Gb6mBXzI,EAAW,cAA8K,mBAAtJiD,EAAyE,mBAA7DxE,GAAuD
 ,WAAqBA,EAAuD,YAAqBwE,GAAOvD,SAC/MgJ,EAAc9H,UAAW,OAAQ,Qa3nBxC8H,EAAA1J,GAACR,EAAAsB,EAAArB,EAAA,YACC6E,SAAU,OACVC,SAAS,8Lb+nBHvD,EAAW,yBACZ0I,EAEH,IAAIzF,OAMFmG,IACA,SAAS9K,EAAQC,EAASC,GAEhC,YACqB,IAAIC,GAA8CD,EAAoB,EAC5DA,GAAoBM,EAAEP,EAAS,IAAK,WAAa,MAAO8K,IACvF,IAAIrK,GAAcC,MAAQA,KAAKD,YAAe,SAAUE,EAAYC,EAAQC,EAAKC,GAC7E,GAA2HP,GAAvHQ,EAAIC,UAAUC,OAAQC,EAAIH,EAAI,EAAIH,EAAkB,OAATE,EAAgBA,EAAOK,OAAOC,yBAAyBR,EAAQC,GAAOC,CACrH,IAAuB,gBAAZO,UAAoD,kBAArBA,SAAQC,SAAyBJ,EAAIG,QAAQC,SAASX,EAAYC,EAAQC,EAAKC,OACpH,KAAK,GAAIS,GAAIZ,EAAWM,OAAS,EAAGM,GAAK,EAAGA,KAAShB,EAAII,EAAWY,MAAIL,GAAKH,EAAI,EAAIR,EAAEW,GAAKH,EAAI,EAAIR,EAAEK,EAAQC,EAAKK,GAAKX,EAAEK,EAAQC,KAASK,EAChJ,OAAOH,GAAI,GAAKG,GAAKC,OAAOK,eAAeZ,EAAQC,EAAKK,GAAIA,GAE5DO,EAAcf,MAAQA,KAAKe,YAAe,SAAUC,EAAGC,GACvD,GAAuB,gBAAZN,UAAoD,kBAArBA,SAAQO,SAAyB,MAAOP,SAAQO,SAASF,EAAGC,IcrpB1GmJ,EAAA,mBAAAA,MAKA,MAHEA,GAAAzI,UAAA0H,UAAA,SAAU7F,GACR,MAAO8F,YAAWxG,IAAIU,IAJ1B4G,EAAArK,GAACR,EAAAsB,EAAArB,EAAA,OAAM2D,KAAM,QdiqBLpC,EAAW,yBACZqJ,OAODC
 ,IACA,SAAShL,EAAQC,EAASC,GAEhC,YehsBAA,GAAAM,EAAAP,EAAA,qBAAAgL,IAgBO,IAAMA,IACXvE,YAAY,IfwsBRwE,IACA,SAASlL,EAAQC,EAASC,GAEhC,YACqB,IAAIiL,GAAmDjL,EAAoB,KAEvEkL,GAD2DlL,EAAoBK,EAAE4K,GAC9BjL,EAAoB,MAEvEmL,GAD2DnL,EAAoBK,EAAE6K,GAC5BlL,EAAoB,MAEzEoL,GAD6DpL,EAAoBK,EAAE8K,GAC7BnL,EAAoB,MAE1EqL,GAD8DrL,EAAoBK,EAAE+K,GAC5BpL,EAAoB,MAE5EsL,GADgEtL,EAAoBK,EAAEgL,GACnCrL,EAAoB,MAEvEuL,GAD2DvL,EAAoBK,EAAEiL,GAChCtL,EAAoB,MAErEwL,GADyDxL,EAAoBK,EAAEkL,GAC5BvL,EAAoB,MAEvEyL,GAD2DzL,EAAoBK,EAAEmL,GAChCxL,EAAoB,MAErE0L,GADyD1L,EAAoBK,EAAEoL,GAC7BzL,EAAoB,MAEtE2L,GAD0D3L,EAAoBK,EAAEqL,GAC5B1L,EAAoB,MAExE4L,GAD4D5L,EAAoBK,EAAEsL,GACjC3L,EAAoB,MAErE6L,GADyD7L,EAAoBK,EAAEuL,GAC9B5L,EAAoB,MAErE8L,GADyD9L,EAAoBK,EAAEwL,GAC1B7L,EAAoB,MAEzE+L,GAD6D/L,EAAoBK,EAAEyL,GAC9B9L,EAAoB,MAEzEgM,GAD6DhM,EAAoBK,EAAE0L,GAChC/L,EAAoB,KACZA,GAAoBK,EAAE2L,IAqBpGC,IACA,SAASnM,EAAQC,EAASC,GAEhCF,EAAOC,QAAUC,EAAoB,QAKlC","file":"main.806d67070af66e18c2fc.bundle.js","sourcesContent":["webpackJsonp([0,3],{\n\n/***/ 122:\
 n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__angular_http__ = __webpack_require__(180);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2_rxjs_add_operator_toPromise__ = __webpack_require__(341);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2_rxjs_add_operator_toPromise___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_rxjs_add_operator_toPromise__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3_rxjs_Subject__ = __webpack_require__(90);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3_rxjs_Subject___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_rxjs_Subject__);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return TopologyService; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc)
  {\n    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n    return c > 3 && r && Object.defineProperty(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n    if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\n\n\n\nvar TopologyService = (function () {\n    function TopologyService(http) {\n        this.http = http;\n        this.apiUrl = '/gateway/manager/api/v1/';\n        this.topologiesUrl = this.apiUrl + 'topologies';\n        this.selectedTopologySource = new __WEBPACK_IMPORTED_MODULE_3_rxjs_Subject__[\"Subject\"]
 ();\n        this.selectedTopology$ = this.selectedTopologySource.asObservable();\n        this.changedTopologySource = new __WEBPACK_IMPORTED_MODULE_3_rxjs_Subject__[\"Subject\"]();\n        this.changedTopology$ = this.changedTopologySource.asObservable();\n    }\n    TopologyService.prototype.getTopologies = function () {\n        var headers = new __WEBPACK_IMPORTED_MODULE_1__angular_http__[\"a\" /* Headers */]();\n        this.addJsonHeaders(headers);\n        return this.http.get(this.topologiesUrl, {\n            headers: headers\n        })\n            .toPromise()\n            .then(function (response) { return response.json().topologies.topology; })\n            .catch(this.handleError);\n    };\n    TopologyService.prototype.getTopology = function (href) {\n        var headers = new __WEBPACK_IMPORTED_MODULE_1__angular_http__[\"a\" /* Headers */]();\n        this.addXmlHeaders(headers);\n        return this.http.get(href, {\n            headers: headers\n        })\n    
         .toPromise()\n            .then(function (response) { return response.text(); })\n            .catch(this.handleError);\n    };\n    TopologyService.prototype.saveTopology = function (url, xml) {\n        var xheaders = new __WEBPACK_IMPORTED_MODULE_1__angular_http__[\"a\" /* Headers */]();\n        this.addXmlHeaders(xheaders);\n        this.addCsrfHeaders(xheaders);\n        return this.http\n            .put(url, xml, { headers: xheaders })\n            .toPromise()\n            .then(function () { return xml; })\n            .catch(this.handleError);\n    };\n    TopologyService.prototype.createTopology = function (name, xml) {\n        var xheaders = new __WEBPACK_IMPORTED_MODULE_1__angular_http__[\"a\" /* Headers */]();\n        this.addXmlHeaders(xheaders);\n        this.addCsrfHeaders(xheaders);\n        var url = this.topologiesUrl + \"/\" + name;\n        return this.http\n            .put(url, xml, { headers: xheaders })\n            .toPromise()\n            .the
 n(function () { return xml; })\n            .catch(this.handleError);\n    };\n    TopologyService.prototype.deleteTopology = function (href) {\n        var headers = new __WEBPACK_IMPORTED_MODULE_1__angular_http__[\"a\" /* Headers */]();\n        this.addJsonHeaders(headers);\n        this.addCsrfHeaders(headers);\n        return this.http.delete(href, {\n            headers: headers\n        })\n            .toPromise()\n            .then(function (response) { return response.text(); })\n            .catch(this.handleError);\n    };\n    TopologyService.prototype.addJsonHeaders = function (headers) {\n        headers.append('Accept', 'application/json');\n        headers.append('Content-Type', 'application/json');\n    };\n    TopologyService.prototype.addXmlHeaders = function (headers) {\n        headers.append('Accept', 'application/xml');\n        headers.append('Content-Type', 'application/xml');\n    };\n    TopologyService.prototype.addCsrfHeaders = function (headers) {\n   
      headers.append('X-XSRF-Header', 'admin-ui');\n    };\n    TopologyService.prototype.selectedTopology = function (value) {\n        this.selectedTopologySource.next(value);\n    };\n    TopologyService.prototype.changedTopology = function (value) {\n        this.changedTopologySource.next(value);\n    };\n    TopologyService.prototype.handleError = function (error) {\n        console.error('An error occurred', error); // for demo purposes only\n        return Promise.reject(error.message || error);\n    };\n    TopologyService = __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Injectable\"])(), \n        __metadata('design:paramtypes', [(typeof (_a = typeof __WEBPACK_IMPORTED_MODULE_1__angular_http__[\"b\" /* Http */] !== 'undefined' && __WEBPACK_IMPORTED_MODULE_1__angular_http__[\"b\" /* Http */]) === 'function' && _a) || Object])\n    ], TopologyService);\n    return TopologyService;\n    var _a;\n}());\n//# sourceMappingURL=/Users/sumi
 t.gupta/Projects/knox/gateway-admin-ui/src/topology.service.js.map\n\n/***/ },\n\n/***/ 293:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__topology_service__ = __webpack_require__(122);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return AppComponent; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {\n    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n    return c
  > 3 && r && Object.defineProperty(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n    if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\n\nvar AppComponent = (function () {\n    function AppComponent(topologyService) {\n        this.topologyService = topologyService;\n    }\n    AppComponent = __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Component\"])({\n            selector: 'topology-management',\n            template: \"\\n      <div class=\\\"container\\\">\\n        <div class=\\\"row\\\">\\n          <div class=\\\"col-md-5\\\">\\n            <topology></topology>\\n         </div>\\n          <div class=\\\"col-md-7\\\">\\n            <topology-detail></topology-detail>\\n          </div>\\n        </div>\\n      </div>\\n  \",\n            providers: [__WEBPACK_IMPORTED_MODULE_1__topology_service__[\"a\" /* TopologyService 
 */]]\n        }), \n        __metadata('design:paramtypes', [(typeof (_a = typeof __WEBPACK_IMPORTED_MODULE_1__topology_service__[\"a\" /* TopologyService */] !== 'undefined' && __WEBPACK_IMPORTED_MODULE_1__topology_service__[\"a\" /* TopologyService */]) === 'function' && _a) || Object])\n    ], AppComponent);\n    return AppComponent;\n    var _a;\n}());\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app.component.js.map\n\n/***/ },\n\n/***/ 294:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__angular_http__ = __webpack_require__(180);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2_rxjs_add_operator_toPromise__ = __webpack_require__(341);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2_rxjs_add_operator_toPromise___default = __webpack_require__.n(__WEBPACK_IMPORTED
 _MODULE_2_rxjs_add_operator_toPromise__);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return GatewayVersionService; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {\n    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n    return c > 3 && r && Object.defineProperty(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n    if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\n\n\nvar GatewayVersionService = (function () {\n    function GatewayV
 ersionService(http) {\n        this.http = http;\n        this.apiUrl = '/gateway/manager/api/v1/version';\n    }\n    GatewayVersionService.prototype.getVersion = function () {\n        var headers = new __WEBPACK_IMPORTED_MODULE_1__angular_http__[\"a\" /* Headers */]();\n        this.addHeaders(headers);\n        return this.http.get(this.apiUrl, {\n            headers: headers\n        })\n            .toPromise()\n            .then(function (response) { return response.json().ServerVersion; })\n            .catch(this.handleError);\n    };\n    GatewayVersionService.prototype.addHeaders = function (headers) {\n        headers.append('Accept', 'application/json');\n        headers.append('Content-Type', 'application/json');\n    };\n    GatewayVersionService.prototype.handleError = function (error) {\n        console.error('An error occurred', error); // for demo purposes only\n        return Promise.reject(error.message || error);\n    };\n    GatewayVersionService = __decorate(
 [\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Injectable\"])(), \n        __metadata('design:paramtypes', [(typeof (_a = typeof __WEBPACK_IMPORTED_MODULE_1__angular_http__[\"b\" /* Http */] !== 'undefined' && __WEBPACK_IMPORTED_MODULE_1__angular_http__[\"b\" /* Http */]) === 'function' && _a) || Object])\n    ], GatewayVersionService);\n    return GatewayVersionService;\n    var _a;\n}());\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/gateway-version.service.js.map\n\n/***/ },\n\n/***/ 295:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return TabComponent; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {\n    var c = arguments.length, r = c < 3 ? target : desc === null ? d
 esc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n    return c > 3 && r && Object.defineProperty(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n    if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\nvar TabComponent = (function () {\n    function TabComponent() {\n        this.active = false;\n    }\n    __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Input\"])('tabTitle'), \n        __metadata('design:type', String)\n    ], TabComponent.prototype, \"title\", void 0);\n    __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MOD
 ULE_0__angular_core__[\"Input\"])(), \n        __metadata('design:type', Object)\n    ], TabComponent.prototype, \"active\", void 0);\n    TabComponent = __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Component\"])({\n            selector: 'tab',\n            styles: [\"\\n    .pane{\\n      padding: 1em;\\n    }\\n  \"],\n            template: \"\\n    <div [hidden]=\\\"!active\\\" class=\\\"pane\\\">\\n    </div>\\n  \"\n        }), \n        __metadata('design:paramtypes', [])\n    ], TabComponent);\n    return TabComponent;\n}());\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/tab.component.js.map\n\n/***/ },\n\n/***/ 344:\n/***/ function(module, exports) {\n\nfunction webpackEmptyContext(req) {\n\tthrow new Error(\"Cannot find module '\" + req + \"'.\");\n}\nwebpackEmptyContext.keys = function() { return []; };\nwebpackEmptyContext.resolve = webpackEmptyContext;\nmodule.exports = webpackEmptyContext;\nwebpa
 ckEmptyContext.id = 344;\n\n\n/***/ },\n\n/***/ 345:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__polyfills_ts__ = __webpack_require__(454);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__polyfills_ts___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0__polyfills_ts__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__angular_platform_browser_dynamic__ = __webpack_require__(425);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__angular_core__ = __webpack_require__(0);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3__environments_environment__ = __webpack_require__(453);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_4__app___ = __webpack_require__(447);\n\n\n\n\n\nif (__WEBPACK_IMPORTED_MODULE_3__environments_environment__[\"a\" /* environment */].production) {\n    __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_2__angular_core__[\"enableProdMode\"])();\n}\n
 __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_1__angular_platform_browser_dynamic__[\"a\" /* platformBrowserDynamic */])().bootstrapModule(__WEBPACK_IMPORTED_MODULE_4__app___[\"a\" /* AppModule */]);\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/main.js.map\n\n/***/ },\n\n/***/ 445:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__angular_platform_browser__ = __webpack_require__(184);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__angular_http__ = __webpack_require__(180);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3__angular_forms__ = __webpack_require__(418);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_4__app_component__ = __webpack_require__(293);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_5__topology_service__ = __webpack_require__(122);\n/
 * harmony import */ var __WEBPACK_IMPORTED_MODULE_6__gateway_version_service__ = __webpack_require__(294);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_7__gateway_version_component__ = __webpack_require__(446);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_8__topology_component__ = __webpack_require__(449);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_9__topology_detail_component__ = __webpack_require__(448);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_10__utils_xml_pipe__ = __webpack_require__(452);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_11__utils_json_pretty_pipe__ = __webpack_require__(450);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_12__utils_tab_component__ = __webpack_require__(295);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_13__utils_tabs_component__ = __webpack_require__(451);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_14_ng2_ace_editor__ = __webpack_require__(608);\n/* harmony import */ var __WEBPACK_IMPOR
 TED_MODULE_14_ng2_ace_editor___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_14_ng2_ace_editor__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_15_ng2_bs3_modal_ng2_bs3_modal__ = __webpack_require__(340);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_15_ng2_bs3_modal_ng2_bs3_modal___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_15_ng2_bs3_modal_ng2_bs3_modal__);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return AppModule; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {\n    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(
 target, key)) || r;\n    return c > 3 && r && Object.defineProperty(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n    if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nvar AppModule = (function () {\n    function AppModule() {\n    }\n    AppModule = __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"NgModule\"])({\n            imports: [__WEBPACK_IMPORTED_MODULE_1__angular_platform_browser__[\"b\" /* BrowserModule */],\n                __WEBPACK_IMPORTED_MODULE_2__angular_http__[\"c\" /* HttpModule */],\n                __WEBPACK_IMPORTED_MODULE_3__angular_forms__[\"a\" /* FormsModule */],\n                __WEBPACK_IMPORTED_MODULE_15_ng2_bs3_modal_ng2_bs3_modal__[\"Ng2Bs3ModalModule\"]\n            ],\n            declarations: [__WEBPACK_IMPORTED_MODULE_4__app_component__[\"a\" /* AppComponent */],\n   
              __WEBPACK_IMPORTED_MODULE_8__topology_component__[\"a\" /* TopologyComponent */],\n                __WEBPACK_IMPORTED_MODULE_9__topology_detail_component__[\"a\" /* TopologyDetailComponent */],\n                __WEBPACK_IMPORTED_MODULE_7__gateway_version_component__[\"a\" /* GatewayVersionComponent */],\n                __WEBPACK_IMPORTED_MODULE_14_ng2_ace_editor__[\"AceEditorDirective\"],\n                __WEBPACK_IMPORTED_MODULE_10__utils_xml_pipe__[\"a\" /* XmlPipe */],\n                __WEBPACK_IMPORTED_MODULE_11__utils_json_pretty_pipe__[\"a\" /* JsonPrettyPipe */],\n                __WEBPACK_IMPORTED_MODULE_13__utils_tabs_component__[\"a\" /* TabsComponent */],\n                __WEBPACK_IMPORTED_MODULE_12__utils_tab_component__[\"a\" /* TabComponent */]],\n            providers: [__WEBPACK_IMPORTED_MODULE_5__topology_service__[\"a\" /* TopologyService */],\n                __WEBPACK_IMPORTED_MODULE_6__gateway_version_service__[\"a\" /* GatewayVersionService */
 ]],\n            bootstrap: [__WEBPACK_IMPORTED_MODULE_4__app_component__[\"a\" /* AppComponent */],\n                __WEBPACK_IMPORTED_MODULE_7__gateway_version_component__[\"a\" /* GatewayVersionComponent */]]\n        }), \n        __metadata('design:paramtypes', [])\n    ], AppModule);\n    return AppModule;\n}());\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app.module.js.map\n\n/***/ },\n\n/***/ 446:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__gateway_version_service__ = __webpack_require__(294);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return GatewayVersionComponent; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {\n    var c = arguments.length, r = c < 3 ? target : desc === null 
 ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n    return c > 3 && r && Object.defineProperty(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n    if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\n\nvar GatewayVersionComponent = (function () {\n    function GatewayVersionComponent(gatewayVersionService) {\n        this.gatewayVersionService = gatewayVersionService;\n    }\n    GatewayVersionComponent.prototype.getVersion = function () {\n        var _this = this;\n        this.gatewayVersionService.getVersion().then(function (gatewayVersion) { return _this.gatewayVersion = g
 atewayVersion; });\n    };\n    GatewayVersionComponent.prototype.ngOnInit = function () {\n        this.getVersion();\n    };\n    GatewayVersionComponent = __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Component\"])({\n            selector: 'gateway-version',\n            template: \"\\n        <div *ngIf=\\\"gatewayVersion\\\">\\n            <span class=\\\"small\\\"><cite>Knox Gateway Version</cite> {{this.gatewayVersion.version}}</span>\\n            <span class=\\\"small\\\"><cite>Hash</cite> {{this.gatewayVersion.hash}}</span>\\n</div>\",\n            providers: [__WEBPACK_IMPORTED_MODULE_1__gateway_version_service__[\"a\" /* GatewayVersionService */]]\n        }), \n        __metadata('design:paramtypes', [(typeof (_a = typeof __WEBPACK_IMPORTED_MODULE_1__gateway_version_service__[\"a\" /* GatewayVersionService */] !== 'undefined' && __WEBPACK_IMPORTED_MODULE_1__gateway_version_service__[\"a\" /* GatewayVersionService */]) === 'fun
 ction' && _a) || Object])\n    ], GatewayVersionComponent);\n    return GatewayVersionComponent;\n    var _a;\n}());\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/gateway-version.component.js.map\n\n/***/ },\n\n/***/ 447:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__app_component__ = __webpack_require__(293);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__app_module__ = __webpack_require__(445);\n/* unused harmony namespace reexport */\n/* harmony namespace reexport (by used) */ __webpack_require__.d(exports, \"a\", function() { return __WEBPACK_IMPORTED_MODULE_1__app_module__[\"a\"]; });\n/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache Li
 cense, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/index.js.map\n\n/***/ },\n\n/***/ 448:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__topology_service__ = __webpack_require__(122);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2_ng2_bs3
 _modal_ng2_bs3_modal__ = __webpack_require__(340);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2_ng2_bs3_modal_ng2_bs3_modal___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_ng2_bs3_modal_ng2_bs3_modal__);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return TopologyDetailComponent; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {\n    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n    return c > 3 && r && Object.defineProperty(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n   
  if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\n\n\nvar TopologyDetailComponent = (function () {\n    function TopologyDetailComponent(topologyService) {\n        this.topologyService = topologyService;\n        this.title = 'Topology Detail';\n        this.options = { useWorker: false, printMargin: false };\n    }\n    TopologyDetailComponent.prototype.ngOnInit = function () {\n        var _this = this;\n        this.topologyService.selectedTopology$.subscribe(function (value) { return _this.populateContent(value); });\n    };\n    TopologyDetailComponent.prototype.setTitle = function (value) {\n        this.titleSuffix = value;\n    };\n    TopologyDetailComponent.prototype.onChange = function (code) {\n        this.changedTopology = code;\n    };\n    TopologyDetailComponent.prototype.saveTopology = function () {\n        var _this = this;\n        this.topologyService.saveTopology(this.topology.href, this.chan
 gedTopology)\n            .then(function (value) { return _this.topologyService.changedTopology(_this.topology.name); });\n    };\n    TopologyDetailComponent.prototype.createTopology = function () {\n        var _this = this;\n        if (this.changedTopology) {\n            this.topologyService.createTopology(this.newTopologyName, this.changedTopology)\n                .then(function (value) { return _this.topologyService.changedTopology(_this.newTopologyName); });\n        }\n        else {\n            this.topologyService.createTopology(this.newTopologyName, this.topologyContent)\n                .then(function (value) { return _this.topologyService.changedTopology(_this.newTopologyName); });\n        }\n    };\n    TopologyDetailComponent.prototype.deleteTopology = function () {\n        var _this = this;\n        this.topologyService.deleteTopology(this.topology.href).then(function (value) { return _this.topologyService.changedTopology(_this.topology.name); });\n    };\n    T
 opologyDetailComponent.prototype.populateContent = function (topology) {\n        var _this = this;\n        this.topology = topology;\n        this.setTitle(topology.name);\n        if (this.topology) {\n            if (this.topology.href) {\n                this.topologyService.getTopology(this.topology.href).then(function (content) { return _this.topologyContent = content; });\n            }\n        }\n    };\n    __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"ViewChild\"])('duplicateModal'), \n        __metadata('design:type', (typeof (_a = typeof __WEBPACK_IMPORTED_MODULE_2_ng2_bs3_modal_ng2_bs3_modal__[\"ModalComponent\"] !== 'undefined' && __WEBPACK_IMPORTED_MODULE_2_ng2_bs3_modal_ng2_bs3_modal__[\"ModalComponent\"]) === 'function' && _a) || Object)\n    ], TopologyDetailComponent.prototype, \"duplicateModal\", void 0);\n    __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"ViewChild\"])('dele
 teConfirmModal'), \n        __metadata('design:type', (typeof (_b = typeof __WEBPACK_IMPORTED_MODULE_2_ng2_bs3_modal_ng2_bs3_modal__[\"ModalComponent\"] !== 'undefined' && __WEBPACK_IMPORTED_MODULE_2_ng2_bs3_modal_ng2_bs3_modal__[\"ModalComponent\"]) === 'function' && _b) || Object)\n    ], TopologyDetailComponent.prototype, \"deleteConfirmModal\", void 0);\n    TopologyDetailComponent = __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Component\"])({\n            selector: 'topology-detail',\n            template: \"\\n     <div class=\\\"panel panel-default\\\">\\n        <div class=\\\"panel-heading\\\">\\n            <h4 class=\\\"panel-title\\\">{{title}} <span class=\\\"label label-default pull-right\\\">{{titleSuffix}}</span></h4>\\n         </div>\\n     <div *ngIf=\\\"topologyContent\\\" class=\\\"panel-body\\\">\\n      <div ace-editor\\n       [readOnly]=\\\"false\\\" [text]=\\\"topologyContent | xml\\\" [mode]=\\\"'xml'\\\" [optio
 ns]=\\\"options\\\" \\n        [theme]=\\\"'monokai'\\\"\\n         style=\\\"min-height: 300px; width:100%; overflow: auto;\\\" (textChanged)=\\\"onChange($event)\\\">\\n      </div>\\n       <div class=\\\"panel-footer\\\">\\n        <button (click)=\\\"duplicateModal.open('sm')\\\" class=\\\"btn btn-default btn-sm\\\" type=\\\"submit\\\">\\n            <span class=\\\"glyphicon glyphicon-duplicate\\\" aria-hidden=\\\"true\\\"></span>\\n        </button>\\n        <button (click)=\\\"deleteConfirmModal.open('sm')\\\" class=\\\"btn btn-default btn-sm\\\" type=\\\"submit\\\">\\n            <span class=\\\"glyphicon glyphicon-trash\\\" aria-hidden=\\\"true\\\"></span>\\n        </button>\\n       <button (click)=\\\"saveTopology()\\\" class=\\\"btn btn-default btn-sm pull-right\\\" [disabled]=\\\"!changedTopology\\\" type=\\\"submit\\\">\\n            <span class=\\\"glyphicon glyphicon-floppy-disk\\\" aria-hidden=\\\"true\\\"></span>\\n        </button>\\n       </div>\\n         \\
 n    </div>\\n    <modal (onClose)=\\\"createTopology()\\\" #duplicateModal>\\n\\n        <modal-header [show-close]=\\\"true\\\">\\n            <h4 class=\\\"modal-title\\\">Create a copy</h4>\\n        </modal-header>\\n        <modal-body>\\n            <div class=\\\"form-group\\\">\\n                <label for=\\\"textbox\\\">Name the new topology</label>\\n                <input autofocus type=\\\"text\\\" class=\\\"form-control\\\" required [(ngModel)]=\\\"newTopologyName\\\" id=\\\"textbox\\\">\\n            </div> \\n        </modal-body>\\n        <modal-footer>\\n            <button type=\\\"button\\\" class=\\\"btn btn-default btn-sm\\\" data-dismiss=\\\"duplicateModal\\\" (click)=\\\"duplicateModal.dismiss()\\\">Cancel</button>\\n            <button type=\\\"button\\\" class=\\\"btn btn-primary btn-sm\\\" [disabled]=\\\"!newTopologyName\\\" (click)=\\\"duplicateModal.close()\\\">Ok</button>\\n        </modal-footer>\\n    </modal>\\n    <modal (onClose)=\\\"deleteTopolo
 gy()\\\" #deleteConfirmModal>\\n        <modal-header [show-close]=\\\"true\\\">\\n            <h4 class=\\\"modal-title\\\">Deleting Topology {{titleSuffix}}</h4>\\n        </modal-header>\\n        <modal-body>\\n            Are you sure you want to delete the topology?\\n        </modal-body>\\n        <modal-footer>\\n            <button type=\\\"button\\\" class=\\\"btn btn-default btn-sm\\\" data-dismiss=\\\"deleteConfirmModal\\\" (click)=\\\"deleteConfirmModal.dismiss()\\\">Cancel</button>\\n            <button type=\\\"button\\\" class=\\\"btn btn-primary btn-sm\\\" (click)=\\\"deleteConfirmModal.close()\\\">Ok</button>\\n        </modal-footer>\\n    </modal>\\n   \"\n        }), \n        __metadata('design:paramtypes', [(typeof (_c = typeof __WEBPACK_IMPORTED_MODULE_1__topology_service__[\"a\" /* TopologyService */] !== 'undefined' && __WEBPACK_IMPORTED_MODULE_1__topology_service__[\"a\" /* TopologyService */]) === 'function' && _c) || Object])\n    ], TopologyDetailCompo
 nent);\n    return TopologyDetailComponent;\n    var _a, _b, _c;\n}());\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/topology-detail.component.js.map\n\n/***/ },\n\n/***/ 449:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__topology_service__ = __webpack_require__(122);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return TopologyComponent; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {\n    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; 
 i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n    return c > 3 && r && Object.defineProperty(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n    if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\n\nvar TopologyComponent = (function () {\n    function TopologyComponent(topologyService) {\n        this.topologyService = topologyService;\n    }\n    TopologyComponent.prototype.getTopologies = function () {\n        var _this = this;\n        this.topologyService.getTopologies().then(function (topologies) { return _this.topologies = topologies; });\n    };\n    TopologyComponent.prototype.ngOnInit = function () {\n        var _this = this;\n        this.getTopologies();\n        this.topologyService.changedTopology$.subscribe(function (value) { return _this.getTopologies(); });\n    };\n    TopologyComponent.prototype.on
 Select = function (topology) {\n        this.selectedTopology = topology;\n        this.topologyService.selectedTopology(topology);\n    };\n    TopologyComponent = __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Component\"])({\n            selector: 'topology',\n            template: \"\\n        <div class=\\\"table-responsive\\\" style=\\\"max-height: 400px; width:100%; overflow: auto;\\\">\\n            <table class=\\\"table table-striped table-hover\\\">\\n              <thead>\\n                <tr>\\n                  <th>Topology Name</th>\\n                  <th>Timestamp</th>\\n                </tr>\\n              </thead>\\n         <tbody>\\n         <tr *ngFor=\\\"let topology of topologies\\\"\\n          [class.selected]=\\\"topology === selectedTopology\\\"\\n        (click)=\\\"onSelect(topology)\\\">\\n         <td>{{topology.name}}</td> \\n         <td>{{topology.timestamp | date:'yMMMdjms'}}</td> \\n         </tr>\\n  
       </tbody>\\n        </table>\\n        </div>\\n       \"\n        }), \n        __metadata('design:paramtypes', [(typeof (_a = typeof __WEBPACK_IMPORTED_MODULE_1__topology_service__[\"a\" /* TopologyService */] !== 'undefined' && __WEBPACK_IMPORTED_MODULE_1__topology_service__[\"a\" /* TopologyService */]) === 'function' && _a) || Object])\n    ], TopologyComponent);\n    return TopologyComponent;\n    var _a;\n}());\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/topology.component.js.map\n\n/***/ },\n\n/***/ 450:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return JsonPrettyPipe; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {\n    var c = arguments.length, r = c < 3 ? target : desc === null ? des
 c = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n    return c > 3 && r && Object.defineProperty(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n    if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\nvar JsonPrettyPipe = (function () {\n    function JsonPrettyPipe() {\n    }\n    JsonPrettyPipe.prototype.transform = function (value) {\n        return vkbeautify.json(value);\n    };\n    JsonPrettyPipe = __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Pipe\"])({ name: 'jsonpretty' }), \n        __metadata('design:paramtypes', [])\n    ], Json
 PrettyPipe);\n    return JsonPrettyPipe;\n}());\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/json-pretty.pipe.js.map\n\n/***/ },\n\n/***/ 451:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__tab_component__ = __webpack_require__(295);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return TabsComponent; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {\n    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = 
 (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n    return c > 3 && r && Object.defineProperty(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n    if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\n\nvar TabsComponent = (function () {\n    function TabsComponent() {\n    }\n    // contentChildren are set\n    TabsComponent.prototype.ngAfterContentInit = function () {\n        // get all active tabs\n        var activeTabs = this.tabs.filter(function (tab) { return tab.active; });\n        // if there is no active tab set, activate the first\n        if (activeTabs.length === 0) {\n            this.selectTab(this.tabs.first);\n        }\n    };\n    TabsComponent.prototype.selectTab = function (tab) {\n        // deactivate all tabs\n        this.tabs.toArray().forEach(function (tab) { return tab.active = false; });\n        // activate the tab the user has c
 licked on.\n        tab.active = true;\n    };\n    __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"ContentChildren\"])(__WEBPACK_IMPORTED_MODULE_1__tab_component__[\"a\" /* TabComponent */]), \n        __metadata('design:type', (typeof (_a = typeof __WEBPACK_IMPORTED_MODULE_0__angular_core__[\"QueryList\"] !== 'undefined' && __WEBPACK_IMPORTED_MODULE_0__angular_core__[\"QueryList\"]) === 'function' && _a) || Object)\n    ], TabsComponent.prototype, \"tabs\", void 0);\n    TabsComponent = __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Component\"])({\n            selector: 'tabs',\n            template: \"\\n    <ul class=\\\"nav nav-tabs\\\">\\n      <li *ngFor=\\\"let tab of tabs\\\" (click)=\\\"selectTab(tab)\\\" [class.active]=\\\"tab.active\\\">\\n        <a>{{tab.title}}</a>\\n      </li>\\n    </ul>\\n    \\n  \"\n        }), \n        __metadata('design:paramtypes', [])\n    ], TabsComponent
 );\n    return TabsComponent;\n    var _a;\n}());\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/tabs.component.js.map\n\n/***/ },\n\n/***/ 452:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__angular_core__ = __webpack_require__(0);\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return XmlPipe; });\nvar __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {\n    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n    if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n    return c > 3 && r && Object.define
 Property(target, key, r), r;\n};\nvar __metadata = (this && this.__metadata) || function (k, v) {\n    if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(k, v);\n};\n\nvar XmlPipe = (function () {\n    function XmlPipe() {\n    }\n    XmlPipe.prototype.transform = function (value) {\n        return vkbeautify.xml(value);\n    };\n    XmlPipe = __decorate([\n        __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_0__angular_core__[\"Pipe\"])({ name: 'xml' }), \n        __metadata('design:paramtypes', [])\n    ], XmlPipe);\n    return XmlPipe;\n}());\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/xml.pipe.js.map\n\n/***/ },\n\n/***/ 453:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony export (binding) */ __webpack_require__.d(exports, \"a\", function() { return environment; });\n/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor
  license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar environment = {\n    production: true\n};\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/environment.prod.js.map\n\n/***/ },\n\n/***/ 454:\n/***/ function(module, exports, __webpack_require__) {\n\n\"use strict\";\n/* harmony impor
 t */ var __WEBPACK_IMPORTED_MODULE_0_core_js_es6_symbol__ = __webpack_require__(471);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0_core_js_es6_symbol___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_core_js_es6_symbol__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1_core_js_es6_object__ = __webpack_require__(464);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1_core_js_es6_object___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_core_js_es6_object__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2_core_js_es6_function__ = __webpack_require__(460);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2_core_js_es6_function___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_core_js_es6_function__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3_core_js_es6_parse_int__ = __webpack_require__(466);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3_core_js_es6_parse_int___default = __webpack_require__.n(__WEBPACK
 _IMPORTED_MODULE_3_core_js_es6_parse_int__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_4_core_js_es6_parse_float__ = __webpack_require__(465);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_4_core_js_es6_parse_float___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_core_js_es6_parse_float__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_5_core_js_es6_number__ = __webpack_require__(463);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_5_core_js_es6_number___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_core_js_es6_number__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_6_core_js_es6_math__ = __webpack_require__(462);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_6_core_js_es6_math___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_core_js_es6_math__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_7_core_js_es6_string__ = __webpack_require__(470);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_7_cor
 e_js_es6_string___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_core_js_es6_string__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_8_core_js_es6_date__ = __webpack_require__(459);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_8_core_js_es6_date___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_8_core_js_es6_date__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_9_core_js_es6_array__ = __webpack_require__(458);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_9_core_js_es6_array___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_9_core_js_es6_array__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_10_core_js_es6_regexp__ = __webpack_require__(468);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_10_core_js_es6_regexp___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_10_core_js_es6_regexp__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_11_core_js_es6_map__ = __webpack_require__(461);\n/* harmony import
  */ var __WEBPACK_IMPORTED_MODULE_11_core_js_es6_map___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_11_core_js_es6_map__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_12_core_js_es6_set__ = __webpack_require__(469);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_12_core_js_es6_set___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_12_core_js_es6_set__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_13_core_js_es6_reflect__ = __webpack_require__(467);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_13_core_js_es6_reflect___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_13_core_js_es6_reflect__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_14_core_js_es7_reflect__ = __webpack_require__(472);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_14_core_js_es7_reflect___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_14_core_js_es7_reflect__);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_15_zone_js_dist_zone
 __ = __webpack_require__(628);\n/* harmony import */ var __WEBPACK_IMPORTED_MODULE_15_zone_js_dist_zone___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_15_zone_js_dist_zone__);\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n//# sourceMappingURL=/Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/polyfills.js.map\n\n/***/ },\n\n/***/ 629:\n/***/ function(module, exports, __webpack_require__) {\n\nmodule.exports = __webpack_require__(345);\n\n\n/***/ }\n\n},[629]);\n\n\n// WEBPACK FOOTER //\n// main.806d67070af66e18c2fc.bundle.js","/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.or
 g/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Injectable }    from '@angular/core';\nimport { Headers, Http } from '@angular/http';\n\nimport 'rxjs/add/operator/toPromise';\nimport { Subject }    from 'rxjs/Subject';\nimport { Observable }    from 'rxjs/Observable';\n\nimport { Topology } from './topology';\n\n@Injectable()\nexport class TopologyService {\n\n    apiUrl = '/gateway/manager/api/v1/';\n    topologiesUrl = this.apiUrl + 'topologies';\n    selectedTopologySource = new Subject<Topology>();\n    selectedTopology$ = this.selectedTopologySource.asObservable();\n    changedTopologySource = new Subject<string>();\n    changedTopology$ = this.changedTopologySo
 urce.asObservable();\n\n    constructor(private http: Http) { }\n\n    getTopologies(): Promise<Topology[]> {\n        let headers = new Headers();\n        this.addJsonHeaders(headers);\n        return this.http.get(this.topologiesUrl, {\n            headers: headers\n        } )\n            .toPromise()\n            .then(response => response.json().topologies.topology as Topology[])\n            .catch(this.handleError);\n    }\n\n    getTopology(href : string): Promise<string> {\n        let headers = new Headers();\n        this.addXmlHeaders(headers);\n        return this.http.get(href, {\n            headers: headers\n        } )\n            .toPromise()\n            .then(response => response.text())\n            .catch(this.handleError);\n\n    }\n\n    saveTopology(url: string, xml : string): Promise<string> {\n        let xheaders = new Headers();\n        this.addXmlHeaders(xheaders);\n        this.addCsrfHeaders(xheaders);\n        return this.http\n            .put(u
 rl, xml, {headers: xheaders})\n            .toPromise()\n            .then(() => xml)\n            .catch(this.handleError);\n\n    }\n\n    createTopology(name: string, xml : string): Promise<string> {\n        let xheaders = new Headers();\n        this.addXmlHeaders(xheaders);\n        this.addCsrfHeaders(xheaders);\n        let url = this.topologiesUrl + \"/\" + name;\n        return this.http\n            .put(url, xml, {headers: xheaders})\n            .toPromise()\n            .then(() => xml)\n            .catch(this.handleError);\n    }\n\n    deleteTopology(href: string): Promise<string> {\n        let headers = new Headers();\n        this.addJsonHeaders(headers);\n        this.addCsrfHeaders(headers);\n        return this.http.delete(href, {\n            headers: headers\n        } )\n            .toPromise()\n            .then(response => response.text())\n            .catch(this.handleError);\n    }\n\n    addJsonHeaders(headers: Headers) {\n        headers.append('Acc
 ept', 'application/json');\n        headers.append('Content-Type', 'application/json');\n    }\n\n    addXmlHeaders(headers: Headers) {\n        headers.append('Accept', 'application/xml');\n        headers.append('Content-Type', 'application/xml');\n    }\n\n    addCsrfHeaders(headers: Headers) {\n        headers.append('X-XSRF-Header', 'admin-ui');\n    }\n\n    selectedTopology(value: Topology) {\n        this.selectedTopologySource.next(value);\n    }\n\n    changedTopology(value: string) {\n        this.changedTopologySource.next(value);\n    }\n\n\n    private handleError(error: any): Promise<any> {\n        console.error('An error occurred', error); // for demo purposes only\n        return Promise.reject(error.message || error);\n    }\n}\n\n\n// WEBPACK FOOTER //\n// /Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/topology.service.ts","/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file
  distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Component } from '@angular/core';\nimport {TopologyService} from \"./topology.service\";\n\n@Component({\n    selector: 'topology-management',\n    template: `\n      <div class=\"container\">\n        <div class=\"row\">\n          <div class=\"col-md-5\">\n            <topology></topology>\n  
        </div>\n          <div class=\"col-md-7\">\n            <topology-detail></topology-detail>\n          </div>\n        </div>\n      </div>\n  `,\n    providers: [TopologyService]\n})\n\nexport class AppComponent {\n    constructor(private topologyService : TopologyService) {\n    }\n}\n\n\n// WEBPACK FOOTER //\n// /Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/app.component.ts","/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is
  distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Injectable }    from '@angular/core';\nimport { Headers, Http } from '@angular/http';\n\nimport 'rxjs/add/operator/toPromise';\n\nimport { GatewayVersion } from './gateway-version';\n\n@Injectable()\nexport class GatewayVersionService {\n\n    private apiUrl = '/gateway/manager/api/v1/version';\n\n    constructor(private http: Http) { }\n\n    getVersion(): Promise<GatewayVersion> {\n        let headers = new Headers();\n        this.addHeaders(headers);\n        return this.http.get(this.apiUrl, {\n            headers: headers\n        } )\n            .toPromise()\n            .then(response => response.json().ServerVersion as GatewayVersion)\n            .catch(this.handleError);\n    }\n\n    addHeaders(headers: Headers) {\n        headers.append('
 Accept', 'application/json');\n        headers.append('Content-Type', 'application/json');\n    }\n\n    private handleError(error: any): Promise<any> {\n        console.error('An error occurred', error); // for demo purposes only\n        return Promise.reject(error.message || error);\n    }\n}\n\n\n// WEBPACK FOOTER //\n// /Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/gateway-version.service.ts","/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under t
 he License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Component, Input } from '@angular/core';\n\n@Component({\n  selector: 'tab',\n  styles: [`\n    .pane{\n      padding: 1em;\n    }\n  `],\n  template: `\n    <div [hidden]=\"!active\" class=\"pane\">\n    </div>\n  `\n})\nexport class TabComponent {\n  @Input('tabTitle') title: string;\n  @Input() active = false;\n}\n\n\n// WEBPACK FOOTER //\n// /Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/utils/tab.component.ts","function webpackEmptyContext(req) {\n\tthrow new Error(\"Cannot find module '\" + req + \"'.\");\n}\nwebpackEmptyContext.keys = function() { return []; };\nwebpackEmptyContext.resolve = webpackEmptyContext;\nmodule.exports = webpackEmptyContext;\nwebpackEmptyContext.id = 344;\n\n\n\n//////////////////\n// WEBPACK FOO
 TER\n// /Users/sumit.gupta/Projects/knox/gateway-admin-ui/src async\n// module id = 344\n// module chunks = 0","/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport './polyfills.ts';\n\nimport { platformBrowserDynamic } f
 rom '@angular/platform-browser-dynamic';\nimport { enableProdMode } from '@angular/core';\nimport { environment } from './environments/environment';\nimport { AppModule } from './app/';\n\nif (environment.production) {\n  enableProdMode();\n}\n\nplatformBrowserDynamic().bootstrapModule(AppModule);\n\n\n\n// WEBPACK FOOTER //\n// /Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/main.ts","/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distr
 ibuted on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { NgModule }      from '@angular/core';\nimport { BrowserModule } from '@angular/platform-browser';\nimport { HttpModule }    from '@angular/http';\nimport { FormsModule } from '@angular/forms';\n\nimport { AppComponent }  from './app.component';\nimport {TopologyService} from \"./topology.service\";\nimport {GatewayVersionService} from \"./gateway-version.service\";\nimport {GatewayVersionComponent} from \"./gateway-version.component\";\nimport {TopologyComponent} from \"./topology.component\";\nimport {TopologyDetailComponent} from \"./topology-detail.component\";\nimport {XmlPipe} from \"./utils/xml.pipe\";\nimport {JsonPrettyPipe} from \"./utils/json-pretty.pipe\";\nimport { TabComponent } from './utils/tab.component';\nimport { TabsComponent } from './uti
 ls/tabs.component';\n\nimport { AceEditorDirective } from 'ng2-ace-editor'; \nimport { Ng2Bs3ModalModule } from 'ng2-bs3-modal/ng2-bs3-modal'\n\n@NgModule({\n  imports: [ BrowserModule,\n    HttpModule,\n    FormsModule,\n    Ng2Bs3ModalModule\n    ],\n  declarations: [ AppComponent,\n    TopologyComponent,\n      TopologyDetailComponent,\n    GatewayVersionComponent,\n    AceEditorDirective,\n    XmlPipe,\n    JsonPrettyPipe,\n    TabsComponent,\n    TabComponent ],\n  providers: [ TopologyService,\n    GatewayVersionService ],\n  bootstrap: [ AppComponent,\n    GatewayVersionComponent]\n})\nexport class AppModule { }\n\n\n\n// WEBPACK FOOTER //\n// /Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/app.module.ts","/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under 
 the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {Component, OnInit} from '@angular/core';\nimport {GatewayVersion} from './gateway-version';\nimport {GatewayVersionService} from \"./gateway-version.service\";\n\n\n@Component({\n    selector: 'gateway-version',\n    template: `\n        <div *ngIf=\"gatewayVersion\">\n            <span class=\"small\"><cite>Knox Gateway Version</cite> {{this.gatewayVersion.version}}</span>\n            <span class=\"small\"><cite>Hash</cite> {
 {this.gatewayVersion.hash}}</span>\n</div>`,\n    providers: [GatewayVersionService]\n})\n\nexport class GatewayVersionComponent implements OnInit {\n\n    gatewayVersion : GatewayVersion;\n\n    constructor(private gatewayVersionService : GatewayVersionService) {\n    }\n\n    getVersion(): void {\n        this.gatewayVersionService.getVersion().then( gatewayVersion => this.gatewayVersion = gatewayVersion);\n    }\n\n    ngOnInit(): void {\n        this.getVersion();\n    }\n\n\n}\n\n\n\n\n// WEBPACK FOOTER //\n// /Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/gateway-version.component.ts","/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.
   You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport * from './app.component';\nexport * from './app.module';\n\n\n\n// WEBPACK FOOTER //\n// /Users/sumit.gupta/Projects/knox/gateway-admin-ui/src/app/index.ts","/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements.  See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License.  You may 
 obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Component, OnInit, ViewChild, ViewEncapsulation} from '@angular/core';\nimport { Topology } from './topology';\nimport {TopologyService} from \"./topology.service\";\nimport { ModalComponent } from 'ng2-bs3-modal/ng2-bs3-modal';\n\n@Component({\n    selector: 'topology-detail',\n    template: `\n     <div class=\"panel panel-default\">\n        <div class=\"panel-heading\">\n            <h4 class=\"panel-title\">{{title}} <span class=\"label label-default pull-right\">{{titleSuffix}}</span></h4>\n         </div>\n     <div *ngIf=\"topologyContent\" class=\"
 panel-body\">\n      <div ace-editor\n       [readOnly]=\"false\" [text]=\"topologyContent | xml\" [mode]=\"'xml'\" [options]=\"options\" \n        [theme]=\"'monokai'\"\n         style=\"min-height: 300px; width:100%; overflow: auto;\" (textChanged)=\"onChange($event)\">\n      </div>\n       <div class=\"panel-footer\">\n        <button (click)=\"duplicateModal.open('sm')\" class=\"btn btn-default btn-sm\" type=\"submit\">\n            <span class=\"glyphicon glyphicon-duplicate\" aria-hidden=\"true\"></span>\n        </button>\n        <button (click)=\"deleteConfirmModal.open('sm')\" class=\"btn btn-default btn-sm\" type=\"submit\">\n            <span class=\"glyphicon glyphicon-trash\" aria-hidden=\"true\"></span>\n        </button>\n       <button (click)=\"saveTopology()\" class=\"btn btn-default btn-sm pull-right\" [disabled]=\"!changedTopology\" type=\"submit\">\n            <span class=\"glyphicon glyphicon-floppy-disk\" aria-hidden=\"true\"></span>\n        </button>\n   
     </div>\n         \n    </div>\n    <modal (onClose)=\"createTopology()\" #duplicateModal>\n\n        <modal-head

<TRUNCATED>

[40/49] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-provider-security-pac4j/src/test/java/org/apache/knox/gateway/pac4j/Pac4jProviderTest.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-pac4j/src/test/java/org/apache/knox/gateway/pac4j/Pac4jProviderTest.java
index e4e0462,0000000..e69c599
mode 100644,000000..100644
--- a/gateway-provider-security-pac4j/src/test/java/org/apache/knox/gateway/pac4j/Pac4jProviderTest.java
+++ b/gateway-provider-security-pac4j/src/test/java/org/apache/knox/gateway/pac4j/Pac4jProviderTest.java
@@@ -1,150 -1,0 +1,335 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.pac4j;
 +
 +import org.apache.knox.gateway.audit.api.AuditContext;
 +import org.apache.knox.gateway.audit.api.AuditService;
 +import org.apache.knox.gateway.audit.api.Auditor;
 +import org.apache.knox.gateway.pac4j.filter.Pac4jDispatcherFilter;
 +import org.apache.knox.gateway.pac4j.filter.Pac4jIdentityAdapter;
 +import org.apache.knox.gateway.pac4j.session.KnoxSessionStore;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.knox.gateway.services.security.impl.DefaultCryptoService;
 +import org.junit.Test;
 +import org.pac4j.core.client.Clients;
 +import org.pac4j.core.context.Pac4jConstants;
 +import org.pac4j.http.client.indirect.IndirectBasicAuthClient;
 +
 +import javax.servlet.*;
 +import javax.servlet.http.*;
 +
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
- 
 +import static org.mockito.Mockito.*;
 +import static org.junit.Assert.*;
 +
 +/**
 + * This class simulates a full authentication process using pac4j.
 + */
 +public class Pac4jProviderTest {
 +
 +    private static final String LOCALHOST = "127.0.0.1";
 +    private static final String HADOOP_SERVICE_URL = "https://" + LOCALHOST + ":8443/gateway/sandox/webhdfs/v1/tmp?op=LISTSTATUS";
 +    private static final String KNOXSSO_SERVICE_URL = "https://" + LOCALHOST + ":8443/gateway/idp/api/v1/websso";
 +    private static final String PAC4J_CALLBACK_URL = KNOXSSO_SERVICE_URL;
 +    private static final String ORIGINAL_URL = "originalUrl";
 +    private static final String CLUSTER_NAME = "knox";
 +    private static final String PAC4J_PASSWORD = "pwdfortest";
 +    private static final String CLIENT_CLASS = IndirectBasicAuthClient.class.getSimpleName();
 +    private static final String USERNAME = "jleleu";
 +
 +    @Test
 +    public void test() throws Exception {
 +        final AliasService aliasService = mock(AliasService.class);
 +        when(aliasService.getPasswordFromAliasForCluster(CLUSTER_NAME, KnoxSessionStore.PAC4J_PASSWORD, true)).thenReturn(PAC4J_PASSWORD.toCharArray());
 +        when(aliasService.getPasswordFromAliasForCluster(CLUSTER_NAME, KnoxSessionStore.PAC4J_PASSWORD)).thenReturn(PAC4J_PASSWORD.toCharArray());
 +
 +        final DefaultCryptoService cryptoService = new DefaultCryptoService();
 +        cryptoService.setAliasService(aliasService);
 +
 +        final GatewayServices services = mock(GatewayServices.class);
 +        when(services.getService(GatewayServices.CRYPTO_SERVICE)).thenReturn(cryptoService);
 +        when(services.getService(GatewayServices.ALIAS_SERVICE)).thenReturn(aliasService);
 +
 +        final ServletContext context = mock(ServletContext.class);
 +        when(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).thenReturn(services);
 +        when(context.getAttribute(GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE)).thenReturn(CLUSTER_NAME);
 +
 +        final FilterConfig config = mock(FilterConfig.class);
 +        when(config.getServletContext()).thenReturn(context);
 +        when(config.getInitParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_URL)).thenReturn(PAC4J_CALLBACK_URL);
 +        when(config.getInitParameter("clientName")).thenReturn(Pac4jDispatcherFilter.TEST_BASIC_AUTH);
 +
 +        final Pac4jDispatcherFilter dispatcher = new Pac4jDispatcherFilter();
 +        dispatcher.init(config);
 +        final Pac4jIdentityAdapter adapter = new Pac4jIdentityAdapter();
 +        adapter.init(config);
 +        Pac4jIdentityAdapter.setAuditor(mock(Auditor.class));
 +        final AuditService auditService = mock(AuditService.class);
 +        when(auditService.getContext()).thenReturn(mock(AuditContext.class));
 +        Pac4jIdentityAdapter.setAuditService(auditService);
 +
 +        // step 1: call the KnoxSSO service with an original url pointing to a Hadoop service (redirected by the SSOCookieProvider)
 +        MockHttpServletRequest request = new MockHttpServletRequest();
 +        request.setRequestURL(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL);
 +        request.setCookies(new Cookie[0]);
 +        request.setServerName(LOCALHOST);
 +        MockHttpServletResponse response = new MockHttpServletResponse();
 +        FilterChain filterChain = mock(FilterChain.class);
 +        dispatcher.doFilter(request, response, filterChain);
 +        // it should be a redirection to the idp topology
 +        assertEquals(302, response.getStatus());
 +        assertEquals(PAC4J_CALLBACK_URL + "?" + Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER + "=true&" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + CLIENT_CLASS, response.getHeaders().get("Location"));
 +        // we should have one cookie for the saved requested url
 +        List<Cookie> cookies = response.getCookies();
 +        assertEquals(1, cookies.size());
 +        final Cookie requestedUrlCookie = cookies.get(0);
 +        assertEquals(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.REQUESTED_URL, requestedUrlCookie.getName());
 +
 +        // step 2: send credentials to the callback url (callback from the identity provider)
 +        request = new MockHttpServletRequest();
 +        request.setCookies(new Cookie[]{requestedUrlCookie});
 +        request.setRequestURL(PAC4J_CALLBACK_URL + "?" + Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER + "=true&" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + CLIENT_CLASS);
 +        request.addParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER, "true");
 +        request.addParameter(Clients.DEFAULT_CLIENT_NAME_PARAMETER, CLIENT_CLASS);
 +        request.addHeader("Authorization", "Basic amxlbGV1OmpsZWxldQ==");
 +        request.setServerName(LOCALHOST);
 +        response = new MockHttpServletResponse();
 +        filterChain = mock(FilterChain.class);
 +        dispatcher.doFilter(request, response, filterChain);
 +        // it should be a redirection to the original url
 +        assertEquals(302, response.getStatus());
 +        assertEquals(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL, response.getHeaders().get("Location"));
 +        // we should have 3 cookies, including the user profile
 +        cookies = response.getCookies();
 +        Map<String, String> mapCookies = new HashMap<>();
 +        assertEquals(3, cookies.size());
 +        for (final Cookie cookie : cookies) {
 +            mapCookies.put(cookie.getName(), cookie.getValue());
 +        }
 +        assertNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + CLIENT_CLASS + "$attemptedAuthentication"));
 +        assertNotNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.USER_PROFILES));
 +        assertNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.REQUESTED_URL));
 +
 +        // step 3: turn pac4j identity into KnoxSSO identity
 +        request = new MockHttpServletRequest();
 +        request.setCookies(cookies.toArray(new Cookie[cookies.size()]));
 +        request.setRequestURL(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL);
 +        request.setServerName(LOCALHOST);
 +        response = new MockHttpServletResponse();
 +        filterChain = mock(FilterChain.class);
 +        dispatcher.doFilter(request, response, filterChain);
 +        assertEquals(0, response.getStatus());
 +        adapter.doFilter(request, response, filterChain);
 +        cookies = response.getCookies();
 +        assertEquals(1, cookies.size());
 +        final Cookie userProfileCookie = cookies.get(0);
 +        // the user profile has been cleaned
 +        assertEquals(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.USER_PROFILES, userProfileCookie.getName());
 +        assertNull(userProfileCookie.getValue());
 +        assertEquals(USERNAME, adapter.getTestIdentifier());
 +    }
++
++    @Test
++    public void testValidIdAttribute() throws Exception {
++        final AliasService aliasService = mock(AliasService.class);
++        when(aliasService.getPasswordFromAliasForCluster(CLUSTER_NAME, KnoxSessionStore.PAC4J_PASSWORD, true)).thenReturn(PAC4J_PASSWORD.toCharArray());
++        when(aliasService.getPasswordFromAliasForCluster(CLUSTER_NAME, KnoxSessionStore.PAC4J_PASSWORD)).thenReturn(PAC4J_PASSWORD.toCharArray());
++
++        final DefaultCryptoService cryptoService = new DefaultCryptoService();
++        cryptoService.setAliasService(aliasService);
++
++        final GatewayServices services = mock(GatewayServices.class);
++        when(services.getService(GatewayServices.CRYPTO_SERVICE)).thenReturn(cryptoService);
++        when(services.getService(GatewayServices.ALIAS_SERVICE)).thenReturn(aliasService);
++
++        final ServletContext context = mock(ServletContext.class);
++        when(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).thenReturn(services);
++        when(context.getAttribute(GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE)).thenReturn(CLUSTER_NAME);
++
++        final FilterConfig config = mock(FilterConfig.class);
++        when(config.getServletContext()).thenReturn(context);
++        when(config.getInitParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_URL)).thenReturn(PAC4J_CALLBACK_URL);
++        when(config.getInitParameter("clientName")).thenReturn(Pac4jDispatcherFilter.TEST_BASIC_AUTH);
++        when(config.getInitParameter(Pac4jIdentityAdapter.PAC4J_ID_ATTRIBUTE)).thenReturn("username");
++
++        final Pac4jDispatcherFilter dispatcher = new Pac4jDispatcherFilter();
++        dispatcher.init(config);
++        final Pac4jIdentityAdapter adapter = new Pac4jIdentityAdapter();
++        adapter.init(config);
++        Pac4jIdentityAdapter.setAuditor(mock(Auditor.class));
++        final AuditService auditService = mock(AuditService.class);
++        when(auditService.getContext()).thenReturn(mock(AuditContext.class));
++        Pac4jIdentityAdapter.setAuditService(auditService);
++
++        // step 1: call the KnoxSSO service with an original url pointing to a Hadoop service (redirected by the SSOCookieProvider)
++        MockHttpServletRequest request = new MockHttpServletRequest();
++        request.setRequestURL(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL);
++        request.setCookies(new Cookie[0]);
++        request.setServerName(LOCALHOST);
++        MockHttpServletResponse response = new MockHttpServletResponse();
++        FilterChain filterChain = mock(FilterChain.class);
++        dispatcher.doFilter(request, response, filterChain);
++        // it should be a redirection to the idp topology
++        assertEquals(302, response.getStatus());
++        assertEquals(PAC4J_CALLBACK_URL + "?" + Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER + "=true&" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + CLIENT_CLASS, response.getHeaders().get("Location"));
++        // we should have one cookie for the saved requested url
++        List<Cookie> cookies = response.getCookies();
++        assertEquals(1, cookies.size());
++        final Cookie requestedUrlCookie = cookies.get(0);
++        assertEquals(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.REQUESTED_URL, requestedUrlCookie.getName());
++
++        // step 2: send credentials to the callback url (callback from the identity provider)
++        request = new MockHttpServletRequest();
++        request.setCookies(new Cookie[]{requestedUrlCookie});
++        request.setRequestURL(PAC4J_CALLBACK_URL + "?" + Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER + "=true&" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + CLIENT_CLASS);
++        request.addParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER, "true");
++        request.addParameter(Clients.DEFAULT_CLIENT_NAME_PARAMETER, CLIENT_CLASS);
++        request.addHeader("Authorization", "Basic amxlbGV1OmpsZWxldQ==");
++        request.setServerName(LOCALHOST);
++        response = new MockHttpServletResponse();
++        filterChain = mock(FilterChain.class);
++        dispatcher.doFilter(request, response, filterChain);
++        // it should be a redirection to the original url
++        assertEquals(302, response.getStatus());
++        assertEquals(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL, response.getHeaders().get("Location"));
++        // we should have 3 cookies, including the user profile
++        cookies = response.getCookies();
++        Map<String, String> mapCookies = new HashMap<>();
++        assertEquals(3, cookies.size());
++        for (final Cookie cookie : cookies) {
++            mapCookies.put(cookie.getName(), cookie.getValue());
++        }
++        assertNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + CLIENT_CLASS + "$attemptedAuthentication"));
++        assertNotNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.USER_PROFILES));
++        assertNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.REQUESTED_URL));
++
++        // step 3: turn pac4j identity into KnoxSSO identity
++        request = new MockHttpServletRequest();
++        request.setCookies(cookies.toArray(new Cookie[cookies.size()]));
++        request.setRequestURL(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL);
++        request.setServerName(LOCALHOST);
++        response = new MockHttpServletResponse();
++        filterChain = mock(FilterChain.class);
++        dispatcher.doFilter(request, response, filterChain);
++        assertEquals(0, response.getStatus());
++        adapter.doFilter(request, response, filterChain);
++        cookies = response.getCookies();
++        assertEquals(1, cookies.size());
++        final Cookie userProfileCookie = cookies.get(0);
++        // the user profile has been cleaned
++        assertEquals(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.USER_PROFILES, userProfileCookie.getName());
++        assertNull(userProfileCookie.getValue());
++        assertEquals(USERNAME, adapter.getTestIdentifier());
++    }
++
++    @Test
++    public void testInvalidIdAttribute() throws Exception {
++        final AliasService aliasService = mock(AliasService.class);
++        when(aliasService.getPasswordFromAliasForCluster(CLUSTER_NAME, KnoxSessionStore.PAC4J_PASSWORD, true)).thenReturn(PAC4J_PASSWORD.toCharArray());
++        when(aliasService.getPasswordFromAliasForCluster(CLUSTER_NAME, KnoxSessionStore.PAC4J_PASSWORD)).thenReturn(PAC4J_PASSWORD.toCharArray());
++
++        final DefaultCryptoService cryptoService = new DefaultCryptoService();
++        cryptoService.setAliasService(aliasService);
++
++        final GatewayServices services = mock(GatewayServices.class);
++        when(services.getService(GatewayServices.CRYPTO_SERVICE)).thenReturn(cryptoService);
++        when(services.getService(GatewayServices.ALIAS_SERVICE)).thenReturn(aliasService);
++
++        final ServletContext context = mock(ServletContext.class);
++        when(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).thenReturn(services);
++        when(context.getAttribute(GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE)).thenReturn(CLUSTER_NAME);
++
++        final FilterConfig config = mock(FilterConfig.class);
++        when(config.getServletContext()).thenReturn(context);
++        when(config.getInitParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_URL)).thenReturn(PAC4J_CALLBACK_URL);
++        when(config.getInitParameter("clientName")).thenReturn(Pac4jDispatcherFilter.TEST_BASIC_AUTH);
++        when(config.getInitParameter(Pac4jIdentityAdapter.PAC4J_ID_ATTRIBUTE)).thenReturn("larry");
++
++        final Pac4jDispatcherFilter dispatcher = new Pac4jDispatcherFilter();
++        dispatcher.init(config);
++        final Pac4jIdentityAdapter adapter = new Pac4jIdentityAdapter();
++        adapter.init(config);
++        Pac4jIdentityAdapter.setAuditor(mock(Auditor.class));
++        final AuditService auditService = mock(AuditService.class);
++        when(auditService.getContext()).thenReturn(mock(AuditContext.class));
++        Pac4jIdentityAdapter.setAuditService(auditService);
++
++        // step 1: call the KnoxSSO service with an original url pointing to a Hadoop service (redirected by the SSOCookieProvider)
++        MockHttpServletRequest request = new MockHttpServletRequest();
++        request.setRequestURL(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL);
++        request.setCookies(new Cookie[0]);
++        request.setServerName(LOCALHOST);
++        MockHttpServletResponse response = new MockHttpServletResponse();
++        FilterChain filterChain = mock(FilterChain.class);
++        dispatcher.doFilter(request, response, filterChain);
++        // it should be a redirection to the idp topology
++        assertEquals(302, response.getStatus());
++        assertEquals(PAC4J_CALLBACK_URL + "?" + Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER + "=true&" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + CLIENT_CLASS, response.getHeaders().get("Location"));
++        // we should have one cookie for the saved requested url
++        List<Cookie> cookies = response.getCookies();
++        assertEquals(1, cookies.size());
++        final Cookie requestedUrlCookie = cookies.get(0);
++        assertEquals(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.REQUESTED_URL, requestedUrlCookie.getName());
++
++        // step 2: send credentials to the callback url (callback from the identity provider)
++        request = new MockHttpServletRequest();
++        request.setCookies(new Cookie[]{requestedUrlCookie});
++        request.setRequestURL(PAC4J_CALLBACK_URL + "?" + Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER + "=true&" + Clients.DEFAULT_CLIENT_NAME_PARAMETER + "=" + CLIENT_CLASS);
++        request.addParameter(Pac4jDispatcherFilter.PAC4J_CALLBACK_PARAMETER, "true");
++        request.addParameter(Clients.DEFAULT_CLIENT_NAME_PARAMETER, CLIENT_CLASS);
++        request.addHeader("Authorization", "Basic amxlbGV1OmpsZWxldQ==");
++        request.setServerName(LOCALHOST);
++        response = new MockHttpServletResponse();
++        filterChain = mock(FilterChain.class);
++        dispatcher.doFilter(request, response, filterChain);
++        // it should be a redirection to the original url
++        assertEquals(302, response.getStatus());
++        assertEquals(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL, response.getHeaders().get("Location"));
++        // we should have 3 cookies, including the user profile
++        cookies = response.getCookies();
++        Map<String, String> mapCookies = new HashMap<>();
++        assertEquals(3, cookies.size());
++        for (final Cookie cookie : cookies) {
++            mapCookies.put(cookie.getName(), cookie.getValue());
++        }
++        assertNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + CLIENT_CLASS + "$attemptedAuthentication"));
++        assertNotNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.USER_PROFILES));
++        assertNull(mapCookies.get(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.REQUESTED_URL));
++
++        // step 3: turn pac4j identity into KnoxSSO identity
++        request = new MockHttpServletRequest();
++        request.setCookies(cookies.toArray(new Cookie[cookies.size()]));
++        request.setRequestURL(KNOXSSO_SERVICE_URL + "?" + ORIGINAL_URL + "=" + HADOOP_SERVICE_URL);
++        request.setServerName(LOCALHOST);
++        response = new MockHttpServletResponse();
++        filterChain = mock(FilterChain.class);
++        dispatcher.doFilter(request, response, filterChain);
++        assertEquals(0, response.getStatus());
++        adapter.doFilter(request, response, filterChain);
++        cookies = response.getCookies();
++        assertEquals(1, cookies.size());
++        final Cookie userProfileCookie = cookies.get(0);
++        // the user profile has been cleaned
++        assertEquals(KnoxSessionStore.PAC4J_SESSION_PREFIX + Pac4jConstants.USER_PROFILES, userProfileCookie.getName());
++        assertNull(userProfileCookie.getValue());
++        assertEquals(USERNAME, adapter.getTestIdentifier());
++    }
++
 +}
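
The test above walks the KnoxSSO/pac4j handshake in three steps, and in step 2 it authenticates against the IndirectBasicAuthClient with the header "Authorization: Basic amxlbGV1OmpsZWxldQ==", which is simply the base64 encoding of "jleleu:jleleu" (matching the USERNAME constant). Below is a minimal, self-contained sketch of building and decoding such a header with java.util.Base64; the class and method names are illustrative only and are not part of the Knox code base:

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    // Illustrative helper (not Knox code): builds and decodes an HTTP Basic
    // Authorization header value like the one sent by the test above.
    public class BasicAuthHeaderSketch {

        // encode("jleleu", "jleleu") yields "Basic amxlbGV1OmpsZWxldQ==".
        static String encode(String user, String password) {
            String pair = user + ":" + password;
            return "Basic " + Base64.getEncoder()
                    .encodeToString(pair.getBytes(StandardCharsets.UTF_8));
        }

        // Reverses the encoding and returns the "user:password" pair.
        static String decode(String headerValue) {
            String token = headerValue.substring("Basic ".length());
            return new String(Base64.getDecoder().decode(token), StandardCharsets.UTF_8);
        }

        public static void main(String[] args) {
            System.out.println(encode("jleleu", "jleleu"));            // Basic amxlbGV1OmpsZWxldQ==
            System.out.println(decode("Basic amxlbGV1OmpsZWxldQ=="));  // jleleu:jleleu
        }
    }

Decoding the header this way is a quick check that the credentials presented by the mock request line up with the identifier the test ultimately asserts on via adapter.getTestIdentifier().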

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-release/home/conf/topologies/admin.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-release/home/conf/topologies/knoxsso.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-release/home/conf/topologies/manager.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-release/home/conf/topologies/sandbox.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
index 61c5303,0000000..f10f97b
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
@@@ -1,553 -1,0 +1,615 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import org.apache.commons.cli.ParseException;
 +import org.apache.knox.gateway.i18n.messages.Message;
 +import org.apache.knox.gateway.i18n.messages.MessageLevel;
 +import org.apache.knox.gateway.i18n.messages.Messages;
 +import org.apache.knox.gateway.i18n.messages.StackTrace;
 +import org.apache.knox.gateway.services.security.KeystoreServiceException;
 +
 +import java.io.File;
 +import java.net.URI;
 +import java.util.Date;
 +import java.util.Map;
 +import java.util.Set;
 +
 +/**
 + * Logging messages for the Knox gateway server.
 + */
 +@Messages(logger="org.apache.knox.gateway")
 +public interface GatewayMessages {
 +
 +  @Message( level = MessageLevel.FATAL, text = "Failed to parse command line: {0}" )
 +  void failedToParseCommandLine( @StackTrace( level = MessageLevel.DEBUG ) ParseException e );
 +
 +  @Message( level = MessageLevel.INFO, text = "Starting gateway..." )
 +  void startingGateway();
 +
 +  @Message( level = MessageLevel.FATAL, text = "Failed to start gateway: {0}" )
 +  void failedToStartGateway( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.INFO, text = "Started gateway on port {0}." )
 +  void startedGateway( int port );
 +
 +  @Message( level = MessageLevel.INFO, text = "Stopping gateway..." )
 +  void stoppingGateway();
 +
 +  @Message( level = MessageLevel.INFO, text = "Stopped gateway." )
 +  void stoppedGateway();
 +
 +  @Message( level = MessageLevel.INFO, text = "Loading configuration resource {0}" )
 +  void loadingConfigurationResource( String res );
 +
 +  @Message( level = MessageLevel.INFO, text = "Loading configuration file {0}" )
 +  void loadingConfigurationFile( String file );
 +
 +  @Message( level = MessageLevel.WARN, text = "Failed to load configuration file {0}: {1}" )
 +  void failedToLoadConfig( String path, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.INFO, text = "Using {1} as GATEWAY_HOME via {0}." )
 +  void settingGatewayHomeDir( String location, String home );
 +
 +  @Message( level = MessageLevel.INFO, text = "Loading topologies from directory: {0}" )
 +  void loadingTopologiesFromDirectory( String topologiesDir );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Loading topology file: {0}" )
 +  void loadingTopologyFile( String fileName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Monitoring topologies in directory: {0}" )
 +  void monitoringTopologyChangesInDirectory( String topologiesDir );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deploying topology {0} to {1}" )
 +  void deployingTopology( String clusterName, String warDirName );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Deployed topology {0}." )
 +  void deployedTopology( String clusterName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Loading topology {0} from {1}" )
 +  void redeployingTopology( String clusterName, String warDirName );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Redeployed topology {0}." )
 +  void redeployedTopology( String clusterName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Activating topology {0}" )
 +  void activatingTopology( String name );
 +
 +  @Message( level = MessageLevel.INFO, text = "Activating topology {0} archive {1}" )
 +  void activatingTopologyArchive( String topology, String archive );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deactivating topology {0}" )
 +  void deactivatingTopology( String name );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to deploy topology {0}: {1}" )
 +  void failedToDeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Throwable e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to redeploy topology {0}" )
 +  void failedToRedeployTopology( String name );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to redeploy topology {0}: {1}" )
 +  void failedToRedeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Throwable e );
 +
 +  @Message(level = MessageLevel.ERROR, text = "Failed to load topology {0}: Topology configuration is invalid!")
 +  void failedToLoadTopology(String fileName);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to redeploy topologies: {0}" )
 +  void failedToRedeployTopologies( @StackTrace(level=MessageLevel.DEBUG) Throwable e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to undeploy topology {0}: {1}" )
 +  void failedToUndeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Exception e );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deleting topology {0}" )
 +  void deletingTopology( String topologyName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deleting deployed topology {0}" )
 +  void deletingDeployment( String warDirName );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Purge backups of deployed topology {0}" )
 +  void cleanupDeployments( String topologyName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deleting backup deployed topology {0}" )
 +  void cleanupDeployment( String absolutePath );
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating gateway home directory: {0}" )
 +  void creatingGatewayHomeDir( File homeDir );
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating gateway deployment directory: {0}" )
 +  void creatingGatewayDeploymentDir( File topologiesDir );
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating default gateway configuration file: {0}" )
 +  void creatingDefaultConfigFile( File defaultConfigFile );
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating sample topology file: {0}" )
 +  void creatingDefaultTopologyFile( File defaultConfigFile );
 +
 +  @Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null name: {0}" )
 +  void ignoringServiceContributorWithMissingName( String className );
 +
 +  @Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null role: {0}" )
 +  void ignoringServiceContributorWithMissingRole( String className );
 +
 +  @Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null version: {0}" )
 +  void ignoringServiceContributorWithMissingVersion( String className );
 +
 +  @Message( level = MessageLevel.WARN, text = "Ignoring provider deployment contributor with invalid null name: {0}" )
 +  void ignoringProviderContributorWithMissingName( String className );
 +
 +  @Message( level = MessageLevel.WARN, text = "Ignoring provider deployment contributor with invalid null role: {0}" )
 +  void ignoringProviderContributorWithMissingRole( String className );
 +
 +  @Message( level = MessageLevel.INFO, text = "Loaded logging configuration: {0}" )
 +  void loadedLoggingConfig( String fileName );
 +
 +  @Message( level = MessageLevel.WARN, text = "Failed to load logging configuration: {0}" )
 +  void failedToLoadLoggingConfig( String fileName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating credential store for the gateway instance." )
 +  void creatingCredentialStoreForGateway();
 +
 +  @Message( level = MessageLevel.INFO, text = "Credential store for the gateway instance found - no need to create one." )
 +  void credentialStoreForGatewayFoundNotCreating();
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating keystore for the gateway instance." )
 +  void creatingKeyStoreForGateway();
 +
 +  @Message( level = MessageLevel.INFO, text = "Keystore for the gateway instance found - no need to create one." )
 +  void keyStoreForGatewayFoundNotCreating();
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating credential store for the cluster: {0}" )
 +  void creatingCredentialStoreForCluster(String clusterName);
 +
 +  @Message( level = MessageLevel.INFO, text = "Credential store found for the cluster: {0} - no need to create one." )
 +  void credentialStoreForClusterFoundNotCreating(String clusterName);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Received request: {0} {1}" )
 +  void receivedRequest( String method, String uri );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Dispatch request: {0} {1}" )
 +  void dispatchRequest( String method, URI uri );
 +  
 +  @Message( level = MessageLevel.WARN, text = "Connection exception dispatching request: {0} {1}" )
 +  void dispatchServiceConnectionException( URI uri, @StackTrace(level=MessageLevel.WARN) Exception e );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Signature verified: {0}" )
 +  void signatureVerified( boolean verified );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Apache Knox Gateway {0} ({1})" )
 +  void gatewayVersionMessage( String version, String hash );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to inject service {0}: {1}" )
 +  void failedToInjectService( String serviceName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to finalize contribution: {0}" )
 +  void failedToFinalizeContribution( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to contribute service [role={1}, name={0}]: {2}" )
 +  void failedToContributeService( String name, String role, @StackTrace( level = MessageLevel.ERROR ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to contribute provider [role={1}, name={0}]: {2}" )
 +  void failedToContributeProvider( String name, String role, @StackTrace( level = MessageLevel.ERROR ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to initialize contribution: {0}" )
 +  void failedToInitializeContribution( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to initialize servlet instance: {0}" )
 +  void failedToInitializeServletInstace( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Gateway processing failed: {0}" )
 +  void failedToExecuteFilter( @StackTrace( level = MessageLevel.INFO ) Throwable t );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to load topology {0}: {1}")
 +  void failedToLoadTopology( String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to load topology {0}, retrying after {1}ms: {2}")
 +  void failedToLoadTopologyRetrying( String friendlyURI, String delay, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to handle topology events: {0}" )
 +  void failedToHandleTopologyEvents( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to reload topologies: {0}" )
 +  void failedToReloadTopologies( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.FATAL, text = "Unsupported encoding: {0}" )
 +  void unsupportedEncoding( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to persist master secret: {0}" )
 +  void failedToPersistMasterSecret( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to encrypt master secret: {0}" )
 +  void failedToEncryptMasterSecret( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to initialize master service from persistent master {0}: {1}" )
 +  void failedToInitializeFromPersistentMaster( String masterFileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to encode passphrase: {0}" )
 +  void failedToEncodePassphrase( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to verify signature: {0}")
 +  void failedToVerifySignature( @StackTrace(level=MessageLevel.DEBUG) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to sign the data: {0}")
 +  void failedToSignData( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to decrypt password for cluster {0}: {1}" )
 +  void failedToDecryptPasswordForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to encrypt password for cluster {0}: {1}")
 +  void failedToEncryptPasswordForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +  
 +  @Message( level = MessageLevel.ERROR, text = "Failed to create keystore [filename={0}, type={1}]: {2}" )
 +  void failedToCreateKeystore( String fileName, String keyStoreType, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to load keystore [filename={0}, type={1}]: {2}" )
 +  void failedToLoadKeystore( String fileName, String keyStoreType, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to add key for cluster {0}: {1}" )
 +  void failedToAddKeyForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to add credential for cluster {0}: {1}" )
 +  void failedToAddCredentialForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +  
 +  @Message( level = MessageLevel.ERROR, text = "Failed to get key for Gateway {0}: {1}" )
 +  void failedToGetKeyForGateway( String alias, @StackTrace( level=MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to get credential for cluster {0}: {1}" )
 +  void failedToGetCredentialForCluster( String clusterName, @StackTrace(level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to get key for cluster {0}: {1}" )
 +  void failedToGetKeyForCluster( String clusterName, @StackTrace(level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to add self signed certificate for Gateway {0}: {1}" )
 +  void failedToAddSeflSignedCertForGateway( String alias, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to generate secret key from password: {0}" )
 +  void failedToGenerateKeyFromPassword( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to establish connection to {0}: {1}" )
 +  void failedToEstablishConnectionToUrl( String url, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to interpret property \"{0}\": {1}")
 +  void failedToInterpretProperty( String property, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to instantiate the internal gateway services." )
 +  void failedToInstantiateGatewayServices();
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to serialize map to Json string {0}: {1}" )
 +  void failedToSerializeMapToJSON( Map<String, Object> map, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to get map from Json string {0}: {1}" )
 +  void failedToGetMapFromJsonString( String json, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +  
 +  @Message( level = MessageLevel.DEBUG, text = "Successful Knox->Hadoop SPNegotiation authentication for URL: {0}" )
 +  void successfulSPNegoAuthn(String uri);
 +  
 +  @Message( level = MessageLevel.ERROR, text = "Failed Knox->Hadoop SPNegotiation authentication for URL: {0}" )
 +  void failedSPNegoAuthn(String uri);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Dispatch response status: {0}" )
 +  void dispatchResponseStatusCode(int statusCode);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Dispatch response status: {0}, Location: {1}" )
 +  void dispatchResponseCreatedStatusCode( int statusCode, String location );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to decrypt cipher text for cluster {0}: due to inability to retrieve the password." )
 +  void failedToDecryptCipherForClusterNullPassword(String clusterName);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Gateway services have not been initialized." )
 +  void gatewayServicesNotInitialized();
 +
 +  @Message( level = MessageLevel.INFO, text = "The Gateway SSL certificate is issued to hostname: {0}." )
 +  void certificateHostNameForGateway(String cn);
 +
 +  @Message( level = MessageLevel.INFO, text = "The Gateway SSL certificate is valid between: {0} and {1}." )
 +  void certificateValidityPeriod(Date notBefore, Date notAfter);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Unable to retrieve certificate for Gateway: {0}." )
 +  void unableToRetrieveCertificateForGateway(Exception e);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to generate alias for cluster: {0} {1}." )
 +  void failedToGenerateAliasForCluster(String clusterName, KeystoreServiceException e);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Key passphrase not found in credential store - using master secret." )
 +  void assumingKeyPassphraseIsMaster();
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to remove alias for cluster: {0} {1}." )
 +  void failedToRemoveCredentialForCluster(String clusterName, Exception e);
 +
 +  @Message( level = MessageLevel.WARN, text = "Failed to match path {0}" )
 +  void failedToMatchPath( String path );
 +  
 +  @Message( level = MessageLevel.ERROR, text = "Failed to get system ldap connection: {0}" )
 +  void failedToGetSystemLdapConnection( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +  
 +  @Message( level = MessageLevel.WARN, text = "Value not found for cluster:{0}, alias: {1}" )
 +  void aliasValueNotFound( String cluster, String alias );
 +
 +  @Message( level = MessageLevel.INFO, text = "Computed userDn: {0} using dnTemplate for principal: {1}" )
 +  void computedUserDn(String userDn, String principal);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Searching from {0} where {1} scope {2}" )
 +  void searchBaseFilterScope( String searchBase, String searchFilter, String searchScope );
 +
 +  @Message( level = MessageLevel.INFO, text = "Computed userDn: {0} using ldapSearch for principal: {1}" )
 +  void searchedAndFoundUserDn(String userDn, String principal);
 +
 +  @Message( level = MessageLevel.INFO, text = "Computed roles/groups: {0} for principal: {1}" )
 +  void lookedUpUserRoles(Set<String> roleNames, String userName);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Initialize provider: {1}/{0}" )
 +  void initializeProvider( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Initialize service: {1}/{0}" )
 +  void initializeService( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Contribute provider: {1}/{0}" )
 +  void contributeProvider( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Contribute service: {1}/{0}" )
 +  void contributeService( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Finalize provider: {1}/{0}" )
 +  void finalizeProvider( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Finalize service: {1}/{0}" )
 +  void finalizeService( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Configured services directory is {0}" )
 +  void usingServicesDirectory(String path);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to unmarshall service definition file {0} file : {1}" )
 +  void failedToLoadServiceDefinition(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to find service definition file {0} file : {1}" )
 +  void failedToFindServiceDefinitionFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to find rewrite file {0} file : {1}" )
 +  void failedToFindRewriteFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to unmarshall rewrite file {0} file : {1}" )
 +  void failedToLoadRewriteFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "No rewrite file found in service directory {0}" )
 +  void noRewriteFileFound(String path);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Added Service definition name: {0}, role : {1}, version : {2}" )
 +  void addedServiceDefinition(String serviceName, String serviceRole, String version);
 +
 +  @Message( level = MessageLevel.INFO, text = "System Property: {0}={1}" )
 +  void logSysProp( String name, String property );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Unable to get password: {0}" )
 +  void unableToGetPassword(@StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Initialize application: {0}" )
 +  void initializeApplication( String name );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Contribute application: {0}" )
 +  void contributeApplication( String name );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Finalize application: {0}" )
 +  void finalizeApplication( String name );
 +
 +  @Message( level = MessageLevel.INFO, text = "Default topology {0} at {1}" )
 +  void defaultTopologySetup( String defaultTopologyName, String redirectContext );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Default topology forward from {0} to {1}" )
 +  void defaultTopologyForward( String oldTarget, String newTarget );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Unable to setup PagedResults" )
 +  void unableToSetupPagedResults();
 +
 +  @Message( level = MessageLevel.INFO, text = "Ignoring PartialResultException" )
 +  void ignoringPartialResultException();
 +
 +  @Message( level = MessageLevel.WARN, text = "Only retrieved first {0} groups due to SizeLimitExceededException." )
 +  void sizeLimitExceededOnlyRetrieved(int numResults);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Failed to parse path into Template: {0} : {1}" )
 +  void failedToParsePath( String path, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Failed to initialize metrics reporter {0}  : {1}" )
 +  void failedToInitializeReporter( String name,  @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Failed to start metrics reporter {0}  : {1}" )
 +  void failedToStartReporter( String name,  @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Failed to stop metrics reporter {0}  : {1}" )
 +  void failedToStopReporter( String name,  @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.INFO, text = "Cookie scoping feature enabled: {0}" )
 +  void cookieScopingFeatureEnabled( boolean enabled );
 +
 +  /**
 +   * Log whether Topology port mapping feature is enabled/disabled.
 +   *
 +   * @param enabled
 +   */
 +  @Message(level = MessageLevel.INFO,
 +           text = "Topology port mapping feature enabled: {0}")
 +  void gatewayTopologyPortMappingEnabled(final boolean enabled);
 +
 +  /**
 +   * @param topology
 +   * @param port
 +   */
 +  @Message(level = MessageLevel.DEBUG,
 +           text = "Creating a connector for topology {0} listening on port {1}.")
 +  void createJettyConnector(final String topology, final int port);
 +
 +  /**
 +   * @param topology
 +   */
 +  @Message(level = MessageLevel.DEBUG,
 +           text = "Creating a handler for topology {0}.")
 +  void createJettyHandler(final String topology);
 +
 +  /**
 +   * @param oldTarget
 +   * @param newTarget
 +   */
 +  @Message(level = MessageLevel.INFO,
 +           text = "Updating request context from {0} to {1}")
 +  void topologyPortMappingAddContext(final String oldTarget,
 +      final String newTarget);
 +
 +  /**
 +   * @param oldTarget
 +   * @param newTarget
 +   */
 +  @Message(level = MessageLevel.DEBUG,
 +           text = "Updating request target from {0} to {1}")
 +  void topologyPortMappingUpdateRequest(final String oldTarget,
 +      final String newTarget);
 +
 +  /**
 +   * Messages for Topology Port Mapping
 +   *
 +   * @param port
 +   * @param topology
 +   */
 +  @Message(level = MessageLevel.ERROR,
 +           text = "Port {0} configured for Topology - {1} is already in use.")
 +  void portAlreadyInUse(final int port, final String topology);
 +
 +  /**
 +   * Messages for Topology Port Mapping
 +   *
 +   * @param port
 +   */
 +  @Message(level = MessageLevel.ERROR,
 +           text = "Port {0} is already in use.")
 +  void portAlreadyInUse(final int port);
 +
 +  /**
 +   * Log topology and port
 +   *
 +   * @param topology
 +   * @param port
 +   */
 +  @Message(level = MessageLevel.INFO,
 +           text = "Started gateway, topology \"{0}\" listening on port \"{1}\".")
 +  void startedGateway(final String topology, final int port);
 +
 +  @Message(level = MessageLevel.ERROR,
 +           text =
 +               " Could not find topology \"{0}\" mapped to port \"{1}\" configured in gateway-config.xml. "
 +                   + "This invalid topology mapping will be ignored by the gateway. "
 +                   + "Gateway restart will be required if in the future \"{0}\" topology is added.")
 +  void topologyPortMappingCannotFindTopology(final String topology, final int port);
 +
 +
++  @Message( level = MessageLevel.WARN, text = "There is no registry client defined for remote configuration monitoring." )
++  void missingClientConfigurationForRemoteMonitoring();
++
++  @Message( level = MessageLevel.WARN, text = "Could not resolve a remote configuration registry client for {0}." )
++  void unresolvedClientConfigurationForRemoteMonitoring(final String clientName);
++
 +  @Message( level = MessageLevel.INFO, text = "Monitoring simple descriptors in directory: {0}" )
 +  void monitoringDescriptorChangesInDirectory(String descriptorsDir);
 +
- 
 +  @Message( level = MessageLevel.INFO, text = "Monitoring shared provider configurations in directory: {0}" )
 +  void monitoringProviderConfigChangesInDirectory(String sharedProviderDir);
 +
++  @Message( level = MessageLevel.ERROR, text = "Error registering listener for remote configuration path {0} : {1}" )
++  void errorAddingRemoteConfigurationListenerForPath(final String path,
++                                                     @StackTrace( level = MessageLevel.DEBUG ) Exception e);
++
++  @Message( level = MessageLevel.ERROR, text = "Error unregistering listener for remote configuration path {0} : {1}" )
++  void errorRemovingRemoteConfigurationListenerForPath(final String path,
++                                                       @StackTrace( level = MessageLevel.DEBUG ) Exception e);
++
++  @Message( level = MessageLevel.ERROR, text = "Error downloading remote configuration {0} : {1}" )
++  void errorDownloadingRemoteConfiguration(final String path,
++                                           @StackTrace( level = MessageLevel.DEBUG ) Exception e);
++
 +  @Message( level = MessageLevel.INFO, text = "Prevented deletion of shared provider configuration because there are referencing descriptors: {0}" )
 +  void preventedDeletionOfSharedProviderConfiguration(String providerConfigurationPath);
 +
 +  @Message( level = MessageLevel.INFO, text = "Generated topology {0} because the associated descriptor {1} changed." )
 +  void generatedTopologyForDescriptorChange(String topologyName, String descriptorName);
 +
++  @Message( level = MessageLevel.WARN, text = "An error occurred while attempting to initialize the remote configuration monitor: {0}" )
++  void remoteConfigurationMonitorInitFailure(final String errorMessage,
++                                             @StackTrace( level = MessageLevel.DEBUG ) Exception e );
++
++  @Message( level = MessageLevel.WARN, text = "An error occurred while attempting to start the remote configuration monitor {0} : {1}" )
++  void remoteConfigurationMonitorStartFailure(final String monitorType,
++                                              final String errorMessage,
++                                              @StackTrace( level = MessageLevel.DEBUG ) Exception e );
++
++  @Message( level = MessageLevel.INFO, text = "Starting remote configuration monitor for source {0} ..." )
++  void startingRemoteConfigurationMonitor(final String address);
++
++  @Message( level = MessageLevel.INFO, text = "Monitoring remote configuration source {0}" )
++  void monitoringRemoteConfigurationSource(final String address);
++
++  @Message( level = MessageLevel.INFO, text = "Remote configuration monitor downloaded {0} configuration file {1}" )
++  void downloadedRemoteConfigFile(final String type, final String configFileName);
++
++  @Message( level = MessageLevel.INFO, text = "Remote configuration monitor deleted {0} configuration file {1} based on remote change." )
++  void deletedRemoteConfigFile(final String type, final String configFileName);
++
 +  @Message( level = MessageLevel.ERROR, text = "An error occurred while processing {0} : {1}" )
 +  void simpleDescriptorHandlingError(final String simpleDesc,
 +                                     @StackTrace(level = MessageLevel.DEBUG) Exception e);
 +
 +  @Message(level = MessageLevel.DEBUG, text = "Successfully wrote configuration: {0}")
 +  void wroteConfigurationFile(final String filePath);
 +
 +  @Message(level = MessageLevel.ERROR, text = "Failed to write configuration: {0}")
 +  void failedToWriteConfigurationFile(final String filePath,
 +                                      @StackTrace(level = MessageLevel.DEBUG) Exception e );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deleting topology {0} because the associated descriptor {1} was deleted." )
 +  void deletingTopologyForDescriptorDeletion(String topologyName, String descriptorName);
 +
 +  @Message( level = MessageLevel.INFO, text = "Deleting descriptor {0} because the associated topology {1} was deleted." )
 +  void deletingDescriptorForTopologyDeletion(String descriptorName, String topologyName);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Added descriptor {0} reference to provider configuration {1}." )
 +  void addedProviderConfigurationReference(String descriptorName, String providerConfigurationName);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Removed descriptor {0} reference to provider configuration {1}." )
 +  void removedProviderConfigurationReference(String descriptorName, String providerConfigurationName);
 +
++  @Message( level = MessageLevel.WARN,
++            text = "The permissions for the remote configuration registry entry \"{0}\" are such that its content may not be trustworthy." )
++  void suspectWritableRemoteConfigurationEntry(String entryPath);
++
++  @Message( level = MessageLevel.WARN,
++            text = "Correcting the suspect permissions for the remote configuration registry entry \"{0}\"." )
++  void correctingSuspectWritableRemoteConfigurationEntry(String entryPath);
++
++  @Message(level = MessageLevel.INFO,
++           text = "A cluster configuration change was noticed for {1} @ {0}")
++  void noticedClusterConfigurationChange(final String source, final String clusterName);
++
++  @Message(level = MessageLevel.INFO,
++           text = "Triggering topology regeneration for descriptor {2} because of change to the {1} @ {0} configuration.")
++  void triggeringTopologyRegeneration(final String source, final String clusterName, final String affected);
++
++  @Message(level = MessageLevel.ERROR,
++           text = "Encountered an error while responding to {1} @ {0} configuration change: {2}")
++  void errorRespondingToConfigChange(final String source,
++                                     final String clusterName,
++                                     @StackTrace(level = MessageLevel.DEBUG) Exception e);
++
 +}
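
For context on how these annotation-driven messages are used: Knox resolves an annotated messages interface through its i18n MessagesFactory and then calls the methods instead of hand-formatting log strings; the @Message text supplies the pattern, the method arguments fill the {n} placeholders, and @StackTrace controls the level at which an exception's stack trace is emitted. The sketch below illustrates that pattern for the new remote configuration monitor messages. It is a minimal, hypothetical example: the interface name (GatewayMessages), the caller class, and the pre-restructuring package org.apache.hadoop.gateway.i18n.messages are assumptions for illustration, not taken from this commit.

    import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;

    public class RemoteConfigMonitorStarter {

        // Hypothetical: the actual messages interface name is not visible in this diff hunk.
        private static final GatewayMessages LOG = MessagesFactory.get(GatewayMessages.class);

        void start(String monitorType, String address) {
            try {
                LOG.startingRemoteConfigurationMonitor(address);
                // ... connect to the remote configuration source here ...
                LOG.monitoringRemoteConfigurationSource(address);
            } catch (Exception e) {
                // The error message fills the {1} placeholder; the stack trace itself is
                // logged at DEBUG per @StackTrace(level = MessageLevel.DEBUG) on the
                // exception parameter.
                LOG.remoteConfigurationMonitorStartFailure(monitorType, e.getMessage(), e);
            }
        }
    }
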


[11/49] knox git commit: KNOX-1128 - Readonly protection for generated topologies in Knox Admin UI

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/d835af99/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.js
----------------------------------------------------------------------
diff --git a/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.js b/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.js
deleted file mode 100644
index d63a13a..0000000
--- a/gateway-applications/src/main/resources/applications/admin-ui/app/vendor.48771018d3da89d3269f.bundle.js
+++ /dev/null
@@ -1,2035 +0,0 @@
-[... 2,035 lines of minified, auto-generated webpack vendor bundle (Angular and dependencies) deleted by this commit; minified content omitted ...]
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-function r(e){return n.i(a.b)(e.value)?u.d(e.value):e.identifierIsInstance?u.e(e.identifier).instantiate([],u.k(e.identifier,[],[u.m.Const])):u.e(e.identifier)}function i(e){if(0===e.length)return u.e(n.i(c.d)(c.b.EMPTY_INLINE_ARRAY));var t=Math.log(e.length)/Math.log(2),r=Math.ceil(t),i=r<c.b.inlineArrays.length?c.b.inlineArrays[r]:c.b.InlineArrayDynamic,o=n.i(c.d)(i);return u.e(o).instantiate([u.d(e.length)].concat(e))}function o(e,t,r,i){i.fields.push(new u.n(r.name,null));var o=t<c.b.pureProxies.length?c.b.pureProxies[t]:null;if(!o)throw new Error("Unsupported number of argument for pure functions: "+t);i.ctorStmts.push(u.o.prop(r.name).set(u.e(n.i(c.d)(o)).callFn([e])).toStmt())}function s(e,t){var r=Object.keys(e.runtime).find(function(n){return e.runtime[n]===t});if(!r)throw new Error("Unknown enum value "+t+" in "+e.name);return u.e(n.i(c.e)(n.i(c.d)(e),r))}var a=n(2),c=n(9),u=n(5);t.c=r,t.d=i,t.a=o,t.b=s},function(e,t,n){"use strict";var r=n(219);n.d(t,"b",function(){return
  i}),n.d(t,"a",function(){return o});/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var i=function(){function e(e,t){this.start=e,this.end=t}return e.fromArray=function(t){return t?(n.i(r.a)("interpolation",t),new e(t[0],t[1])):o},e}(),o=new i("{{","}}")},function(e,t,n){"use strict";function r(e,t,n){void 0===n&&(n=null);var r=[],i=e.visit?function(t){return e.visit(t,n)||t.visit(e,n)}:function(t){return t.visit(e,n)};return t.forEach(function(e){var t=i(e);t&&r.push(t)}),r}n.d(t,"i",function(){return o}),n.d(t,"h",function(){return s}),n.d(t,"j",function(){return a}),n.d(t,"d",function(){return c}),n.d(t,"f",function(){return u}),n.d(t,"n",function(){return l}),n.d(t,"c",function(){return p}),n.d(t,"m",function(){return d}),n.d(t,"l",function(){return h}),n.d(t,"p",function(){return f}),n.d(t,"o",function(){return m}),n.d(t,"b",function(){return g}),n.d(t,"a",function(){return i}),n.d(t,"k",function(){return y}),n.d(t,"e",function(){return v}),t.g=r;/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var i,o=function(){function e(e,t,n){this.value=e,this.ngContentIndex=t,this.sourceSpan=n}return e.prototype.visit=function(e,t){return e.visitText(this,t)},e}(),s=function(){function e(e,t,n){this.value=e,this.ngContentIndex=t,this.sourceSpan=n}return e.prototype.visit=function(e,t){return e.visitBoundText(this,t)},e}(),a=function(){function e(e,t,n){this.name=e,this.value=t,this.sourceSpan=n}return e.prototype.visit=function(e,t){return e.visitAttr(this,t)},e}(),c=function(){function e(e,t,n,r,i,o,s){this.name=e,this.type=t,this.securityContext=n,this.needsRuntimeSecurityContext=r,this.value=i,this.unit=o,this.sourceSpan=s}return e.prototype.visit=function(e,t){return e.visitElementProperty(this,t)},Object.defineProperty(e.prototype,"isAnimation",{get:function(){return this.type===v.Animation},enumerable:!0,configurable:!0}),e}(),u=function(){function e(e,t,n,r,i){this.name=e,this.target=t,this.phase=n,this.handler=r,this.sourceSpan=i}return e.calcFullName=function(e,t,n){return t
 ?t+":"+e:n?"@"+e+"."+n:e},e.prototype.visit=function(e,t){return e.visitEvent(this,t)},Object.defineProperty(e.prototype,"fullName",{get:function(){return e.calcFullName(this.name,this.target,this.phase)},enumerable:!0,configurable:!0}),Object.defineProperty(e.prototype,"isAnimation",{get:function(){return!!this.phase},enumerable:!0,configurable:!0}),e}(),l=function(){function e(e,t,n){this.name=e,this.value=t,this.sourceSpan=n}return e.prototype.visit=function(e,t){return e.visitReference(this,t)},e}(),p=function(){function e(e,t,n){this.name=e,this.value=t,this.sourceSpan=n}return e.prototype.visit=function(e,t){return e.visitVariable(this,t)},e}(),d=function(){function e(e,t,n,r,i,o,s,a,c,u,l,p){this.name=e,this.attrs=t,this.inputs=n,this.outputs=r,this.references=i,this.directives=o,this.providers=s,this.hasViewContainer=a,this.children=c,this.ngContentIndex=u,this.sourceSpan=l,this.endSourceSpan=p}return e.prototype.visit=function(e,t){return e.visitElement(this,t)},e}(),h=func
 tion(){function e(e,t,n,r,i,o,s,a,c,u){this.attrs=e,this.outputs=t,this.references=n,this.variables=r,this.directives=i,this.providers=o,this.hasViewContainer=s,this.children=a,this.ngContentIndex=c,this.sourceSpan=u}return e.prototype.visit=function(e,t){return e.visitEmbeddedTemplate(this,t)},e}(),f=function(){function e(e,t,n,r){this.directiveName=e,this.templateName=t,this.value=n,this.sourceSpan=r}return e.prototype.visit=function(e,t){return e.visitDirectiveProperty(this,t)},e}(),m=function(){function e(e,t,n,r,i){this.directive=e,this.inputs=t,this.hostProperties=n,this.hostEvents=r,this.sourceSpan=i}return e.prototype.visit=function(e,t){return e.visitDirective(this,t)},e}(),g=function(){function e(e,t,n,r,i,o,s){this.token=e,this.multiProvider=t,this.eager=n,this.providers=r,this.providerType=i,this.lifecycleHooks=o,this.sourceSpan=s}return e.prototype.visit=function(e,t){return null},e}();!function(e){e[e.PublicService=0]="PublicService",e[e.PrivateService=1]="PrivateServi
 ce",e[e.Component=2]="Component",e[e.Directive=3]="Directive",e[e.Builtin=4]="Builtin"}(i||(i={}));var v,y=function(){function e(e,t,n){this.index=e,this.ngContentIndex=t,this.sourceSpan=n}return e.prototype.visit=function(e,t){return e.visitNgContent(this,t)},e}();!function(e){e[e.Property=0]="Property",e[e.Attribute=1]="Attribute",e[e.Class=2]="Class",e[e.Style=3]="Style",e[e.Animation=4]="Animation"}(v||(v={}))},function(e,t,n){"use strict";/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-function r(e){return null==e||"string"==typeof e&&0===e.length}function i(e){return n.i(d.a)(e)?e:u.toPromise.call(e)}function o(e,t){return t.map(function(t){return t(e)})}function s(e,t){return t.map(function(t){return t(e)})}function a(e){var t=e.reduce(function(e,t){return n.i(p.c)(t)?l.a.merge(e,t):e},{});return 0===Object.keys(t).length?null:t}var c=n(0),u=n(342),l=(n.n(u),n(272)),p=n(64),d=n(274);n.d(t,"b",function(){return h}),n.d(t,"c",function(){return f}),n.d(t,"a",function(){return m});var h=new c.OpaqueToken("NgValidators"),f=new c.OpaqueToken("NgAsyncValidators"),m=function(){function e(){}return e.required=function(e){return r(e.value)?{required:!0}:null},e.minLength=function(e){return function(t){if(r(t.value))return null;var n="string"==typeof t.value?t.value.length:0;return n<e?{minlength:{requiredLength:e,actualLength:n}}:null}},e.maxLength=function(e){return function(t){var n="string"==typeof t.value?t.value.length:0;return n>e?{maxlength:{requiredLength:e,actual
 Length:n}}:null}},e.pattern=function(t){if(!t)return e.nullValidator;var n,i;return"string"==typeof t?(i="^"+t+"$",n=new RegExp(i)):(i=t.toString(),n=t),function(e){if(r(e.value))return null;var t=e.value;return n.test(t)?null:{pattern:{requiredPattern:i,actualValue:t}}}},e.nullValidator=function(e){return null},e.compose=function(e){if(!e)return null;var t=e.filter(p.c);return 0==t.length?null:function(e){return a(o(e,t))}},e.composeAsync=function(e){if(!e)return null;var t=e.filter(p.c);return 0==t.length?null:function(e){var n=s(e,t).map(i);return Promise.all(n).then(a)}},e}()},function(e,t,n){"use strict";(function(e){function r(e){return null!=e}function i(e){return null==e}function o(e){if("string"==typeof e)return e;if(null==e)return""+e;if(e.overriddenName)return e.overriddenName;if(e.name)return e.name;var t=e.toString(),n=t.indexOf("\n");return n===-1?t:t.substring(0,n)}function s(e){return null!==e&&("function"==typeof e||"object"==typeof e)}function a(e,t,n){for(var r=t.
 split("."),i=e;r.length>1;){var o=r.shift();i=i.hasOwnProperty(o)&&null!=i[o]?i[o]:i[o]={}}void 0!==i&&null!==i||(i={}),i[r.shift()]=n}function c(){if(!p)if(u.Symbol&&Symbol.iterator)p=Symbol.iterator;else for(var e=Object.getOwnPropertyNames(Map.prototype),t=0;t<e.length;++t){var n=e[t];"entries"!==n&&"size"!==n&&Map.prototype[n]===Map.prototype.entries&&(p=n)}return p}n.d(t,"d",function(){return l}),t.a=r,t.b=i,t.g=o,t.e=s,t.c=a,t.f=c;/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var u;u="undefined"==typeof window?"undefined"!=typeof WorkerGlobalScope&&self instanceof WorkerGlobalScope?self:e:window;var l=u;l.assert=function(e){};var p=(Object.getPrototypeOf({}),function(){function e(){}return e.parseIntAutoRadix=function(e){var t=parseInt(e);if(isNaN(t))throw new Error("Invalid integer literal when parsing "+e);return t},e.isNumeric=function(e){return!isNaN(e-parseFloat(e))},e}(),null)}).call(t,n(43))},function(e,t,n){var r=n(1),i=n(10),o=n(6);e.exports=function(e,t){var n=(i.Object||{})[e]||Object[e],s={};s[e]=t(n),r(r.S+r.F*o(function(){n(1)}),"Object",s)}},function(e,t,n){"use strict";function r(e){return e.replace(p,function(){for(var e=[],t=0;t<arguments.length;t++)e[t-0]=arguments[t];return e[1].toUpperCase()})}function i(e,t){return s(e,":",t)}function o(e,t){return s(e,".",t)}function s(e,t,n){var r=e.indexOf(t);return r==-1?n:[e.slice(0,r).trim(),e.slice(r+1).trim()]}function a(e){return e.replace(/\W/g,"_")}function c(e,t,r){return Array.isArray(e
 )?t.visitArray(e,r):n.i(u.g)(e)?t.visitStringMap(e,r):n.i(u.a)(e)||n.i(u.h)(e)?t.visitPrimitive(e,r):t.visitOther(e,r)}var u=n(2);n.d(t,"f",function(){return l}),t.h=r,t.b=i,t.c=o,t.a=a,t.d=c,n.d(t,"g",function(){return d}),n.d(t,"e",function(){return h});/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var l="",p=/-+([a-z0-9])/g,d=function(){function e(){}return e.prototype.visitArray=function(e,t){var n=this;return e.map(function(e){return c(e,n,t)})},e.prototype.visitStringMap=function(e,t){var n=this,r={};return Object.keys(e).forEach(function(i){r[i]=c(e[i],n,t)}),r},e.prototype.visitPrimitive=function(e,t){return e},e.prototype.visitOther=function(e,t){return e},e}(),h=function(){function e(e,t){void 0===t&&(t=null),this.syncResult=e,this.asyncResult=t,t||(this.asyncResult=Promise.resolve(e))}return e}()},function(e,t,n){"use strict";var r=n(172);n.d(t,"a",function(){return o});/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var i=this&&this.__extends||function(e,t){function n(){this.constructor=e}for(var r in t)t.hasOwnProperty(r)&&(e[r]=t[r]);e.prototype=null===t?Object.create(t):(n.prototype=t.prototype,new n)},o=function(e){function t(){e.apply(this,arguments)}return i(t,e),Object.defineProperty(t.prototype,"formDirective",{get:function(){return null},enumerable:!0,configurable:!0}),Object.defineProperty(t.prototype,"path",{get:function(){return null},enumerable:!0,configurable:!0}),t}(r.a)},function(e,t,n){var r=n(66),i=n(84),o=n(28),s=n(22),a=n(477);e.exports=function(e,t){var n=1==e,c=2==e,u=3==e,l=4==e,p=6==e,d=5==e||p,h=t||a;return function(t,a,f){for(var m,g,v=o(t),y=i(v),b=r(a,f,3),w=s(y.length),_=0,k=n?h(t,w):c?h(t,0):void 0;w>_;_++)if((d||_ in y)&&(m=y[_],g=b(m,_,v),e))if(n)k[_]=g;else if(g)switch(e){case 3:return!0;case 5:return m;case 6:return _;case 2:k.push(m)}else if(l)return!1;return p?-1:u||l?l:k}}},function(e,t){e.exports=function(e){if(void 0==e)throw TypeError("Can't call method o
 n  "+e);return e}},function(e,t,n){var r=n(13),i=n(57);e.exports=n(15)?function(e,t,n){return r.f(e,t,i(1,n))}:function(e,t,n){return e[t]=n,e}},function(e,t,n){var r=n(327),i=n(1),o=n(128)("metadata"),s=o.store||(o.store=new(n(598))),a=function(e,t,n){var i=s.get(e);if(!i){if(!n)return;s.set(e,i=new r)}var o=i.get(t);if(!o){if(!n)return;i.set(t,o=new r)}return o},c=function(e,t,n){var r=a(t,n,!1);return void 0!==r&&r.has(e)},u=function(e,t,n){var r=a(t,n,!1);return void 0===r?void 0:r.get(e)},l=function(e,t,n,r){a(n,r,!0).set(e,t)},p=function(e,t){var n=a(e,t,!1),r=[];return n&&n.forEach(function(e,t){r.push(t)}),r},d=function(e){return void 0===e||"symbol"==typeof e?e:String(e)},h=function(e){i(i.S,"Reflect",e)};e.exports={store:s,map:a,has:c,get:u,set:l,keys:p,key:d,exp:h}},function(e,t,n){var r=n(17),i=n(28),o=n(206)("IE_PROTO"),s=Object.prototype;e.exports=Object.getPrototypeOf||function(e){return e=i(e),r(e,o)?e[o]:"function"==typeof e.constructor&&e instanceof e.constructor?e
 .constructor.prototype:e instanceof Object?s:null}},function(e,t){var n;n=function(){return this}();try{n=n||Function("return this")()||(0,eval)("this")}catch(e){"object"==typeof window&&(n=window)}e.exports=n},function(e,t,n){"use strict";var r=n(355),i=n(20);n.d(t,"a",function(){return s});/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var o=this&&this.__extends||function(e,t){function n(){this.constructor=e}for(var r in t)t.hasOwnProperty(r)&&(e[r]=t[r]);e.prototype=null===t?Object.create(t):(n.prototype=t.prototype,new n)},s=function(e){function t(t,r){e.call(this,"Invalid argument '"+r+"' for pipe '"+n.i(i.e)(t)+"'")}return o(t,e),t}(r.a)},function(e,t,n){"use strict";function r(e,t,n){void 0===n&&(n=null);var r=[],i=e.visit?function(t){return e.visit(t,n)||t.visit(e,n)}:function(t){return t.visit(e,n)};return t.forEach(function(e){var t=i(e);t&&r.push(t)}),r}n.d(t,"d",function(){return i}),n.d(t,"b",function(){return o}),n.d(t,"c",function(){return s}),n.d(t,"f",function(){return a}),n.d(t,"e",function(){return c}),n.d(t,"a",function(){return u}),t.g=r;/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var i=function(){function e(e,t){this.value=e,this.sourceSpan=t}return e.prototype.visit=function(e,t){return e.visitText(this,t)},e}(),o=function(){function e(e,t,n,r,i){this.switchValue=e,this.type=t,this.cases=n,this.sourceSpan=r,this.switchValueSourceSpan=i}return e.prototype.visit=function(e,t){return e.visitExpansion(this,t)},e}(),s=function(){function e(e,t,n,r,i){this.value=e,this.expression=t,this.sourceSpan=n,this.valueSourceSpan=r,this.expSourceSpan=i}return e.prototype.visit=function(e,t){return e.visitExpansionCase(this,t)},e}(),a=function(){function e(e,t,n,r){this.name=e,this.value=t,this.sourceSpan=n,this.valueSpan=r}return e.prototype.visit=function(e,t){return e.visitAttribute(this,t)},e}(),c=function(){function e(e,t,n,r,i,o){this.name=e,this.attrs=t,this.children=n,this.sourceSpan=r,this.startSourceSpan=i,this.endSourceSpan=o}return e.prototype.visit=function(e,t){return e.visitElement(this,t)},e}(),u=function(){function e(e,t){this.value=e,this.sourceSpan=t}retu
 rn e.prototype.visit=function(e,t){return e.visitComment(this,t)},e}()},function(e,t,n){"use strict";n.d(t,"a",function(){return r});/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var r=function(){function e(){}return e}()},function(e,t,n){"use strict";/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-function r(e,t){return t.path.concat([e])}function i(e,t){e||c(t,"Cannot find control with"),t.valueAccessor||c(t,"No value accessor for form control with"),e.validator=m.a.compose([e.validator,t.validator]),e.asyncValidator=m.a.composeAsync([e.asyncValidator,t.asyncValidator]),t.valueAccessor.writeValue(e.value),t.valueAccessor.registerOnChange(function(n){t.viewToModelUpdate(n),e.markAsDirty(),e.setValue(n,{emitModelToViewChange:!1})}),t.valueAccessor.registerOnTouched(function(){return e.markAsTouched()}),e.registerOnChange(function(e,n){t.valueAccessor.writeValue(e),n&&t.viewToModelUpdate(e)}),t.valueAccessor.setDisabledState&&e.registerOnDisabledChange(function(e){t.valueAccessor.setDisabledState(e)}),t._rawValidators.forEach(function(t){t.registerOnValidatorChange&&t.registerOnValidatorChange(function(){return e.updateValueAndValidity()})}),t._rawAsyncValidators.forEach(function(t){t.registerOnValidatorChange&&t.registerOnValidatorChange(function(){return e.updateValueAndValid
 ity()})})}function o(e,t){t.valueAccessor.registerOnChange(function(){return a(t)}),t.valueAccessor.registerOnTouched(function(){return a(t)}),t._rawValidators.forEach(function(e){e.registerOnValidatorChange&&e.registerOnValidatorChange(null)}),t._rawAsyncValidators.forEach(function(e){e.registerOnValidatorChange&&e.registerOnValidatorChange(null)}),e&&e._clearChangeFns()}function s(e,t){n.i(f.f)(e)&&c(t,"Cannot find control with"),e.validator=m.a.compose([e.validator,t.validator]),e.asyncValidator=m.a.composeAsync([e.asyncValidator,t.asyncValidator])}function a(e){return c(e,"There is no FormControl instance attached to form control element with")}function c(e,t){var n;throw n=e.path.length>1?"path: '"+e.path.join(" -> ")+"'":e.path[0]?"name: '"+e.path+"'":"unspecified name attribute",new Error(t+" "+n)}function u(e){return n.i(f.c)(e)?m.a.compose(e.map(y.a)):null}function l(e){return n.i(f.c)(e)?m.a.composeAsync(e.map(y.b)):null}function p(e,t){if(!e.hasOwnProperty("model"))return
 !1;var r=e.model;return!!r.isFirstChange()||!n.i(f.e)(t,r.currentValue)}function d(e){return E.some(function(t){return e.constructor===t})}function h(e,t){if(!t)return null;var n,r,i;return t.forEach(function(t){t.constructor===v.a?n=t:d(t)?(r&&c(e,"More than one built-in value accessor matches form control with"),r=t):(i&&c(e,"More than one custom value accessor matches form control with"),i=t)}),i?i:r?r:n?n:(c(e,"No valid value accessor for form control with"),null)}var f=n(64),m=n(33),g=n(111),v=n(112),y=n(420),b=n(175),w=n(78),_=n(176),k=n(115),x=n(116);t.a=r,t.d=i,t.h=o,t.e=s,t.b=u,t.c=l,t.g=p,t.f=h;var E=[g.a,_.a,b.a,k.a,x.a,w.a]},function(e,t,n){"use strict";n.d(t,"b",function(){return r}),n.d(t,"c",function(){return i}),n.d(t,"a",function(){return o}),n.d(t,"e",function(){return s}),n.d(t,"d",function(){return a});/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var r;!function(e){e[e.Get=0]="Get",e[e.Post=1]="Post",e[e.Put=2]="Put",e[e.Delete=3]="Delete",e[e.Options=4]="Options",e[e.Head=5]="Head",e[e.Patch=6]="Patch"}(r||(r={}));var i;!function(e){e[e.Unsent=0]="Unsent",e[e.Open=1]="Open",e[e.HeadersReceived=2]="HeadersReceived",e[e.Loading=3]="Loading",e[e.Done=4]="Done",e[e.Cancelled=5]="Cancelled"}(i||(i={}));var o;!function(e){e[e.Basic=0]="Basic",e[e.Cors=1]="Cors",e[e.Default=2]="Default",e[e.Error=3]="Error",e[e.Opaque=4]="Opaque"}(o||(o={}));var s;!function(e){e[e.NONE=0]="NONE",e[e.JSON=1]="JSON",e[e.FORM=2]="FORM",e[e.FORM_DATA=3]="FORM_DATA",e[e.TEXT=4]="TEXT",e[e.BLOB=5]="BLOB",e[e.ARRAY_BUFFER=6]="ARRAY_BUFFER"}(s||(s={}));var a;!function(e){e[e.Text=0]="Text",e[e.Json=1]="Json",e[e.ArrayBuffer=2]="ArrayBuffer",e[e.Blob=3]="Blob"}(a||(a={}))},function(e,t,n){var r=n(88)("meta"),i=n(7),o=n(17),s=n(13).f,a=0,c=Object.isExtensible||function(){return!0},u=!n(6)(function(){return c(Object.preventExtensions({}))}),l=function(e){s(e
 ,r,{value:{i:"O"+ ++a,w:{}}})},p=function(e,t){if(!i(e))return"symbol"==typeof e?e:("string"==typeof e?"S":"P")+e;if(!o(e,r)){if(!c(e))return"F";if(!t)return"E";l(e)}return e[r].i},d=function(e,t){if(!o(e,r)){if(!c(e))return!0;if(!t)return!1;l(e)}return e[r].w},h=function(e){return u&&f.NEED&&c(e)&&!o(e,r)&&l(e),e},f=e.exports={KEY:r,NEED:!1,fastKey:p,getWeak:d,onFreeze:h}},function(e,t,n){var r=n(126),i=n(57),o=n(27),s=n(58),a=n(17),c=n(308),u=Object.getOwnPropertyDescriptor;t.f=n(15)?u:function(e,t){if(e=o(e),t=s(t,!0),c)try{return u(e,t)}catch(e){}if(a(e,t))return i(!r.f.call(e,t),e[t])}},function(e,t,n){"use strict";function r(e){var t=b.a("changed"),r=[t.set(b.o.prop(A)).toDeclStmt(),b.o.prop(A).set(b.d(!1)).toStmt()],i=[];if(e.genChanges){var o=[];e.ngOnChanges&&o.push(b.o.prop(C).callMethod("ngOnChanges",[b.o.prop(S)]).toStmt()),e.compilerConfig.logBindingUpdate&&o.push(b.e(n.i(g.d)(g.b.setBindingDebugInfoForChanges)).callFn([M.prop("renderer"),P,b.o.prop(S)]).toStmt()),o.pus
 h(I),i.push(new b.g(t,o))}e.ngOnInit&&i.push(new b.g(M.prop("numberOfChecks").identical(new b.F(0)),[b.o.prop(C).callMethod("ngOnInit",[]).toStmt()])),e.ngDoCheck&&i.push(b.o.prop(C).callMethod("ngDoCheck",[]).toStmt()),i.length>0&&r.push(new b.g(b.u(O),i)),r.push(new b.i(t)),e.methods.push(new b.B("ngDoCheck",[new b.j(M.name,b.k(n.i(g.d)(g.b.AppView),[b.l])),new b.j(P.name,b.l),new b.j(O.name,b.E)],r,b.E))}function i(e,t){var r=n.i(p.a)(t),i=[b.o.prop(A).set(b.d(!0)).toStmt(),b.o.prop(C).prop(e).set(F).toStmt()];t.genChanges&&i.push(b.o.prop(S).key(b.d(e)).set(b.e(n.i(g.d)(g.b.SimpleChange)).instantiate([r.expression,F])).toStmt());var o=n.i(p.b)({currValExpr:F,forceUpdate:L,stmts:[]},r.expression,O,i);t.methods.push(new b.B("check_"+e,[new b.j(F.name,b.l),new b.j(O.name,b.E),new b.j(L.name,b.E)],o))}function o(e,t){var r=[],i=[new b.j(M.name,b.k(n.i(g.d)(g.b.AppView),[b.l])),new b.j(R.name,b.k(n.i(g.d)(g.b.AppView),[b.l])),new b.j(P.name,b.l),new b.j(O.name,b.E)];e.forEach(functio
 n(e,o){var s=n.i(p.a)(t),a=n.i(d.a)(t,null,b.o.prop(C),e.value,s.bindingId);if(a){var c;e.needsRuntimeSecurityContext&&(c=b.a("secCtx_"+i.length),i.push(new b.j(c.name,b.k(n.i(g.d)(g.b.SecurityContext)))));var u;if(e.isAnimation){var l=n.i(h.a)(M,R,e,b.o.prop(T).or(b.e(n.i(g.d)(g.b.noop))),P,a.currValExpr,s.expression),f=l.updateStmts,m=l.detachStmts;u=f,(v=t.detachStmts).push.apply(v,m)}else u=n.i(h.b)(M,e,P,a.currValExpr,t.compilerConfig.logBindingUpdate,c);r.push.apply(r,n.i(p.b)(a,s.expression,O,u));var v}}),t.methods.push(new b.B("checkHost",i,r))}function s(e,t){var r=b.a("result"),i=[r.set(b.d(!0)).toDeclStmt(b.E)];e.forEach(function(e,o){var s=n.i(d.b)(t,null,b.o.prop(C),e.handler,"sub_"+o),a=s.stmts;s.preventDefault&&a.push(r.set(s.preventDefault.and(r)).toStmt()),i.push(new b.g(D.equals(b.d(e.fullName)),a))}),i.push(new b.i(r)),t.methods.push(new b.B("handleEvent",[new b.j(D.name,b.G),new b.j(d.c.event.name,b.l)],i,b.E))}function a(e,t){var r=[new b.j(M.name,b.k(n.i(g.d)(g
 .b.AppView),[b.l])),new b.j(T,b.l)],i=[b.o.prop(T).set(b.a(T)).toStmt()];Object.keys(e.outputs).forEach(function(n,o){var s=e.outputs[n],a="emit"+o;r.push(new b.j(a,b.E));var c="subscription"+o;t.fields.push(new b.n(c,b.l)),i.push(new b.g(b.a(a),[b.o.prop(c).set(b.o.prop(C).prop(n).callMethod(b.z.SubscribeObservable,[b.a(T).callMethod(b.z.Bind,[M,b.d(s)])])).toStmt()])),t.destroyStmts.push(b.o.prop(c).and(b.o.prop(c).callMethod("unsubscribe",[])).toStmt())}),t.methods.push(new b.B("subscribe",r,i))}function c(e,t,n){var r=[],i=new x.a(t,v.a,n,[],r),o=e.type.moduleUrl?"in Directive "+e.type.name+" in "+e.type.moduleUrl:"in Directive "+e.type.name,s=new w.b("",o),a=new w.d(new w.c(s,null,null,null),new w.c(s,null,null,null)),c=i.createDirectiveHostPropertyAsts(e.toSummary(),a),u=i.createDirectiveHostEventAsts(e.toSummary(),a);return new B(c,u,r)}function u(e,t){var n=e.filter(function(e){return e.level===w.e.WARNING}),r=e.filter(function(e){return e.level===w.e.FATAL});if(n.length>0&&
 this._console.warn("Directive parse warnings:\n"+n.join("\n")),r.length>0)throw new Error("Directive parse errors:\n"+r.join("\n"))}var l=n(0),p=n(220),d=n(70),h=n(221),f=n(59),m=n(71),g=n(9),v=n(31),y=n(148),b=n(5),w=n(23),_=n(12),k=n(46),x=n(239);n.d(t,"a",function(){return N}),n.d(t,"b",function(){return $});/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var E=function(){function e(e,t){this.statements=e,this.dirWrapperClassVar=t}return e}(),C="context",S="_changes",A="_changed",T="_eventHandler",F=b.a("currValue"),O=b.a("throwOnChange"),L=b.a("forceUpdate"),M=b.a("view"),R=b.a("componentView"),P=b.a("el"),D=b.a("eventName"),I=b.o.prop(S).set(b.b([])).toStmt(),N=function(){function e(e,t,n,r){this.compilerConfig=e,this._exprParser=t,this._schemaRegistry=n,this._console=r}return e.dirWrapperClassName=function(e){return"Wrapper_"+e.name},e.prototype.compile=function(e){var t=c(e,this._exprParser,this._schemaRegistry);u(t.errors,this._console);var n=new j(this.compilerConfig,e);Object.keys(e.inputs).forEach(function(e){i(e,n)}),r(n),o(t.hostProps,n),s(t.hostListeners,n),a(e,n);var l=n.build();return new E([l],l.name)},e.decorators=[{type:l.Injectable}],e.ctorParameters=[{type:f.a},{type:m.a},{type:k.a},{type:_.C}],e}(),j=function(){function e(e,t){this.compilerConfig=e,this.dirMeta=t,this.fields=[],this.getters=[],this.methods=[],this.
 ctorStmts=[],this.detachStmts=[],this.destroyStmts=[];var n=t.type.lifecycleHooks;this.genChanges=n.indexOf(_.G.OnChanges)!==-1||this.compilerConfig.logBindingUpdate,this.ngOnChanges=n.indexOf(_.G.OnChanges)!==-1,this.ngOnInit=n.indexOf(_.G.OnInit)!==-1,this.ngDoCheck=n.indexOf(_.G.DoCheck)!==-1,this.ngOnDestroy=n.indexOf(_.G.OnDestroy)!==-1,this.ngOnDestroy&&this.destroyStmts.push(b.o.prop(C).callMethod("ngOnDestroy",[]).toStmt())}return e.prototype.build=function(){for(var e=[],t=0;t<this.dirMeta.type.diDeps.length;t++)e.push("p"+t);var r=[new b.B("ngOnDetach",[new b.j(M.name,b.k(n.i(g.d)(g.b.AppView),[b.l])),new b.j(R.name,b.k(n.i(g.d)(g.b.AppView),[b.l])),new b.j(P.name,b.l)],this.detachStmts),new b.B("ngOnDestroy",[],this.destroyStmts)],i=[new b.n(T,b.D,[b.p.Private]),new b.n(C,b.k(this.dirMeta.type)),new b.n(A,b.E,[b.p.Private])],o=[b.o.prop(A).set(b.d(!1)).toStmt()];return this.genChanges&&(i.push(new b.n(S,new b.x(b.l),[b.p.Private])),o.push(I)),o.push(b.o.prop(C).set(b.e(th
 is.dirMeta.type).instantiate(e.map(function(e){return b.a(e)}))).toStmt()),n.i(y.a)({name:N.dirWrapperClassName(this.dirMeta.type),ctorParams:e.map(function(e){return new b.j(e,b.l)}),builders:[{fields:i,ctorStmts:o,methods:r},this]})},e}(),B=function(){function e(e,t,n){this.hostProps=e,this.hostListeners=t,this.errors=n}return e}(),$=function(){function e(){}return e.create=function(e,t){return b.e(e).instantiate(t,b.k(e))},e.context=function(e){return e.prop(C)},e.ngDoCheck=function(e,t,n,r){return e.callMethod("ngDoCheck",[t,n,r])},e.checkHost=function(e,t,n,r,i,o,s){return e.length?[t.callMethod("checkHost",[n,r,i,o].concat(s)).toStmt()]:[]},e.ngOnDetach=function(e,t,n,r,i){return e.some(function(e){return e.isAnimation})?[t.callMethod("ngOnDetach",[n,r,i]).toStmt()]:[]},e.ngOnDestroy=function(e,t){return e.type.lifecycleHooks.indexOf(_.G.OnDestroy)!==-1||Object.keys(e.outputs).length>0?[t.callMethod("ngOnDestroy",[]).toStmt()]:[]},e.subscribe=function(e,t,n,r,i,o){var s=!1,a=[
 ];return Object.keys(e.outputs).forEach(function(t){var r=e.outputs[t],i=n.indexOf(r)>-1;s=s||i,a.push(b.d(i))}),t.forEach(function(e){e.isAnimation&&n.length>0&&(s=!0)}),s?[r.callMethod("subscribe",[i,o].concat(a)).toStmt()]:[]},e.handleEvent=function(e,t,n,r){return t.callMethod("handleEvent",[n,r])},e}()},function(e,t,n){"use strict";function r(e){if(":"!=e[0])return[null,e];var t=e.indexOf(":",1);if(t==-1)throw new Error('Unsupported format "'+e+'" expecting ":namespace:name"');return[e.slice(1,t),e.slice(t+1)]}function i(e){return null===e?null:r(e)[0]}function o(e,t){return e?":"+e+":"+t:t}n.d(t,"b",function(){return s}),t.e=r,t.c=i,t.d=o,n.d(t,"a",function(){return a});/**
- * @license
- * Copyright Google Inc. All Rights Reserved.
- *
- * Use of this source code is governed by an MIT-style license that can be
- * found in the LICENSE file at https://angular.io/license
- */
-var s;!function(e){e[e.RAW_TEXT=0]="RAW_TEXT",e[e.ESCAPABLE_RAW_TEXT=1]="ESCAPABLE_RAW_TEXT",e[e.PARSABLE_DATA=2]="PARSABLE_DATA"}(s||(s={}));var a={Aacute:"Á",aacute:"á",Acirc:"Â",acirc:"â",acute:"´",AElig:"Æ",aelig:"æ",Agrave:"À",agrave:"à",alefsym:"ℵ",Alpha:"Α",alpha:"α",amp:"&",and:"∧",ang:"∠",apos:"'",Aring:"Å",aring:"å",asymp:"≈",Atilde:"Ã",atilde:"ã",Auml:"Ä",auml:"ä",bdquo:"„",Beta:"Β",beta:"β",brvbar:"¦",bull:"•",cap:"∩",Ccedil:"Ç",ccedil:"ç",cedil:"¸",cent:"¢",Chi:"Χ",chi:"χ",circ:"ˆ",clubs:"♣",cong:"≅",copy:"©",crarr:"↵",cup:"∪",curren:"¤",dagger:"†",Dagger:"‡",darr:"↓",dArr:"⇓",deg:"°",Delta:"Δ",delta:"δ",diams:"♦",divide:"÷",Eacute:"É",eacute:"é",Ecirc:"Ê",ecirc:"ê",Egrave:"È",egrave:"è",empty:"∅",emsp:" ",ensp:" ",Epsilon:"Ε",epsilon:"

<TRUNCATED>

[47/49] knox git commit: KNOX-998 - Merge from trunk 0.14.0 code

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
deleted file mode 100644
index 0bfc39a..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
+++ /dev/null
@@ -1,263 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.service.config.remote.config.RemoteConfigurationRegistriesAccessor;
-import org.apache.hadoop.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClient;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-import org.apache.hadoop.gateway.services.security.AliasService;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.attribute.PosixFilePermission;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.function.Function;
-
-/**
- * An implementation of RemoteConfigurationRegistryClientService intended to be used for testing without having to
- * connect to an actual remote configuration registry.
- */
-public class LocalFileSystemRemoteConfigurationRegistryClientService implements RemoteConfigurationRegistryClientService {
-
-    public static final String TYPE = "LocalFileSystem";
-
-    private Map<String, RemoteConfigurationRegistryClient> clients = new HashMap<>();
-
-
-    @Override
-    public void setAliasService(AliasService aliasService) {
-        // N/A
-    }
-
-    @Override
-    public RemoteConfigurationRegistryClient get(String name) {
-        return clients.get(name);
-    }
-
-    @Override
-    public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
-        List<RemoteConfigurationRegistryConfig> registryConfigurations =
-                                        RemoteConfigurationRegistriesAccessor.getRemoteRegistryConfigurations(config);
-        for (RemoteConfigurationRegistryConfig registryConfig : registryConfigurations) {
-            if (TYPE.equalsIgnoreCase(registryConfig.getRegistryType())) {
-                RemoteConfigurationRegistryClient registryClient = createClient(registryConfig);
-                clients.put(registryConfig.getName(), registryClient);
-            }
-        }
-    }
-
-    @Override
-    public void start() throws ServiceLifecycleException {
-
-    }
-
-    @Override
-    public void stop() throws ServiceLifecycleException {
-
-    }
-
-
-    private RemoteConfigurationRegistryClient createClient(RemoteConfigurationRegistryConfig config) {
-        String rootDir = config.getConnectionString();
-
-        return new RemoteConfigurationRegistryClient() {
-            private File root = new File(rootDir);
-
-            @Override
-            public String getAddress() {
-                return root.getAbsolutePath();
-            }
-
-            @Override
-            public boolean entryExists(String path) {
-                return (new File(root, path)).exists();
-            }
-
-            @Override
-            public List<EntryACL> getACL(String path) {
-                List<EntryACL> result = new ArrayList<>();
-
-                Path resolved = Paths.get(rootDir, path);
-                try {
-                    Map<String, List<String>> collected = new HashMap<>();
-
-                    Set<PosixFilePermission> perms = Files.getPosixFilePermissions(resolved);
-                    for (PosixFilePermission perm : perms) {
-                        String[] parsed = perm.toString().split("_");
-                        collected.computeIfAbsent(parsed[0].toLowerCase(), s -> new ArrayList<>()).add(parsed[1].toLowerCase());
-                    }
-
-                    for (String id : collected.keySet()) {
-                        EntryACL acl = new EntryACL() {
-                            @Override
-                            public String getId() {
-                                return id;
-                            }
-
-                            @Override
-                            public String getType() {
-                                return "fs";
-                            }
-
-                            @Override
-                            public Object getPermissions() {
-                                return collected.get(id).toString();
-                            }
-
-                            @Override
-                            public boolean canRead() {
-                                return true;
-                            }
-
-                            @Override
-                            public boolean canWrite() {
-                                return true;
-                            }
-                        };
-                        result.add(acl);
-                    }
-                } catch (IOException e) {
-                    e.printStackTrace();
-                }
-                return result;
-            }
-
-            @Override
-            public List<String> listChildEntries(String path) {
-                List<String> result = new ArrayList<>();
-
-                File entry = new File(root, path);
-                if (entry.exists() && entry.isDirectory()) {
-                    String[] list = entry.list();
-                    if (list != null) {
-                        result.addAll(Arrays.asList(entry.list()));
-                    }
-                }
-
-                return result;
-            }
-
-            @Override
-            public String getEntryData(String path) {
-                return getEntryData(path, "UTF-8");
-            }
-
-            @Override
-            public String getEntryData(String path, String encoding) {
-                String result = null;
-                File entry = new File(root, path);
-                if (entry.isFile() && entry.exists()) {
-                    try {
-                        result = FileUtils.readFileToString(entry, encoding);
-                    } catch (IOException e) {
-                        e.printStackTrace();
-                    }
-                }
-                return result;
-            }
-
-            @Override
-            public void createEntry(String path) {
-                createEntry(path, "");
-            }
-
-            @Override
-            public void createEntry(String path, String data) {
-                createEntry(path, data, "UTF-8");
-            }
-
-            @Override
-            public void createEntry(String path, String data, String encoding) {
-                File entry = new File(root, path);
-                if (!entry.exists()) {
-                    if (data != null) {
-                        try {
-                            FileUtils.writeStringToFile(entry, data, encoding);
-                        } catch (IOException e) {
-                            e.printStackTrace();
-                        }
-                    }
-                }
-            }
-
-            @Override
-            public int setEntryData(String path, String data) {
-                setEntryData(path, data, "UTF-8");
-                return 0;
-            }
-
-            @Override
-            public int setEntryData(String path, String data, String encoding) {
-                File entry = new File(root, path);
-                if (entry.exists()) {
-                    try {
-                        FileUtils.writeStringToFile(entry, data, encoding);
-                    } catch (IOException e) {
-                        e.printStackTrace();
-                    }
-                }
-                return 0;
-            }
-
-            @Override
-            public boolean isAuthenticationConfigured() {
-                return false;
-            }
-
-            @Override
-            public void setACL(String path, List<EntryACL> acls) {
-                //
-            }
-
-            @Override
-            public void deleteEntry(String path) {
-                File entry = new File(root, path);
-                if (entry.exists()) {
-                    entry.delete();
-                }
-            }
-
-            @Override
-            public void addChildEntryListener(String path, ChildEntryListener listener) throws Exception {
-                // N/A
-            }
-
-            @Override
-            public void addEntryListener(String path, EntryListener listener) throws Exception {
-                // N/A
-            }
-
-            @Override
-            public void removeEntryListener(String path) throws Exception {
-                // N/A
-            }
-        };
-    }
-
-}

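For reference, the test service removed above backs each registry "entry" with a plain file under a configured root directory: entryExists maps to File.exists, getEntryData and createEntry map to commons-io reads and writes, and listChildEntries maps to File.list. The following is a minimal standalone sketch of that same idea; the FileBackedRegistry class and its method names are illustrative only and are not part of the Knox API.

    import org.apache.commons.io.FileUtils;

    import java.io.File;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    /** Illustrative only: a file-per-entry registry in the spirit of the removed test service. */
    public class FileBackedRegistry {
        private final File root;

        public FileBackedRegistry(File root) {
            this.root = root;
        }

        public boolean entryExists(String path) {
            return new File(root, path).exists();
        }

        public void createEntry(String path, String data) throws IOException {
            // Creates parent directories as needed and writes the entry contents.
            FileUtils.writeStringToFile(new File(root, path), data, "UTF-8");
        }

        public String getEntryData(String path) throws IOException {
            File entry = new File(root, path);
            return entry.isFile() ? FileUtils.readFileToString(entry, "UTF-8") : null;
        }

        public List<String> listChildEntries(String path) {
            String[] children = new File(root, path).list();
            return children == null ? new ArrayList<>() : new ArrayList<>(Arrays.asList(children));
        }

        public static void main(String[] args) throws IOException {
            FileBackedRegistry registry = new FileBackedRegistry(new File("target/registry-root"));
            registry.createEntry("knox/config/descriptors/test1.json", "{}");
            System.out.println(registry.entryExists("knox/config/descriptors/test1.json")); // true
            System.out.println(registry.listChildEntries("knox/config/descriptors"));       // [test1.json]
        }
    }
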
http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java
deleted file mode 100644
index 42e79c1..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote;
-
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-
-public class LocalFileSystemRemoteConfigurationRegistryClientServiceProvider implements RemoteConfigurationRegistryClientServiceProvider {
-
-    @Override
-    public String getType() {
-        return LocalFileSystemRemoteConfigurationRegistryClientService.TYPE;
-    }
-
-    @Override
-    public RemoteConfigurationRegistryClientService newInstance() {
-        return new LocalFileSystemRemoteConfigurationRegistryClientService();
-    }
-}

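The provider removed above is a small factory: getType() advertises the registry type string ("LocalFileSystem") and newInstance() constructs the matching client service. Providers of this shape are commonly looked up by type at runtime, for example via java.util.ServiceLoader. The sketch below shows that lookup pattern using a hypothetical RegistryClientServiceProvider interface; it is an assumption for illustration and not a description of how Knox itself wires these providers.

    import java.util.ServiceLoader;

    public class ProviderLookupSketch {

        /** Hypothetical provider contract mirroring the shape of the removed class. */
        public interface RegistryClientServiceProvider {
            String getType();
            Object newInstance();
        }

        /** Returns the first registered provider whose type matches, or null if none does. */
        static RegistryClientServiceProvider findProvider(String type) {
            for (RegistryClientServiceProvider p : ServiceLoader.load(RegistryClientServiceProvider.class)) {
                if (p.getType().equalsIgnoreCase(type)) {
                    return p;
                }
            }
            return null;
        }

        public static void main(String[] args) {
            // With a META-INF/services registration in place, this would return the
            // "LocalFileSystem" provider; without one it simply prints null.
            System.out.println(findProvider("LocalFileSystem"));
        }
    }
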
http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java
deleted file mode 100644
index 1c4ed6e..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java
+++ /dev/null
@@ -1,355 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.monitor;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.curator.test.InstanceSpec;
-import org.apache.curator.test.TestingCluster;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.service.config.remote.zk.ZooKeeperClientService;
-import org.apache.hadoop.gateway.service.config.remote.zk.ZooKeeperClientServiceProvider;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.data.ACL;
-import org.easymock.EasyMock;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Test the ZooKeeperConfigMonitor WITHOUT SASL configured or znode ACLs applied.
- * The implementation of the monitor is the same regardless, since the ACLs are defined by the ZooKeeper znode
- * creator, and the SASL config is purely JAAS (and external to the implementation).
- */
-public class ZooKeeperConfigurationMonitorTest {
-
-    private static final String PATH_KNOX = "/knox";
-    private static final String PATH_KNOX_CONFIG = PATH_KNOX + "/config";
-    private static final String PATH_KNOX_PROVIDERS = PATH_KNOX_CONFIG + "/shared-providers";
-    private static final String PATH_KNOX_DESCRIPTORS = PATH_KNOX_CONFIG + "/descriptors";
-
-    private static File testTmp;
-    private static File providersDir;
-    private static File descriptorsDir;
-
-    private static TestingCluster zkCluster;
-
-    private static CuratorFramework client;
-
-    private GatewayConfig gc;
-
-
-    @BeforeClass
-    public static void setupSuite() throws Exception {
-        testTmp = TestUtils.createTempDir(ZooKeeperConfigurationMonitorTest.class.getName());
-        File confDir   = TestUtils.createTempDir(testTmp + "/conf");
-        providersDir   = TestUtils.createTempDir(confDir + "/shared-providers");
-        descriptorsDir = TestUtils.createTempDir(confDir + "/descriptors");
-
-        configureAndStartZKCluster();
-    }
-
-    private static void configureAndStartZKCluster() throws Exception {
-        // Configure security for the ZK cluster instances
-        Map<String, Object> customInstanceSpecProps = new HashMap<>();
-        customInstanceSpecProps.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
-        customInstanceSpecProps.put("requireClientAuthScheme", "sasl");
-
-        // Define the test cluster
-        List<InstanceSpec> instanceSpecs = new ArrayList<>();
-        for (int i = 0 ; i < 3 ; i++) {
-            InstanceSpec is = new InstanceSpec(null, -1, -1, -1, false, (i+1), -1, -1, customInstanceSpecProps);
-            instanceSpecs.add(is);
-        }
-        zkCluster = new TestingCluster(instanceSpecs);
-
-        // Start the cluster
-        zkCluster.start();
-
-        // Create the client for the test cluster
-        client = CuratorFrameworkFactory.builder()
-                                        .connectString(zkCluster.getConnectString())
-                                        .retryPolicy(new ExponentialBackoffRetry(100, 3))
-                                        .build();
-        assertNotNull(client);
-        client.start();
-
-        // Create the knox config paths with an ACL for the sasl user configured for the client
-        List<ACL> acls = new ArrayList<>();
-        acls.add(new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.ANYONE_ID_UNSAFE));
-
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
-        assertNotNull("Failed to create node:" + PATH_KNOX_DESCRIPTORS,
-                client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
-        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
-        assertNotNull("Failed to create node:" + PATH_KNOX_PROVIDERS,
-                client.checkExists().forPath(PATH_KNOX_PROVIDERS));
-    }
-
-    @AfterClass
-    public static void tearDownSuite() throws Exception {
-        // Clean up the ZK nodes, and close the client
-        if (client != null) {
-            client.delete().deletingChildrenIfNeeded().forPath(PATH_KNOX);
-            client.close();
-        }
-
-        // Shutdown the ZK cluster
-        zkCluster.close();
-
-        // Delete the working dir
-        testTmp.delete();
-    }
-
-    @Test
-    public void testZooKeeperConfigMonitor() throws Exception {
-        String configMonitorName = "remoteConfigMonitorClient";
-
-        // Setup the base GatewayConfig mock
-        gc = EasyMock.createNiceMock(GatewayConfig.class);
-        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
-        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
-        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
-                .andReturn(Collections.singletonList(configMonitorName))
-                .anyTimes();
-        final String registryConfig =
-                                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
-                                GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
-        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
-                .andReturn(registryConfig)
-                .anyTimes();
-        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
-        EasyMock.replay(gc);
-
-        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
-        EasyMock.replay(aliasService);
-
-        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
-        clientService.setAliasService(aliasService);
-        clientService.init(gc, Collections.emptyMap());
-        clientService.start();
-
-        DefaultRemoteConfigurationMonitor cm = new DefaultRemoteConfigurationMonitor(gc, clientService);
-
-        try {
-            cm.start();
-        } catch (Exception e) {
-            fail("Failed to start monitor: " + e.getMessage());
-        }
-
-        try {
-            final String pc_one_znode = getProviderPath("providers-config1.xml");
-            final File pc_one         = new File(providersDir, "providers-config1.xml");
-            final String pc_two_znode = getProviderPath("providers-config2.xml");
-            final File pc_two         = new File(providersDir, "providers-config2.xml");
-
-            client.create().withMode(CreateMode.PERSISTENT).forPath(pc_one_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
-            Thread.sleep(100);
-            assertTrue(pc_one.exists());
-            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_one));
-
-            client.create().withMode(CreateMode.PERSISTENT).forPath(getProviderPath("providers-config2.xml"), TEST_PROVIDERS_CONFIG_2.getBytes());
-            Thread.sleep(100);
-            assertTrue(pc_two.exists());
-            assertEquals(TEST_PROVIDERS_CONFIG_2, FileUtils.readFileToString(pc_two));
-
-            client.setData().forPath(pc_two_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
-            Thread.sleep(100);
-            assertTrue(pc_two.exists());
-            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_two));
-
-            client.delete().forPath(pc_two_znode);
-            Thread.sleep(100);
-            assertFalse(pc_two.exists());
-
-            client.delete().forPath(pc_one_znode);
-            Thread.sleep(100);
-            assertFalse(pc_one.exists());
-
-            final String desc_one_znode   = getDescriptorPath("test1.json");
-            final String desc_two_znode   = getDescriptorPath("test2.json");
-            final String desc_three_znode = getDescriptorPath("test3.json");
-            final File desc_one           = new File(descriptorsDir, "test1.json");
-            final File desc_two           = new File(descriptorsDir, "test2.json");
-            final File desc_three         = new File(descriptorsDir, "test3.json");
-
-            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_one_znode, TEST_DESCRIPTOR_1.getBytes());
-            Thread.sleep(100);
-            assertTrue(desc_one.exists());
-            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_one));
-
-            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_two_znode, TEST_DESCRIPTOR_1.getBytes());
-            Thread.sleep(100);
-            assertTrue(desc_two.exists());
-            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_two));
-
-            client.setData().forPath(desc_two_znode, TEST_DESCRIPTOR_2.getBytes());
-            Thread.sleep(100);
-            assertTrue(desc_two.exists());
-            assertEquals(TEST_DESCRIPTOR_2, FileUtils.readFileToString(desc_two));
-
-            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_three_znode, TEST_DESCRIPTOR_1.getBytes());
-            Thread.sleep(100);
-            assertTrue(desc_three.exists());
-            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_three));
-
-            client.delete().forPath(desc_two_znode);
-            Thread.sleep(100);
-            assertFalse("Expected test2.json to have been deleted.", desc_two.exists());
-
-            client.delete().forPath(desc_three_znode);
-            Thread.sleep(100);
-            assertFalse(desc_three.exists());
-
-            client.delete().forPath(desc_one_znode);
-            Thread.sleep(100);
-            assertFalse(desc_one.exists());
-        } finally {
-            cm.stop();
-        }
-    }
-
-    private static String getDescriptorPath(String descriptorName) {
-        return PATH_KNOX_DESCRIPTORS + "/" + descriptorName;
-    }
-
-    private static String getProviderPath(String providerConfigName) {
-        return PATH_KNOX_PROVIDERS + "/" + providerConfigName;
-    }
-
-
-    private static final String TEST_PROVIDERS_CONFIG_1 =
-            "<gateway>\n" +
-            "    <provider>\n" +
-            "        <role>identity-assertion</role>\n" +
-            "        <name>Default</name>\n" +
-            "        <enabled>true</enabled>\n" +
-            "    </provider>\n" +
-            "    <provider>\n" +
-            "        <role>hostmap</role>\n" +
-            "        <name>static</name>\n" +
-            "        <enabled>true</enabled>\n" +
-            "        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
-            "    </provider>\n" +
-            "</gateway>\n";
-
-    private static final String TEST_PROVIDERS_CONFIG_2 =
-            "<gateway>\n" +
-            "    <provider>\n" +
-            "        <role>authentication</role>\n" +
-            "        <name>ShiroProvider</name>\n" +
-            "        <enabled>true</enabled>\n" +
-            "        <param>\n" +
-            "            <name>sessionTimeout</name>\n" +
-            "            <value>30</value>\n" +
-            "        </param>\n" +
-            "        <param>\n" +
-            "            <name>main.ldapRealm</name>\n" +
-            "            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
-            "        </param>\n" +
-            "        <param>\n" +
-            "            <name>main.ldapContextFactory</name>\n" +
-            "            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
-            "        </param>\n" +
-            "        <param>\n" +
-            "            <name>main.ldapRealm.contextFactory</name>\n" +
-            "            <value>$ldapContextFactory</value>\n" +
-            "        </param>\n" +
-            "        <param>\n" +
-            "            <name>main.ldapRealm.userDnTemplate</name>\n" +
-            "            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
-            "        </param>\n" +
-            "        <param>\n" +
-            "            <name>main.ldapRealm.contextFactory.url</name>\n" +
-            "            <value>ldap://localhost:33389</value>\n" +
-            "        </param>\n" +
-            "        <param>\n" +
-            "            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
-            "            <value>simple</value>\n" +
-            "        </param>\n" +
-            "        <param>\n" +
-            "            <name>urls./**</name>\n" +
-            "            <value>authcBasic</value>\n" +
-            "        </param>\n" +
-            "    </provider>\n" +
-            "</gateway>\n";
-
-    private static final String TEST_DESCRIPTOR_1 =
-            "{\n" +
-            "  \"discovery-type\":\"AMBARI\",\n" +
-            "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
-            "  \"discovery-user\":\"maria_dev\",\n" +
-            "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
-            "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
-            "  \"cluster\":\"Sandbox\",\n" +
-            "  \"services\":[\n" +
-            "    {\"name\":\"NODEUI\"},\n" +
-            "    {\"name\":\"YARNUI\"},\n" +
-            "    {\"name\":\"HDFSUI\"},\n" +
-            "    {\"name\":\"OOZIEUI\"},\n" +
-            "    {\"name\":\"HBASEUI\"},\n" +
-            "    {\"name\":\"NAMENODE\"},\n" +
-            "    {\"name\":\"JOBTRACKER\"},\n" +
-            "    {\"name\":\"WEBHDFS\"},\n" +
-            "    {\"name\":\"WEBHCAT\"},\n" +
-            "    {\"name\":\"OOZIE\"},\n" +
-            "    {\"name\":\"WEBHBASE\"},\n" +
-            "    {\"name\":\"RESOURCEMANAGER\"},\n" +
-            "    {\"name\":\"AMBARI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]},\n" +
-            "    {\"name\":\"AMBARIUI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]}\n" +
-            "  ]\n" +
-            "}\n";
-
-    private static final String TEST_DESCRIPTOR_2 =
-            "{\n" +
-            "  \"discovery-type\":\"AMBARI\",\n" +
-            "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
-            "  \"discovery-user\":\"maria_dev\",\n" +
-            "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
-            "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
-            "  \"cluster\":\"Sandbox\",\n" +
-            "  \"services\":[\n" +
-            "    {\"name\":\"NAMENODE\"},\n" +
-            "    {\"name\":\"JOBTRACKER\"},\n" +
-            "    {\"name\":\"WEBHDFS\"},\n" +
-            "    {\"name\":\"WEBHCAT\"},\n" +
-            "    {\"name\":\"OOZIE\"},\n" +
-            "    {\"name\":\"WEBHBASE\"},\n" +
-            "    {\"name\":\"RESOURCEMANAGER\"}\n" +
-            "  ]\n" +
-            "}\n";
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/test/java/org/apache/knox/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java b/gateway-server/src/test/java/org/apache/knox/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
new file mode 100644
index 0000000..3bf7d2e
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientService.java
@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.service.config.remote.config.RemoteConfigurationRegistriesAccessor;
+import org.apache.knox.gateway.services.ServiceLifecycleException;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.knox.gateway.services.security.AliasService;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.PosixFilePermission;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+
+/**
+ * An implementation of RemoteConfigurationRegistryClientService intended to be used for testing without having to
+ * connect to an actual remote configuration registry.
+ */
+public class LocalFileSystemRemoteConfigurationRegistryClientService implements RemoteConfigurationRegistryClientService {
+
+    public static final String TYPE = "LocalFileSystem";
+
+    private Map<String, RemoteConfigurationRegistryClient> clients = new HashMap<>();
+
+
+    @Override
+    public void setAliasService(AliasService aliasService) {
+        // N/A
+    }
+
+    @Override
+    public RemoteConfigurationRegistryClient get(String name) {
+        return clients.get(name);
+    }
+
+    @Override
+    public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
+        List<RemoteConfigurationRegistryConfig> registryConfigurations =
+                                        RemoteConfigurationRegistriesAccessor.getRemoteRegistryConfigurations(config);
+        for (RemoteConfigurationRegistryConfig registryConfig : registryConfigurations) {
+            if (TYPE.equalsIgnoreCase(registryConfig.getRegistryType())) {
+                RemoteConfigurationRegistryClient registryClient = createClient(registryConfig);
+                clients.put(registryConfig.getName(), registryClient);
+            }
+        }
+    }
+
+    @Override
+    public void start() throws ServiceLifecycleException {
+
+    }
+
+    @Override
+    public void stop() throws ServiceLifecycleException {
+
+    }
+
+
+    private RemoteConfigurationRegistryClient createClient(RemoteConfigurationRegistryConfig config) {
+        String rootDir = config.getConnectionString();
+
+        return new RemoteConfigurationRegistryClient() {
+            private File root = new File(rootDir);
+
+            @Override
+            public String getAddress() {
+                return root.getAbsolutePath();
+            }
+
+            @Override
+            public boolean entryExists(String path) {
+                return (new File(root, path)).exists();
+            }
+
+            @Override
+            public List<EntryACL> getACL(String path) {
+                List<EntryACL> result = new ArrayList<>();
+
+                Path resolved = Paths.get(rootDir, path);
+                try {
+                    Map<String, List<String>> collected = new HashMap<>();
+
+                    Set<PosixFilePermission> perms = Files.getPosixFilePermissions(resolved);
+                    for (PosixFilePermission perm : perms) {
+                        String[] parsed = perm.toString().split("_");
+                        collected.computeIfAbsent(parsed[0].toLowerCase(), s -> new ArrayList<>()).add(parsed[1].toLowerCase());
+                    }
+
+                    for (String id : collected.keySet()) {
+                        EntryACL acl = new EntryACL() {
+                            @Override
+                            public String getId() {
+                                return id;
+                            }
+
+                            @Override
+                            public String getType() {
+                                return "fs";
+                            }
+
+                            @Override
+                            public Object getPermissions() {
+                                return collected.get(id).toString();
+                            }
+
+                            @Override
+                            public boolean canRead() {
+                                return true;
+                            }
+
+                            @Override
+                            public boolean canWrite() {
+                                return true;
+                            }
+                        };
+                        result.add(acl);
+                    }
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+                return result;
+            }
+
+            @Override
+            public List<String> listChildEntries(String path) {
+                List<String> result = new ArrayList<>();
+
+                File entry = new File(root, path);
+                if (entry.exists() && entry.isDirectory()) {
+                    String[] list = entry.list();
+                    if (list != null) {
+                        result.addAll(Arrays.asList(list));
+                    }
+                }
+
+                return result;
+            }
+
+            @Override
+            public String getEntryData(String path) {
+                return getEntryData(path, "UTF-8");
+            }
+
+            @Override
+            public String getEntryData(String path, String encoding) {
+                String result = null;
+                File entry = new File(root, path);
+                if (entry.isFile() && entry.exists()) {
+                    try {
+                        result = FileUtils.readFileToString(entry, encoding);
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    }
+                }
+                return result;
+            }
+
+            @Override
+            public void createEntry(String path) {
+                createEntry(path, "");
+            }
+
+            @Override
+            public void createEntry(String path, String data) {
+                createEntry(path, data, "UTF-8");
+            }
+
+            @Override
+            public void createEntry(String path, String data, String encoding) {
+                File entry = new File(root, path);
+                if (!entry.exists()) {
+                    if (data != null) {
+                        try {
+                            FileUtils.writeStringToFile(entry, data, encoding);
+                        } catch (IOException e) {
+                            e.printStackTrace();
+                        }
+                    }
+                }
+            }
+
+            @Override
+            public int setEntryData(String path, String data) {
+                setEntryData(path, data, "UTF-8");
+                return 0;
+            }
+
+            @Override
+            public int setEntryData(String path, String data, String encoding) {
+                File entry = new File(root, path);
+                if (entry.exists()) {
+                    try {
+                        FileUtils.writeStringToFile(entry, data, encoding);
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    }
+                }
+                return 0;
+            }
+
+            @Override
+            public boolean isAuthenticationConfigured() {
+                return false;
+            }
+
+            @Override
+            public void setACL(String path, List<EntryACL> acls) {
+                //
+            }
+
+            @Override
+            public void deleteEntry(String path) {
+                File entry = new File(root, path);
+                if (entry.exists()) {
+                    entry.delete();
+                }
+            }
+
+            @Override
+            public void addChildEntryListener(String path, ChildEntryListener listener) throws Exception {
+                // N/A
+            }
+
+            @Override
+            public void addEntryListener(String path, EntryListener listener) throws Exception {
+                // N/A
+            }
+
+            @Override
+            public void removeEntryListener(String path) throws Exception {
+                // N/A
+            }
+        };
+    }
+
+}
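
[Editor's note] A minimal usage sketch for the filesystem-backed test client defined above; it is not part of the commit. It assumes the client service has already been initialized with a registry of type "LocalFileSystem" (the registry name "local" and the paths below are illustrative), for example via a mocked GatewayConfig in the style of ZooKeeperConfigurationMonitorTest later in this patch. Only methods declared by RemoteConfigurationRegistryClient in the file above are used; the assert* calls are JUnit-style.

    // Obtain the client registered under the (assumed) name "local".
    RemoteConfigurationRegistryClient fsClient = clientService.get("local");

    // Entries are plain files resolved against the registry's root directory.
    fsClient.createEntry("/knox/config/descriptors/test1.json", "{\"cluster\":\"Sandbox\"}");
    assertTrue(fsClient.entryExists("/knox/config/descriptors/test1.json"));
    assertTrue(fsClient.listChildEntries("/knox/config/descriptors").contains("test1.json"));

    // Overwrite and read back the entry data (UTF-8 by default).
    fsClient.setEntryData("/knox/config/descriptors/test1.json", "{\"cluster\":\"Sandbox2\"}");
    assertEquals("{\"cluster\":\"Sandbox2\"}", fsClient.getEntryData("/knox/config/descriptors/test1.json"));

    // Delete the entry; the backing file is removed.
    fsClient.deleteEntry("/knox/config/descriptors/test1.json");
    assertFalse(fsClient.entryExists("/knox/config/descriptors/test1.json"));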

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/test/java/org/apache/knox/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java b/gateway-server/src/test/java/org/apache/knox/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java
new file mode 100644
index 0000000..3b96068
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/service/config/remote/LocalFileSystemRemoteConfigurationRegistryClientServiceProvider.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.config.remote;
+
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+public class LocalFileSystemRemoteConfigurationRegistryClientServiceProvider implements RemoteConfigurationRegistryClientServiceProvider {
+
+    @Override
+    public String getType() {
+        return LocalFileSystemRemoteConfigurationRegistryClientService.TYPE;
+    }
+
+    @Override
+    public RemoteConfigurationRegistryClientService newInstance() {
+        return new LocalFileSystemRemoteConfigurationRegistryClientService();
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/test/java/org/apache/knox/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java
new file mode 100644
index 0000000..75cd5d0
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/monitor/ZooKeeperConfigurationMonitorTest.java
@@ -0,0 +1,355 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.monitor;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.curator.test.InstanceSpec;
+import org.apache.curator.test.TestingCluster;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.service.config.remote.zk.ZooKeeperClientService;
+import org.apache.knox.gateway.service.config.remote.zk.ZooKeeperClientServiceProvider;
+import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.test.TestUtils;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.data.ACL;
+import org.easymock.EasyMock;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test the ZooKeeperConfigMonitor WITHOUT SASL configured or znode ACLs applied.
+ * The implementation of the monitor is the same regardless, since the ACLs are defined by the ZooKeeper znode
+ * creator, and the SASL config is purely JAAS (and external to the implementation).
+ */
+public class ZooKeeperConfigurationMonitorTest {
+
+    private static final String PATH_KNOX = "/knox";
+    private static final String PATH_KNOX_CONFIG = PATH_KNOX + "/config";
+    private static final String PATH_KNOX_PROVIDERS = PATH_KNOX_CONFIG + "/shared-providers";
+    private static final String PATH_KNOX_DESCRIPTORS = PATH_KNOX_CONFIG + "/descriptors";
+
+    private static File testTmp;
+    private static File providersDir;
+    private static File descriptorsDir;
+
+    private static TestingCluster zkCluster;
+
+    private static CuratorFramework client;
+
+    private GatewayConfig gc;
+
+
+    @BeforeClass
+    public static void setupSuite() throws Exception {
+        testTmp = TestUtils.createTempDir(ZooKeeperConfigurationMonitorTest.class.getName());
+        File confDir   = TestUtils.createTempDir(testTmp + "/conf");
+        providersDir   = TestUtils.createTempDir(confDir + "/shared-providers");
+        descriptorsDir = TestUtils.createTempDir(confDir + "/descriptors");
+
+        configureAndStartZKCluster();
+    }
+
+    private static void configureAndStartZKCluster() throws Exception {
+        // Configure security for the ZK cluster instances
+        Map<String, Object> customInstanceSpecProps = new HashMap<>();
+        customInstanceSpecProps.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
+        customInstanceSpecProps.put("requireClientAuthScheme", "sasl");
+
+        // Define the test cluster
+        List<InstanceSpec> instanceSpecs = new ArrayList<>();
+        for (int i = 0 ; i < 3 ; i++) {
+            InstanceSpec is = new InstanceSpec(null, -1, -1, -1, false, (i+1), -1, -1, customInstanceSpecProps);
+            instanceSpecs.add(is);
+        }
+        zkCluster = new TestingCluster(instanceSpecs);
+
+        // Start the cluster
+        zkCluster.start();
+
+        // Create the client for the test cluster
+        client = CuratorFrameworkFactory.builder()
+                                        .connectString(zkCluster.getConnectString())
+                                        .retryPolicy(new ExponentialBackoffRetry(100, 3))
+                                        .build();
+        assertNotNull(client);
+        client.start();
+
+        // Create the knox config paths with an open ACL (no SASL user is configured for the client in this test)
+        List<ACL> acls = new ArrayList<>();
+        acls.add(new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
+        assertNotNull("Failed to create node:" + PATH_KNOX_DESCRIPTORS,
+                client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
+        assertNotNull("Failed to create node:" + PATH_KNOX_PROVIDERS,
+                client.checkExists().forPath(PATH_KNOX_PROVIDERS));
+    }
+
+    @AfterClass
+    public static void tearDownSuite() throws Exception {
+        // Clean up the ZK nodes, and close the client
+        if (client != null) {
+            client.delete().deletingChildrenIfNeeded().forPath(PATH_KNOX);
+            client.close();
+        }
+
+        // Shutdown the ZK cluster
+        zkCluster.close();
+
+        // Delete the working dir (File#delete() cannot remove a non-empty directory)
+        FileUtils.deleteQuietly(testTmp);
+    }
+
+    @Test
+    public void testZooKeeperConfigMonitor() throws Exception {
+        String configMonitorName = "remoteConfigMonitorClient";
+
+        // Setup the base GatewayConfig mock
+        gc = EasyMock.createNiceMock(GatewayConfig.class);
+        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
+                .andReturn(Collections.singletonList(configMonitorName))
+                .anyTimes();
+        final String registryConfig =
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                                GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
+        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
+                .andReturn(registryConfig)
+                .anyTimes();
+        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
+        EasyMock.replay(gc);
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.replay(aliasService);
+
+        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
+        clientService.setAliasService(aliasService);
+        clientService.init(gc, Collections.emptyMap());
+        clientService.start();
+
+        DefaultRemoteConfigurationMonitor cm = new DefaultRemoteConfigurationMonitor(gc, clientService);
+
+        try {
+            cm.start();
+        } catch (Exception e) {
+            fail("Failed to start monitor: " + e.getMessage());
+        }
+
+        try {
+            final String pc_one_znode = getProviderPath("providers-config1.xml");
+            final File pc_one         = new File(providersDir, "providers-config1.xml");
+            final String pc_two_znode = getProviderPath("providers-config2.xml");
+            final File pc_two         = new File(providersDir, "providers-config2.xml");
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(pc_one_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_one.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_one));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(getProviderPath("providers-config2.xml"), TEST_PROVIDERS_CONFIG_2.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_two.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_2, FileUtils.readFileToString(pc_two));
+
+            client.setData().forPath(pc_two_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_two.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_two));
+
+            client.delete().forPath(pc_two_znode);
+            Thread.sleep(100);
+            assertFalse(pc_two.exists());
+
+            client.delete().forPath(pc_one_znode);
+            Thread.sleep(100);
+            assertFalse(pc_one.exists());
+
+            final String desc_one_znode   = getDescriptorPath("test1.json");
+            final String desc_two_znode   = getDescriptorPath("test2.json");
+            final String desc_three_znode = getDescriptorPath("test3.json");
+            final File desc_one           = new File(descriptorsDir, "test1.json");
+            final File desc_two           = new File(descriptorsDir, "test2.json");
+            final File desc_three         = new File(descriptorsDir, "test3.json");
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_one_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_one.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_one));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_two_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_two.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_two));
+
+            client.setData().forPath(desc_two_znode, TEST_DESCRIPTOR_2.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_two.exists());
+            assertEquals(TEST_DESCRIPTOR_2, FileUtils.readFileToString(desc_two));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_three_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_three.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_three));
+
+            client.delete().forPath(desc_two_znode);
+            Thread.sleep(100);
+            assertFalse("Expected test2.json to have been deleted.", desc_two.exists());
+
+            client.delete().forPath(desc_three_znode);
+            Thread.sleep(100);
+            assertFalse(desc_three.exists());
+
+            client.delete().forPath(desc_one_znode);
+            Thread.sleep(100);
+            assertFalse(desc_one.exists());
+        } finally {
+            cm.stop();
+        }
+    }
+
+    private static String getDescriptorPath(String descriptorName) {
+        return PATH_KNOX_DESCRIPTORS + "/" + descriptorName;
+    }
+
+    private static String getProviderPath(String providerConfigName) {
+        return PATH_KNOX_PROVIDERS + "/" + providerConfigName;
+    }
+
+
+    private static final String TEST_PROVIDERS_CONFIG_1 =
+            "<gateway>\n" +
+            "    <provider>\n" +
+            "        <role>identity-assertion</role>\n" +
+            "        <name>Default</name>\n" +
+            "        <enabled>true</enabled>\n" +
+            "    </provider>\n" +
+            "    <provider>\n" +
+            "        <role>hostmap</role>\n" +
+            "        <name>static</name>\n" +
+            "        <enabled>true</enabled>\n" +
+            "        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+            "    </provider>\n" +
+            "</gateway>\n";
+
+    private static final String TEST_PROVIDERS_CONFIG_2 =
+            "<gateway>\n" +
+            "    <provider>\n" +
+            "        <role>authentication</role>\n" +
+            "        <name>ShiroProvider</name>\n" +
+            "        <enabled>true</enabled>\n" +
+            "        <param>\n" +
+            "            <name>sessionTimeout</name>\n" +
+            "            <value>30</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapRealm</name>\n" +
+            "            <value>org.apache.knox.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapContextFactory</name>\n" +
+            "            <value>org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapRealm.contextFactory</name>\n" +
+            "            <value>$ldapContextFactory</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapRealm.userDnTemplate</name>\n" +
+            "            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapRealm.contextFactory.url</name>\n" +
+            "            <value>ldap://localhost:33389</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+            "            <value>simple</value>\n" +
+            "        </param>\n" +
+            "        <param>\n" +
+            "            <name>urls./**</name>\n" +
+            "            <value>authcBasic</value>\n" +
+            "        </param>\n" +
+            "    </provider>\n" +
+            "</gateway>\n";
+
+    private static final String TEST_DESCRIPTOR_1 =
+            "{\n" +
+            "  \"discovery-type\":\"AMBARI\",\n" +
+            "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
+            "  \"discovery-user\":\"maria_dev\",\n" +
+            "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
+            "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
+            "  \"cluster\":\"Sandbox\",\n" +
+            "  \"services\":[\n" +
+            "    {\"name\":\"NODEUI\"},\n" +
+            "    {\"name\":\"YARNUI\"},\n" +
+            "    {\"name\":\"HDFSUI\"},\n" +
+            "    {\"name\":\"OOZIEUI\"},\n" +
+            "    {\"name\":\"HBASEUI\"},\n" +
+            "    {\"name\":\"NAMENODE\"},\n" +
+            "    {\"name\":\"JOBTRACKER\"},\n" +
+            "    {\"name\":\"WEBHDFS\"},\n" +
+            "    {\"name\":\"WEBHCAT\"},\n" +
+            "    {\"name\":\"OOZIE\"},\n" +
+            "    {\"name\":\"WEBHBASE\"},\n" +
+            "    {\"name\":\"RESOURCEMANAGER\"},\n" +
+            "    {\"name\":\"AMBARI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]},\n" +
+            "    {\"name\":\"AMBARIUI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]}\n" +
+            "  ]\n" +
+            "}\n";
+
+    private static final String TEST_DESCRIPTOR_2 =
+            "{\n" +
+            "  \"discovery-type\":\"AMBARI\",\n" +
+            "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
+            "  \"discovery-user\":\"maria_dev\",\n" +
+            "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
+            "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
+            "  \"cluster\":\"Sandbox\",\n" +
+            "  \"services\":[\n" +
+            "    {\"name\":\"NAMENODE\"},\n" +
+            "    {\"name\":\"JOBTRACKER\"},\n" +
+            "    {\"name\":\"WEBHDFS\"},\n" +
+            "    {\"name\":\"WEBHCAT\"},\n" +
+            "    {\"name\":\"OOZIE\"},\n" +
+            "    {\"name\":\"WEBHBASE\"},\n" +
+            "    {\"name\":\"RESOURCEMANAGER\"}\n" +
+            "  ]\n" +
+            "}\n";
+
+}
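
[Editor's note] The assertions in the test above reduce to a fixed mapping that the monitor is expected to maintain between znodes and files under the gateway's configuration directories. A compact, illustrative restatement (not code from the commit):

    // znode                                  ->  local file kept in sync by the monitor
    // /knox/config/shared-providers/<name>   ->  <gatewayProvidersConfigDir>/<name>
    // /knox/config/descriptors/<name>        ->  <gatewayDescriptorsDir>/<name>
    static File localFileFor(String znodePath, File providersDir, File descriptorsDir) {
        String name = znodePath.substring(znodePath.lastIndexOf('/') + 1);
        return znodePath.startsWith("/knox/config/shared-providers/")
                ? new File(providersDir, name)
                : new File(descriptorsDir, name);
    }

Creating, updating, or deleting a znode under either path is expected to create, update, or delete the corresponding file, which is what the short Thread.sleep(100) pauses in the test give the change listeners time to do.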

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/test/java/org/apache/knox/gateway/util/KnoxCLITest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/util/KnoxCLITest.java b/gateway-server/src/test/java/org/apache/knox/gateway/util/KnoxCLITest.java
index 902327c..b768937 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/util/KnoxCLITest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/util/KnoxCLITest.java
@@ -20,7 +20,7 @@ package org.apache.knox.gateway.util;
 import com.mycila.xmltool.XMLDoc;
 import com.mycila.xmltool.XMLTag;
 import org.apache.commons.io.FileUtils;
-import org.apache.knox.conf.Configuration;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.knox.gateway.config.impl.GatewayConfigImpl;
 import org.apache.knox.gateway.services.GatewayServices;
 import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient;

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
deleted file mode 100644
index ffd9284..0000000
--- a/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.service.config.remote.LocalFileSystemRemoteConfigurationRegistryClientServiceProvider

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-server/src/test/resources/META-INF/services/org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/META-INF/services/org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider b/gateway-server/src/test/resources/META-INF/services/org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
new file mode 100644
index 0000000..46dbdf2
--- /dev/null
+++ b/gateway-server/src/test/resources/META-INF/services/org.apache.knox.gateway.service.config.remote.RemoteConfigurationRegistryClientServiceProvider
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.knox.gateway.service.config.remote.LocalFileSystemRemoteConfigurationRegistryClientServiceProvider

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java
deleted file mode 100644
index 7cd1324..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationMessages.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote;
-
-import org.apache.hadoop.gateway.i18n.messages.Message;
-import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
-import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-
-
-/**
- *
- */
-@Messages(logger="org.apache.hadoop.gateway.service.config.remote")
-public interface RemoteConfigurationMessages {
-
-    @Message(level = MessageLevel.WARN,
-             text = "Multiple remote configuration registries are not currently supported if any of them requires authentication.")
-    void multipleRemoteRegistryConfigurations();
-
-    @Message(level = MessageLevel.ERROR, text = "Failed to resolve the credential alias {0}")
-    void unresolvedCredentialAlias(final String alias);
-
-    @Message(level = MessageLevel.ERROR, text = "An error occurred interacting with the remote configuration registry : {0}")
-    void errorInteractingWithRemoteConfigRegistry(@StackTrace(level = MessageLevel.DEBUG) Exception e);
-
-    @Message(level = MessageLevel.ERROR, text = "An error occurred handling the ACL for remote configuration {0} : {1}")
-    void errorHandlingRemoteConfigACL(final String path,
-                                      @StackTrace(level = MessageLevel.DEBUG) Exception e);
-
-    @Message(level = MessageLevel.ERROR, text = "An error occurred setting the ACL for remote configuration {0} : {1}")
-    void errorSettingEntryACL(final String path,
-                              @StackTrace(level = MessageLevel.DEBUG) Exception e);
-
-}
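
[Editor's note] For readers unfamiliar with Knox's i18n pattern: a @Messages-annotated interface like the one above is normally obtained through the project's MessagesFactory rather than implemented by hand. A hedged sketch of the usual call site (the factory call is recalled from other Knox code, not shown in this diff; the field name and alias value are illustrative):

    private static final RemoteConfigurationMessages LOG =
            MessagesFactory.get(RemoteConfigurationMessages.class);

    // ... later, e.g. when an alias lookup fails:
    LOG.unresolvedCredentialAlias("sandbox.ambari.discovery.password");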

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java
deleted file mode 100644
index cd58e22..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceFactory.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote;
-
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-
-import java.util.ServiceLoader;
-
-public class RemoteConfigurationRegistryClientServiceFactory {
-
-    public static RemoteConfigurationRegistryClientService newInstance(GatewayConfig config) {
-        RemoteConfigurationRegistryClientService rcs = null;
-
-        ServiceLoader<RemoteConfigurationRegistryClientServiceProvider> providers =
-                                             ServiceLoader.load(RemoteConfigurationRegistryClientServiceProvider.class);
-        for (RemoteConfigurationRegistryClientServiceProvider provider : providers) {
-            rcs = provider.newInstance();
-            if (rcs != null) {
-                break;
-            }
-        }
-
-        return rcs;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java
deleted file mode 100644
index ddfc392..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryClientServiceProvider.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote;
-
-import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
-
-public interface RemoteConfigurationRegistryClientServiceProvider {
-
-    String getType();
-
-    RemoteConfigurationRegistryClientService newInstance();
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java
deleted file mode 100644
index 6409250..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/RemoteConfigurationRegistryConfig.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote;
-
-public interface RemoteConfigurationRegistryConfig {
-
-    String getName();
-
-    String getRegistryType();
-
-    String getConnectionString();
-
-    String getNamespace();
-
-    boolean isSecureRegistry();
-
-    String getAuthType(); // digest, kerberos, etc...
-
-    String getPrincipal();
-
-    String getCredentialAlias();
-
-    String getKeytab();
-
-    boolean isUseTicketCache();
-
-    boolean isUseKeyTab();
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java
deleted file mode 100644
index ebcae1b..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/DefaultRemoteConfigurationRegistries.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.config;
-
-import org.apache.hadoop.gateway.config.GatewayConfig;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * A set of RemoteConfigurationRegistry configurations based on a set of property name-value pairs.
- */
-class DefaultRemoteConfigurationRegistries extends RemoteConfigurationRegistries {
-
-    private static final String PROPERTY_DELIM       = ";";
-    private static final String PROPERTY_VALUE_DELIM = "=";
-
-    private List<RemoteConfigurationRegistry> configuredRegistries = new ArrayList<>();
-
-    /**
-     * Derive the remote registry configurations from the specified GatewayConfig.
-     *
-     * @param gc The source GatewayConfig
-     */
-    DefaultRemoteConfigurationRegistries(GatewayConfig gc) {
-        List<String> configRegistryNames = gc.getRemoteRegistryConfigurationNames();
-        for (String configRegistryName : configRegistryNames) {
-            configuredRegistries.add(extractConfigForRegistry(gc, configRegistryName));
-        }
-    }
-
-    /**
-     * Extract the configuration for the specified registry configuration name.
-     *
-     * @param gc           The GatewayConfig from which to extract the registry config.
-     * @param registryName The name of the registry config.
-     *
-     * @return The resulting RemoteConfigurationRegistry object, or null.
-     */
-    private static RemoteConfigurationRegistry extractConfigForRegistry(GatewayConfig gc, String registryName) {
-        RemoteConfigurationRegistry result = new RemoteConfigurationRegistry();
-
-        result.setName(registryName);
-
-        Map<String, String> properties = parsePropertyValue(gc.getRemoteRegistryConfiguration(registryName));
-
-        result.setRegistryType(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE));
-        result.setConnectionString(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS));
-        result.setNamespace(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_NAMESPACE));
-        result.setAuthType(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_AUTH_TYPE));
-        result.setPrincipal(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_PRINCIPAL));
-        result.setCredentialAlias(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS));
-        result.setKeytab(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_KEYTAB));
-        result.setUseKeytab(Boolean.valueOf(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_KEYTAB)));
-        result.setUseTicketCache(Boolean.valueOf(properties.get(GatewayConfig.REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE)));
-
-        return result;
-    }
-
-    /**
-     * Parse the specified registry config properties String.
-     *
-     * @param value The property value content from GatewayConfig.
-     *
-     * @return A Map of the parsed properties and their respective values.
-     */
-    private static Map<String, String> parsePropertyValue(final String value) {
-        Map<String, String> result = new HashMap<>();
-
-        if (value != null) {
-            String[] props = value.split(PROPERTY_DELIM);
-            for (String prop : props) {
-                String[] split = prop.split(PROPERTY_VALUE_DELIM);
-                String propName  = split[0];
-                String propValue = (split.length > 1) ? split[1] : null;
-                result.put(propName, propValue);
-            }
-        }
-
-        return result;
-    }
-
-    @Override
-    List<RemoteConfigurationRegistry> getRegistryConfigurations() {
-        return configuredRegistries;
-    }
-
-}
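
[Editor's note] This is the org.apache.hadoop copy being removed by the package restructuring; the same "name=value;name=value" parsing presumably lives on under org.apache.knox. As a worked example of what parsePropertyValue above produces, here is a tiny standalone demo (the key names "type" and "address" are placeholders for the GatewayConfig.REMOTE_CONFIG_REGISTRY_* constant values, which this diff does not show):

    import java.util.HashMap;
    import java.util.Map;

    public class RegistryPropertyParseDemo {
        public static void main(String[] args) {
            String value = "type=ZooKeeper;address=host1:2181,host2:2181";
            Map<String, String> parsed = new HashMap<>();
            for (String prop : value.split(";")) {                // PROPERTY_DELIM
                String[] split = prop.split("=");                 // PROPERTY_VALUE_DELIM
                parsed.put(split[0], (split.length > 1) ? split[1] : null);
            }
            // Prints both parsed entries (map order is unspecified):
            //   type -> ZooKeeper, address -> host1:2181,host2:2181
            System.out.println(parsed);
        }
    }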

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistries.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistries.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistries.java
deleted file mode 100644
index fa045c0..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistries.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.config;
-
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-import java.util.ArrayList;
-import java.util.List;
-
-@XmlRootElement(name="remote-configuration-registries")
-class RemoteConfigurationRegistries {
-
-    private List<RemoteConfigurationRegistry> registryConfigurations = new ArrayList<>();
-
-    @XmlElement(name="remote-configuration-registry")
-    List<RemoteConfigurationRegistry> getRegistryConfigurations() {
-        return registryConfigurations;
-    }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java
deleted file mode 100644
index 9fed589..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesAccessor.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.config;
-
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-public class RemoteConfigurationRegistriesAccessor {
-
-    // System property for specifying a reference to an XML configuration external to the gateway config
-    private static final String XML_CONFIG_REFERENCE_SYSTEM_PROPERTY_NAME =
-                                                                "org.apache.knox.gateway.remote.registry.config.file";
-
-
-    public static List<RemoteConfigurationRegistryConfig> getRemoteRegistryConfigurations(GatewayConfig gatewayConfig) {
-        List<RemoteConfigurationRegistryConfig> result = new ArrayList<>();
-
-        boolean useReferencedFile = false;
-
-        // First check for the system property pointing to a valid XML config for the remote registries
-        String remoteConfigRegistryConfigFilename = System.getProperty(XML_CONFIG_REFERENCE_SYSTEM_PROPERTY_NAME);
-        if (remoteConfigRegistryConfigFilename != null) {
-            File remoteConfigRegistryConfigFile = new File(remoteConfigRegistryConfigFilename);
-            if (remoteConfigRegistryConfigFile.exists()) {
-                useReferencedFile = true;
-                // Parse the file, and build the registry config set
-                result.addAll(RemoteConfigurationRegistriesParser.getConfig(remoteConfigRegistryConfigFilename));
-            }
-        }
-
-        // If the system property was not set to a valid reference to another config file, then try to derive the
-        // registry configurations from the gateway config.
-        if (!useReferencedFile) {
-            RemoteConfigurationRegistries remoteConfigRegistries =
-                                                            new DefaultRemoteConfigurationRegistries(gatewayConfig);
-            result.addAll(remoteConfigRegistries.getRegistryConfigurations());
-        }
-
-        return result;
-    }
-
-}
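For orientation, the accessor removed above (it reappears under the org.apache.knox package elsewhere in this restructuring) resolves registry definitions in two steps: an external XML file named by the org.apache.knox.gateway.remote.registry.config.file system property takes precedence, and only when that property is unset or points at a missing file are the definitions derived from the gateway configuration. A minimal usage sketch, assuming an existing GatewayConfig instance named gatewayConfig and a purely hypothetical file path:

    // Hedged sketch; the path below is hypothetical and only illustrates the system property override.
    System.setProperty("org.apache.knox.gateway.remote.registry.config.file",
                       "/etc/knox/conf/remote-registries.xml");
    List<RemoteConfigurationRegistryConfig> registries =
        RemoteConfigurationRegistriesAccessor.getRemoteRegistryConfigurations(gatewayConfig);
    for (RemoteConfigurationRegistryConfig registry : registries) {
        System.out.println(registry.getName());
    }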

http://git-wip-us.apache.org/repos/asf/knox/blob/e766b3b7/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java b/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java
deleted file mode 100644
index 3ea71ef..0000000
--- a/gateway-service-remoteconfig/src/main/java/org/apache/hadoop/gateway/service/config/remote/config/RemoteConfigurationRegistriesParser.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.config.remote.config;
-
-import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
-
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.JAXBException;
-import javax.xml.bind.Unmarshaller;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-class RemoteConfigurationRegistriesParser {
-
-    static List<RemoteConfigurationRegistryConfig> getConfig(String configFilename) {
-        List<RemoteConfigurationRegistryConfig> result = new ArrayList<>();
-
-        File file = new File(configFilename);
-
-        try {
-            JAXBContext jaxbContext = JAXBContext.newInstance(RemoteConfigurationRegistries.class);
-            Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
-            RemoteConfigurationRegistries parsedContent = (RemoteConfigurationRegistries) jaxbUnmarshaller.unmarshal(file);
-            if (parsedContent != null) {
-                result.addAll(parsedContent.getRegistryConfigurations());
-            }
-        } catch (JAXBException e) {
-            e.printStackTrace();
-        }
-
-        return result;
-    }
-}
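For reference, the parser above expects an XML document whose root element and repeated child element come from the JAXB annotations in RemoteConfigurationRegistries (remote-configuration-registries / remote-configuration-registry). Below is a hedged sketch of such a document as a Java string; the nested registry property names are assumptions inferred from the fields set in DefaultRemoteConfigurationRegistries, since the RemoteConfigurationRegistry JAXB mappings are not part of this diff:

    // Illustrative only; nested element names (name, type, address, ...) are assumptions.
    String sampleRegistriesXml =
        "<remote-configuration-registries>\n" +
        "  <remote-configuration-registry>\n" +
        "    <name>sandbox-zookeeper-client</name>\n" +
        "    <type>ZooKeeper</type>\n" +
        "    <address>host1:2181,host2:2181</address>\n" +
        "    <auth-type>Digest</auth-type>\n" +
        "    <principal>knox</principal>\n" +
        "    <credential-alias>zkCredential</credential-alias>\n" +
        "  </remote-configuration-registry>\n" +
        "</remote-configuration-registries>";
    // RemoteConfigurationRegistriesParser.getConfig(filename) unmarshals a file of this shape
    // into a list of RemoteConfigurationRegistryConfig instances.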


[26/49] knox git commit: KNOX-1136 - Provision Consistent Credentials For Generated Topologies (phil zampino via lmccay)

Posted by mo...@apache.org.
KNOX-1136 - Provision Consistent Credentials For Generated Topologies (phil zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/6eacf68a
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/6eacf68a
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/6eacf68a

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 6eacf68adf2dfa3f6769ed29d79025d57e681d27
Parents: 8df6e80
Author: Larry McCay <lm...@hortonworks.com>
Authored: Tue Dec 5 10:49:40 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Tue Dec 5 10:49:40 2017 -0500

----------------------------------------------------------------------
 .../simple/SimpleDescriptorHandler.java         |  58 ++++
 .../simple/SimpleDescriptorMessages.java        |   9 +
 .../SimpleDescriptorHandlerFuncTest.java        | 275 +++++++++++++++++++
 ...eway.topology.discovery.ServiceDiscoveryType |  19 ++
 4 files changed, 361 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/6eacf68a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
index d1dc11d..c44710a 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -32,8 +32,13 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.gateway.GatewayServer;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.Service;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.KeystoreService;
+import org.apache.hadoop.gateway.services.security.MasterService;
 import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
 import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
 import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
@@ -135,6 +140,14 @@ public class SimpleDescriptorHandler {
             log.failedToDiscoverClusterServices(desc.getClusterName());
         }
 
+        // Provision the query param encryption password here, rather than relying on the random password generated
+        // when the topology is deployed. This is to support Knox HA deployments, where multiple Knox instances are
+        // generating topologies based on a shared remote descriptor, and they must all be able to encrypt/decrypt
+        // query params with the same credentials. (KNOX-1136)
+        if (!provisionQueryParamEncryptionCredential(desc.getName())) {
+            log.unableCreatePasswordForEncryption(desc.getName());
+        }
+
         BufferedWriter fw = null;
         topologyDescriptor = null;
         File providerConfig;
@@ -262,6 +275,51 @@ public class SimpleDescriptorHandler {
     }
 
 
+    /**
+     * KNOX-1136
+     *
+     * Provision the query string encryption password prior to it being randomly generated during the topology
+     * deployment.
+     *
+     * @param topologyName The name of the topology for which the credential will be provisioned.
+     *
+     * @return true if the credential was successfully provisioned; otherwise, false.
+     */
+    private static boolean provisionQueryParamEncryptionCredential(String topologyName) {
+        boolean result = false;
+
+        try {
+            GatewayServices services = GatewayServer.getGatewayServices();
+            if (services != null) {
+                MasterService ms = services.getService("MasterService");
+                if (ms != null) {
+                    KeystoreService ks = services.getService(GatewayServices.KEYSTORE_SERVICE);
+                    if (ks != null) {
+                        if (!ks.isCredentialStoreForClusterAvailable(topologyName)) {
+                            ks.createCredentialStoreForCluster(topologyName);
+                        }
+
+                        // If the credential store existed, or it was just successfully created
+                        if (ks.getCredentialStoreForCluster(topologyName) != null) {
+                            AliasService aliasService = services.getService(GatewayServices.ALIAS_SERVICE);
+                            if (aliasService != null) {
+                                // Derive and set the query param encryption password
+                                String queryEncryptionPass = new String(ms.getMasterSecret()) + topologyName;
+                                aliasService.addAliasForCluster(topologyName, "encryptQueryString", queryEncryptionPass);
+                                result = true;
+                            }
+                        }
+                    }
+                }
+            }
+        } catch (Exception e) {
+            log.exceptionCreatingPasswordForEncryption(topologyName, e);
+        }
+
+        return result;
+    }
+
+
     private static boolean validateURL(String serviceName, String url) {
         boolean result = false;
 

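The point of provisionQueryParamEncryptionCredential(...) above is that the encryptQueryString alias value becomes deterministic instead of random, so independent Knox instances that share a master secret and generate topologies from the same remote descriptor converge on the same credential. A trivial sketch of that property, using placeholder values:

    // Hedged illustration only; the values are placeholders, not real secrets.
    String masterSecret = "knox-master-secret";
    String topologyName = "sandbox";
    String derivedOnInstanceA = masterSecret + topologyName;   // computed on gateway instance A
    String derivedOnInstanceB = masterSecret + topologyName;   // computed on gateway instance B
    assert derivedOnInstanceA.equals(derivedOnInstanceB);      // both provision the same encryptQueryString value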
http://git-wip-us.apache.org/repos/asf/knox/blob/6eacf68a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
index 2a2c4c1..7fe6ca9 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
@@ -47,4 +47,13 @@ public interface SimpleDescriptorMessages {
     void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
                                                       @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 
+    @Message( level = MessageLevel.ERROR,
+              text = "Error creating a password for query string encryption for {0}: {1}" )
+    void exceptionCreatingPasswordForEncryption(String topologyName,
+                                                @StackTrace( level = MessageLevel.DEBUG) Exception e);
+
+    @Message( level = MessageLevel.ERROR,
+            text = "Failed to create a password for query string encryption for {0}." )
+    void unableCreatePasswordForEncryption(String topologyName);
+
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/6eacf68a/gateway-test/src/test/java/org/apache/hadoop/gateway/SimpleDescriptorHandlerFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/SimpleDescriptorHandlerFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/SimpleDescriptorHandlerFuncTest.java
new file mode 100644
index 0000000..bda8952
--- /dev/null
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/SimpleDescriptorHandlerFuncTest.java
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.GatewayServices;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.KeystoreService;
+import org.apache.hadoop.gateway.services.security.MasterService;
+import org.apache.hadoop.gateway.services.topology.TopologyService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+import org.apache.hadoop.gateway.topology.simple.SimpleDescriptor;
+import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorHandler;
+import org.apache.hadoop.test.TestUtils;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.security.KeyStore;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class SimpleDescriptorHandlerFuncTest {
+
+
+  private static final String TEST_PROVIDER_CONFIG =
+      "    <gateway>\n" +
+          "        <provider>\n" +
+          "            <role>authentication</role>\n" +
+          "            <name>ShiroProvider</name>\n" +
+          "            <enabled>true</enabled>\n" +
+          "            <param>\n" +
+          "                <name>sessionTimeout</name>\n" +
+          "                <value>30</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapRealm</name>\n" +
+          "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapContextFactory</name>\n" +
+          "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapRealm.contextFactory</name>\n" +
+          "                <value>$ldapContextFactory</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapRealm.userDnTemplate</name>\n" +
+          "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapRealm.contextFactory.url</name>\n" +
+          "                <value>ldap://localhost:33389</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+          "                <value>simple</value>\n" +
+          "            </param>\n" +
+          "            <param>\n" +
+          "                <name>urls./**</name>\n" +
+          "                <value>authcBasic</value>\n" +
+          "            </param>\n" +
+          "        </provider>\n" +
+          "\n" +
+          "        <provider>\n" +
+          "            <role>identity-assertion</role>\n" +
+          "            <name>Default</name>\n" +
+          "            <enabled>true</enabled>\n" +
+          "        </provider>\n" +
+          "\n" +
+          "        <provider>\n" +
+          "            <role>hostmap</role>\n" +
+          "            <name>static</name>\n" +
+          "            <enabled>true</enabled>\n" +
+          "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+          "        </provider>\n" +
+          "    </gateway>\n";
+
+
+  /**
+   * KNOX-1136
+   * <p>
+   * Test that a credential store is created, and an encryptQueryString alias is defined, with a password that is not
+   * random (but is derived from the master secret and the topology name).
+   * <p>
+   * N.B. This test depends on the NoOpServiceDiscovery extension being configured in META-INF/services
+   */
+  @Test
+  public void testSimpleDescriptorHandlerQueryStringCredentialAliasCreation() throws Exception {
+
+    final String testMasterSecret = "mysecret";
+    final String discoveryType = "NO_OP";
+    final String clusterName = "dummy";
+
+    final Map<String, List<String>> serviceURLs = new HashMap<>();
+    serviceURLs.put("RESOURCEMANAGER", Collections.singletonList("http://myhost:1234/resource"));
+
+    File testRootDir = TestUtils.createTempDir(getClass().getSimpleName());
+    File testConfDir = new File(testRootDir, "conf");
+    File testProvDir = new File(testConfDir, "shared-providers");
+    File testTopoDir = new File(testConfDir, "topologies");
+    File testDeployDir = new File(testConfDir, "deployments");
+
+    // Write the externalized provider config to a temp file
+    File providerConfig = new File(testProvDir, "ambari-cluster-policy.xml");
+    FileUtils.write(providerConfig, TEST_PROVIDER_CONFIG);
+
+    File topologyFile = null;
+    try {
+      File destDir = new File(System.getProperty("java.io.tmpdir")).getCanonicalFile();
+
+      // Mock out the simple descriptor
+      SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+      EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+      EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(null).anyTimes();
+      EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(discoveryType).anyTimes();
+      EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+      EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+      EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
+      List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+      for (String serviceName : serviceURLs.keySet()) {
+        SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+        EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+        EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+        EasyMock.expect(svc.getParams()).andReturn(Collections.emptyMap()).anyTimes();
+        EasyMock.replay(svc);
+        serviceMocks.add(svc);
+      }
+      EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+      EasyMock.replay(testDescriptor);
+
+      // Try setting up enough of the GatewayServer to support the test...
+      GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+      InetSocketAddress gatewayAddress = new InetSocketAddress(0);
+      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(testTopoDir.getAbsolutePath()).anyTimes();
+      EasyMock.expect(config.getGatewayDeploymentDir()).andReturn(testDeployDir.getAbsolutePath()).anyTimes();
+      EasyMock.expect(config.getGatewayAddress()).andReturn(gatewayAddress).anyTimes();
+      EasyMock.expect(config.getGatewayPortMappings()).andReturn(Collections.emptyMap()).anyTimes();
+      EasyMock.replay(config);
+
+      // Setup the Gateway Services
+      GatewayServices gatewayServices = EasyMock.createNiceMock(GatewayServices.class);
+
+      // Master Service
+      MasterService ms = EasyMock.createNiceMock(MasterService.class);
+      EasyMock.expect(ms.getMasterSecret()).andReturn(testMasterSecret.toCharArray()).anyTimes();
+      EasyMock.replay(ms);
+      EasyMock.expect(gatewayServices.getService("MasterService")).andReturn(ms).anyTimes();
+
+      // Keystore Service
+      KeystoreService ks = EasyMock.createNiceMock(KeystoreService.class);
+      EasyMock.expect(ks.isCredentialStoreForClusterAvailable(testDescriptor.getName())).andReturn(false).once();
+      ks.createCredentialStoreForCluster(testDescriptor.getName());
+      EasyMock.expectLastCall().once();
+      KeyStore credStore = EasyMock.createNiceMock(KeyStore.class);
+      EasyMock.expect(ks.getCredentialStoreForCluster(testDescriptor.getName())).andReturn(credStore).anyTimes();
+      EasyMock.replay(ks);
+      EasyMock.expect(gatewayServices.getService(GatewayServices.KEYSTORE_SERVICE)).andReturn(ks).anyTimes();
+
+      // Alias Service
+      AliasService as = EasyMock.createNiceMock(AliasService.class);
+      // Captures for validating the alias creation for a generated topology
+      Capture<String> capturedCluster = EasyMock.newCapture();
+      Capture<String> capturedAlias = EasyMock.newCapture();
+      Capture<String> capturedPwd = EasyMock.newCapture();
+      as.addAliasForCluster(capture(capturedCluster), capture(capturedAlias), capture(capturedPwd));
+      EasyMock.expectLastCall().anyTimes();
+      EasyMock.replay(as);
+      EasyMock.expect(gatewayServices.getService(GatewayServices.ALIAS_SERVICE)).andReturn(as).anyTimes();
+
+      // Topology Service
+      TopologyService ts = EasyMock.createNiceMock(TopologyService.class);
+      ts.addTopologyChangeListener(anyObject());
+      EasyMock.expectLastCall().anyTimes();
+      ts.reloadTopologies();
+      EasyMock.expectLastCall().anyTimes();
+      EasyMock.expect(ts.getTopologies()).andReturn(Collections.emptyList()).anyTimes();
+      EasyMock.replay(ts);
+      EasyMock.expect(gatewayServices.getService(GatewayServices.TOPOLOGY_SERVICE)).andReturn(ts).anyTimes();
+
+      EasyMock.replay(gatewayServices);
+
+      // Start a GatewayService with the GatewayServices mock
+      GatewayServer server = GatewayServer.startGateway(config, gatewayServices);
+
+      // Invoke the simple descriptor handler, which will also create the credential store
+      // (because it doesn't exist) and the encryptQueryString alias
+      Map<String, File> files = SimpleDescriptorHandler.handle(testDescriptor,
+                                                               providerConfig.getParentFile(),
+                                                               destDir);
+      topologyFile = files.get("topology");
+
+      // Validate the AliasService interaction
+      assertEquals("Unexpected cluster name for the alias (should be the topology name).",
+                   testDescriptor.getName(), capturedCluster.getValue());
+      assertEquals("Unexpected alias name.", "encryptQueryString", capturedAlias.getValue());
+      assertEquals("Unexpected alias value (should be master secret + topology name.",
+                   testMasterSecret + testDescriptor.getName(), capturedPwd.getValue());
+
+    } catch (Exception e) {
+      e.printStackTrace();
+      fail(e.getMessage());
+    } finally {
+      FileUtils.forceDelete(testRootDir);
+      if (topologyFile != null) {
+        topologyFile.delete();
+      }
+    }
+  }
+
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Test classes for effectively "skipping" service discovery for this test.
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////
+
+  public static final class NoOpServiceDiscoveryType implements ServiceDiscoveryType {
+    @Override
+    public String getType() {
+      return NoOpServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+      return new NoOpServiceDiscovery();
+    }
+  }
+
+  private static final class NoOpServiceDiscovery implements ServiceDiscovery {
+    static final String TYPE = "NO_OP";
+
+    @Override
+    public String getType() {
+      return TYPE;
+    }
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+      return Collections.emptyMap();
+    }
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+      return null;
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/6eacf68a/gateway-test/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-test/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..0c5fe09
--- /dev/null
+++ b/gateway-test/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.SimpleDescriptorHandlerFuncTest$NoOpServiceDiscoveryType

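The file above is a standard java.util.ServiceLoader registration, which is how the test's NoOpServiceDiscoveryType becomes discoverable by its NO_OP type name. A hedged sketch of the lookup such a registration enables (the gateway's ServiceDiscoveryFactory is assumed to do something equivalent; its internals are not shown in this commit):

    // Iterate every ServiceDiscoveryType registered via META-INF/services and select one by type name.
    ServiceDiscovery discovery = null;
    for (ServiceDiscoveryType candidate : java.util.ServiceLoader.load(ServiceDiscoveryType.class)) {
        if ("NO_OP".equals(candidate.getType())) {
            discovery = candidate.newInstance();
            break;
        }
    }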

[03/49] knox git commit: KNOX-1107 - Remote Configuration Registry Client Service (Phil Zampino via lmccay)

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java
new file mode 100644
index 0000000..6cbef9b
--- /dev/null
+++ b/gateway-service-remoteconfig/src/test/java/org/apache/hadoop/gateway/service/config/remote/zk/RemoteConfigurationRegistryJAASConfigTest.java
@@ -0,0 +1,255 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.config.remote.zk;
+
+import org.apache.hadoop.gateway.service.config.remote.RemoteConfigurationRegistryConfig;
+import org.apache.hadoop.gateway.service.config.remote.zk.RemoteConfigurationRegistryJAASConfig;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public class RemoteConfigurationRegistryJAASConfigTest {
+
+    @Test
+    public void testZooKeeperDigestContextEntry() throws Exception {
+        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
+        final String ENTRY_NAME       = "my_digest_context";
+        final String DIGEST_PRINCIPAL = "myIdentity";
+        final String DIGEST_PWD_ALIAS = "myAlias";
+        final String DIGEST_PWD       = "mysecret";
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(DIGEST_PWD_ALIAS)).andReturn(DIGEST_PWD.toCharArray()).anyTimes();
+        EasyMock.replay(aliasService);
+
+        registryConfigs.add(createDigestConfig(ENTRY_NAME, DIGEST_PRINCIPAL, DIGEST_PWD_ALIAS));
+
+        try {
+            RemoteConfigurationRegistryJAASConfig jaasConfig =
+                                    RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, aliasService);
+
+            // Make sure there are no entries for an invalid context entry name
+            assertNull(jaasConfig.getAppConfigurationEntry("invalid"));
+
+            // Validate the intended context entry
+            validateDigestContext(jaasConfig,
+                                  ENTRY_NAME,
+                                  RemoteConfigurationRegistryJAASConfig.digestLoginModules.get("ZOOKEEPER"),
+                                  DIGEST_PRINCIPAL,
+                                  DIGEST_PWD);
+        } finally {
+            Configuration.setConfiguration(null);
+        }
+    }
+
+    @Test
+    public void testKerberosContextEntry() throws Exception {
+        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
+        final String ENTRY_NAME = "my_kerberos_context";
+        final String PRINCIPAL  = "myIdentity";
+
+        File dummyKeyTab = File.createTempFile("my_context", "keytab");
+        registryConfigs.add(createKerberosConfig(ENTRY_NAME, PRINCIPAL, dummyKeyTab.getAbsolutePath()));
+
+        try {
+            RemoteConfigurationRegistryJAASConfig jaasConfig =
+                                            RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, null);
+
+            // Make sure there are no entries for an invalid context entry name
+            assertNull(jaasConfig.getAppConfigurationEntry("invalid"));
+
+            // Validate the intended context entry
+            validateKerberosContext(jaasConfig,
+                                    ENTRY_NAME,
+                                    PRINCIPAL,
+                                    dummyKeyTab.getAbsolutePath(),
+                                    true,
+                                    false);
+
+        } finally {
+            Configuration.setConfiguration(null);
+        }
+    }
+
+    @Test
+    public void testZooKeeperMultipleContextEntries() throws Exception {
+        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
+        final String KERBEROS_ENTRY_NAME = "my_kerberos_context";
+        final String KERBEROS_PRINCIPAL  = "myKerberosIdentity";
+        final String DIGEST_ENTRY_NAME   = "my_digest_context";
+        final String DIGEST_PRINCIPAL    = "myDigestIdentity";
+        final String DIGEST_PWD_ALIAS    = "myAlias";
+        final String DIGEST_PWD          = "mysecret";
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.expect(aliasService.getPasswordFromAliasForGateway(DIGEST_PWD_ALIAS)).andReturn(DIGEST_PWD.toCharArray()).anyTimes();
+        EasyMock.replay(aliasService);
+
+        File dummyKeyTab = File.createTempFile("my_context", "keytab");
+        registryConfigs.add(createKerberosConfig(KERBEROS_ENTRY_NAME, KERBEROS_PRINCIPAL, dummyKeyTab.getAbsolutePath()));
+        registryConfigs.add(createDigestConfig(DIGEST_ENTRY_NAME, DIGEST_PRINCIPAL, DIGEST_PWD_ALIAS));
+
+        try {
+            RemoteConfigurationRegistryJAASConfig jaasConfig =
+                                        RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, aliasService);
+
+            // Make sure there are no entries for an invalid context entry name
+            assertNull(jaasConfig.getAppConfigurationEntry("invalid"));
+
+            // Validate the kerberos context entry
+            validateKerberosContext(jaasConfig,
+                                    KERBEROS_ENTRY_NAME,
+                                    KERBEROS_PRINCIPAL,
+                                    dummyKeyTab.getAbsolutePath(),
+                                    true,
+                                    false);
+
+            // Validate the digest context entry
+            validateDigestContext(jaasConfig,
+                                  DIGEST_ENTRY_NAME,
+                                  RemoteConfigurationRegistryJAASConfig.digestLoginModules.get("ZOOKEEPER"),
+                                  DIGEST_PRINCIPAL,
+                                  DIGEST_PWD);
+
+        } finally {
+            Configuration.setConfiguration(null);
+        }
+    }
+
+    @Test
+    public void testZooKeeperDigestContextEntryWithoutAliasService() throws Exception {
+        List<RemoteConfigurationRegistryConfig> registryConfigs = new ArrayList<>();
+        final String ENTRY_NAME       = "my_digest_context";
+        final String DIGEST_PRINCIPAL = "myIdentity";
+        final String DIGEST_PWD_ALIAS = "myAlias";
+
+        registryConfigs.add(createDigestConfig(ENTRY_NAME, DIGEST_PRINCIPAL, DIGEST_PWD_ALIAS));
+
+        try {
+            RemoteConfigurationRegistryJAASConfig jaasConfig =
+                                            RemoteConfigurationRegistryJAASConfig.configure(registryConfigs, null);
+            fail("Expected IllegalArgumentException because the AliasService is not available.");
+        } catch (IllegalArgumentException e) {
+            // Expected
+            assertTrue(e.getMessage().contains("AliasService"));
+        } catch (Throwable e) {
+            fail("Wrong exception encountered: " + e.getClass().getName() + ", " + e.getMessage());
+        } finally {
+            Configuration.setConfiguration(null);
+        }
+    }
+
+    private static RemoteConfigurationRegistryConfig createDigestConfig(String entryName,
+                                                                        String principal,
+                                                                        String credentialAlias) {
+        return createDigestConfig(entryName, principal, credentialAlias, "ZooKeeper");
+    }
+
+    private static RemoteConfigurationRegistryConfig createDigestConfig(String entryName,
+                                                                        String principal,
+                                                                        String credentialAlias,
+                                                                        String registryType) {
+        RemoteConfigurationRegistryConfig rc = EasyMock.createNiceMock(RemoteConfigurationRegistryConfig.class);
+        EasyMock.expect(rc.getRegistryType()).andReturn(registryType).anyTimes();
+        EasyMock.expect(rc.getName()).andReturn(entryName).anyTimes();
+        EasyMock.expect(rc.isSecureRegistry()).andReturn(true).anyTimes();
+        EasyMock.expect(rc.getAuthType()).andReturn("digest").anyTimes();
+        EasyMock.expect(rc.getPrincipal()).andReturn(principal).anyTimes();
+        EasyMock.expect(rc.getCredentialAlias()).andReturn(credentialAlias).anyTimes();
+        EasyMock.replay(rc);
+        return rc;
+    }
+
+
+    private static RemoteConfigurationRegistryConfig createKerberosConfig(String entryName,
+                                                                          String principal,
+                                                                          String keyTabPath) {
+        return createKerberosConfig(entryName, principal, keyTabPath, "ZooKeeper");
+    }
+
+    private static RemoteConfigurationRegistryConfig createKerberosConfig(String entryName,
+                                                                          String principal,
+                                                                          String keyTabPath,
+                                                                          String registryType) {
+        return createKerberosConfig(entryName, principal, keyTabPath, null, null, registryType);
+    }
+
+    private static RemoteConfigurationRegistryConfig createKerberosConfig(String entryName,
+                                                                          String principal,
+                                                                          String keyTabPath,
+                                                                          Boolean useKeyTab,
+                                                                          Boolean useTicketCache,
+                                                                          String registryType) {
+        RemoteConfigurationRegistryConfig rc = EasyMock.createNiceMock(RemoteConfigurationRegistryConfig.class);
+        EasyMock.expect(rc.getRegistryType()).andReturn(registryType).anyTimes();
+        EasyMock.expect(rc.getName()).andReturn(entryName).anyTimes();
+        EasyMock.expect(rc.isSecureRegistry()).andReturn(true).anyTimes();
+        EasyMock.expect(rc.getAuthType()).andReturn("kerberos").anyTimes();
+        EasyMock.expect(rc.getPrincipal()).andReturn(principal).anyTimes();
+        EasyMock.expect(rc.getKeytab()).andReturn(keyTabPath).anyTimes();
+        EasyMock.expect(rc.isUseKeyTab()).andReturn(useKeyTab != null ? useKeyTab : true).anyTimes();
+        EasyMock.expect(rc.isUseTicketCache()).andReturn(useTicketCache != null ? useTicketCache : false).anyTimes();
+        EasyMock.replay(rc);
+        return rc;
+    }
+
+    private static void validateDigestContext(RemoteConfigurationRegistryJAASConfig config,
+                                              String                                entryName,
+                                              String                                loginModule,
+                                              String                                principal,
+                                              String                                password) throws Exception {
+        AppConfigurationEntry[] myContextEntries = config.getAppConfigurationEntry(entryName);
+        assertNotNull(myContextEntries);
+        assertEquals(1, myContextEntries.length);
+        AppConfigurationEntry entry = myContextEntries[0];
+        assertTrue(entry.getLoginModuleName().equals(loginModule));
+        Map<String, ?> entryOpts = entry.getOptions();
+        assertEquals(principal, entryOpts.get("username"));
+        assertEquals(password, entryOpts.get("password"));
+    }
+
+    private static void validateKerberosContext(RemoteConfigurationRegistryJAASConfig config,
+                                                String                                entryName,
+                                                String                                principal,
+                                                String                                keyTab,
+                                                boolean                               useKeyTab,
+                                                boolean                               useTicketCache) throws Exception {
+        AppConfigurationEntry[] myContextEntries = config.getAppConfigurationEntry(entryName);
+        assertNotNull(myContextEntries);
+        assertEquals(1, myContextEntries.length);
+        AppConfigurationEntry entry = myContextEntries[0];
+        assertTrue(entry.getLoginModuleName().endsWith(".security.auth.module.Krb5LoginModule"));
+        Map<String, ?> entryOpts = entry.getOptions();
+        assertEquals(principal, entryOpts.get("principal"));
+        assertEquals(keyTab, entryOpts.get("keyTab"));
+        assertEquals(useKeyTab, Boolean.valueOf((String)entryOpts.get("isUseKeyTab")));
+        assertEquals(useTicketCache, Boolean.valueOf((String)entryOpts.get("isUseTicketCache")));
+    }
+}

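Once RemoteConfigurationRegistryJAASConfig.configure(...) has installed itself as the process-wide JAAS Configuration (the tests above reset it in their finally blocks), entries are looked up through the standard javax.security.auth.login API, which is exactly what the validate helpers exercise. A minimal sketch reusing the entry name from the first test:

    // Hedged sketch; assumes configure(...) has already run, as in the tests above.
    Configuration jaas = Configuration.getConfiguration();
    AppConfigurationEntry[] entries = jaas.getAppConfigurationEntry("my_digest_context");
    if (entries != null && entries.length == 1) {
        // For a digest entry the options map carries "username" and "password",
        // the password having been resolved from the credential alias by the AliasService.
        Object username = entries[0].getOptions().get("username");
    }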
http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
index 66fb83c..5cfaf36 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
@@ -42,6 +42,16 @@ public interface GatewayConfig {
   public static final String SIGNING_KEYSTORE_NAME = "gateway.signing.keystore.name";
   public static final String SIGNING_KEY_ALIAS = "gateway.signing.key.alias";
 
+  String REMOTE_CONFIG_REGISTRY_TYPE = "type";
+  String REMOTE_CONFIG_REGISTRY_ADDRESS = "address";
+  String REMOTE_CONFIG_REGISTRY_NAMESPACE = "namespace";
+  String REMOTE_CONFIG_REGISTRY_AUTH_TYPE = "authType";
+  String REMOTE_CONFIG_REGISTRY_PRINCIPAL = "principal";
+  String REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS = "credentialAlias";
+  String REMOTE_CONFIG_REGISTRY_KEYTAB = "keytab";
+  String REMOTE_CONFIG_REGISTRY_USE_KEYTAB = "useKeytab";
+  String REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE = "useTicketCache";
+
   /**
    * The location of the gateway configuration.
    * Subdirectories will be: topologies
@@ -76,6 +86,10 @@ public interface GatewayConfig {
 
   String getGatewayPath();
 
+  String getGatewayProvidersConfigDir();
+
+  String getGatewayDescriptorsDir();
+
   String getGatewayTopologyDir();
 
   String getGatewaySecurityDir();
@@ -299,4 +313,24 @@ public interface GatewayConfig {
    * @return
    */
   boolean isGatewayServerHeaderEnabled();
+
+  /**
+   * @return The list of the names of any remote registry configurations defined herein.
+   */
+  List<String> getRemoteRegistryConfigurationNames();
+
+  /**
+   *
+   * @param name The name of the remote registry configuration
+   *
+   * @return The configuration associated with the specified name.
+   */
+  String getRemoteRegistryConfiguration(String name);
+
+  /**
+   *
+   * @return The name of a remote configuration registry client
+   */
+  String getRemoteConfigurationMonitorClientName();
+
 }

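The new REMOTE_CONFIG_REGISTRY_* keys above name the per-registry properties that the DefaultRemoteConfigurationRegistries implementation shown earlier extracts from each gateway-level registry definition. A hedged sketch of such a definition string and the parsing it undergoes; the ';' and '=' delimiters are assumptions, since the PROPERTY_DELIM and PROPERTY_VALUE_DELIM constants are not visible in this diff:

    // Illustrative registry definition as it might appear in gateway-site (values are placeholders).
    String definition = "type=ZooKeeper;address=host1:2181,host2:2181;authType=Digest;"
                      + "principal=knox;credentialAlias=zkCredential";

    Map<String, String> props = new HashMap<>();
    for (String prop : definition.split(";")) {        // PROPERTY_DELIM assumed to be ";"
        String[] kv = prop.split("=");                 // PROPERTY_VALUE_DELIM assumed to be "="
        props.put(kv[0], kv.length > 1 ? kv[1] : null);
    }
    // props.get("type") -> "ZooKeeper", props.get("authType") -> "Digest", and so on.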
http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/GatewayServices.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/GatewayServices.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/GatewayServices.java
index 2e6227c..2894bbc 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/GatewayServices.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/GatewayServices.java
@@ -39,6 +39,8 @@ public interface GatewayServices extends Service, ProviderDeploymentContributor
   public static final String SERVICE_DEFINITION_REGISTRY = "ServiceDefinitionRegistry";
   public static final String METRICS_SERVICE = "MetricsService";
 
+  String REMOTE_REGISTRY_CLIENT_SERVICE = "RemoteConfigRegistryClientService";
+
   public abstract Collection<String> getServiceNames();
 
   public abstract <T> T getService( String serviceName );

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java
new file mode 100644
index 0000000..6fbf410
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClient.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.services.config.client;
+
+import java.util.List;
+
+public interface RemoteConfigurationRegistryClient {
+
+    String getAddress();
+
+    boolean entryExists(String path);
+
+    List<EntryACL> getACL(String path);
+
+    List<String> listChildEntries(String path);
+
+    String getEntryData(String path);
+
+    String getEntryData(String path, String encoding);
+
+    void createEntry(String path);
+
+    void createEntry(String path, String data);
+
+    void createEntry(String path, String data, String encoding);
+
+    int setEntryData(String path, String data);
+
+    int setEntryData(String path, String data, String encoding);
+
+    void deleteEntry(String path);
+
+    void addChildEntryListener(String path, ChildEntryListener listener) throws Exception;
+
+    void addEntryListener(String path, EntryListener listener) throws Exception;
+
+    void removeEntryListener(String path) throws Exception;
+
+    interface ChildEntryListener {
+
+        enum Type {
+            ADDED,
+            REMOVED,
+            UPDATED
+        }
+
+        void childEvent(RemoteConfigurationRegistryClient client, ChildEntryListener.Type type, String path);
+    }
+
+    interface EntryListener {
+        void entryChanged(RemoteConfigurationRegistryClient client, String path, byte[] data);
+    }
+
+    interface EntryACL {
+        String getId();
+        String getType();
+        Object getPermissions();
+    }
+
+}

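A short usage sketch for the client interface above; the client instance would typically come from the RemoteConfigurationRegistryClientService introduced in the next file, and both the client name and the entry path here are hypothetical:

    // Hedged sketch; "sandbox-zookeeper-client" and the path are illustrative only.
    RemoteConfigurationRegistryClient client = clientService.get("sandbox-zookeeper-client");
    String path = "/knox/config/shared-providers/ambari-cluster-policy.xml";
    if (!client.entryExists(path)) {
        client.createEntry(path, "<gateway>...</gateway>");
    }
    String data = client.getEntryData(path);
    client.addEntryListener(path, (c, entryPath, bytes) -> {
        // React to remote changes; bytes is the updated entry content (null semantics are an assumption).
    });

Note that addEntryListener declares a checked Exception, so the surrounding method would need to handle or propagate it.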
http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClientService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClientService.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClientService.java
new file mode 100644
index 0000000..1467f75
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/config/client/RemoteConfigurationRegistryClientService.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.services.config.client;
+
+import org.apache.hadoop.gateway.services.Service;
+import org.apache.hadoop.gateway.services.security.AliasService;
+
+public interface RemoteConfigurationRegistryClientService extends Service {
+
+    void setAliasService(AliasService aliasService);
+
+    RemoteConfigurationRegistryClient get(String l);
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitor.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitor.java
new file mode 100644
index 0000000..82c5809
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitor.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.monitor;
+
+public interface RemoteConfigurationMonitor {
+
+    void start() throws Exception;
+
+    void stop() throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java
new file mode 100644
index 0000000..d19dace
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorProvider.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.monitor;
+
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+
+public interface RemoteConfigurationMonitorProvider {
+
+    /**
+     *
+     * @param config        The gateway configuration.
+     * @param clientService The RemoteConfigurationRegistryClientService for accessing the remote configuration.
+     *
+     * @return A RemoteConfigurationMonitor for keeping the local config in sync with the remote config
+     */
+    RemoteConfigurationMonitor newInstance(GatewayConfig config, RemoteConfigurationRegistryClientService clientService);
+
+}
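
A minimal sketch of how a monitor might be obtained through this provider SPI, assuming a ServiceLoader-based lookup; the actual wiring in Knox (a RemoteConfigurationMonitorFactory is used in the test further below) may differ.

package org.apache.hadoop.gateway.examples; // hypothetical package, for illustration only

import java.util.ServiceLoader;

import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
import org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitor;
import org.apache.hadoop.gateway.topology.monitor.RemoteConfigurationMonitorProvider;

class MonitorBootstrapSketch {
    // Starts the first monitor any discovered provider is willing to create, or returns null.
    static RemoteConfigurationMonitor startMonitor(GatewayConfig config,
                                                   RemoteConfigurationRegistryClientService clients) throws Exception {
        for (RemoteConfigurationMonitorProvider provider :
                 ServiceLoader.load(RemoteConfigurationMonitorProvider.class)) {
            RemoteConfigurationMonitor monitor = provider.newInstance(config, clients);
            if (monitor != null) {
                monitor.start(); // begins syncing local provider configs/descriptors with the remote registry
                return monitor;
            }
        }
        return null;
    }
}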

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
index ff9a877..f7ea633 100644
--- a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
+++ b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -25,6 +25,7 @@ import java.io.File;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -614,4 +615,29 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
   public boolean isClientAuthWanted() {
     return false;
   }
+
+  @Override
+  public String getGatewayProvidersConfigDir() {
+    return null;
+  }
+
+  @Override
+  public String getGatewayDescriptorsDir() {
+    return null;
+  }
+
+  @Override
+  public List<String> getRemoteRegistryConfigurationNames() {
+    return Collections.emptyList();
+  }
+
+  @Override
+  public String getRemoteRegistryConfiguration(String s) {
+    return null;
+  }
+
+  @Override
+  public String getRemoteConfigurationMonitorClientName() {
+    return null;
+  }
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
index 076c312..cf446a3 100644
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
+++ b/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
@@ -50,7 +50,7 @@ public class TestUtils {
   private static Logger LOG = Logger.getLogger(TestUtils.class);
 
   public static final long SHORT_TIMEOUT = 1000L;
-  public static final long MEDIUM_TIMEOUT = 20 * 1000L;
+  public static final long MEDIUM_TIMEOUT = 30 * 1000L;
   public static final long LONG_TIMEOUT = 60 * 1000L;
 
   public static String getResourceName( Class clazz, String name ) {

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-test/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-test/pom.xml b/gateway-test/pom.xml
index 3b622aa..4eb5093 100644
--- a/gateway-test/pom.xml
+++ b/gateway-test/pom.xml
@@ -132,6 +132,12 @@
             <scope>test</scope>
         </dependency>
 
+        <dependency>
+            <groupId>org.apache.curator</groupId>
+            <artifactId>curator-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+
     </dependencies>
 
     <build>

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
new file mode 100644
index 0000000..14d98a9
--- /dev/null
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/topology/monitor/RemoteConfigurationMonitorTest.java
@@ -0,0 +1,397 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.monitor;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.curator.test.InstanceSpec;
+import org.apache.curator.test.TestingCluster;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.service.config.remote.zk.ZooKeeperClientService;
+import org.apache.hadoop.gateway.service.config.remote.zk.ZooKeeperClientServiceProvider;
+import org.apache.hadoop.gateway.services.config.client.RemoteConfigurationRegistryClientService;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.test.TestUtils;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+import org.easymock.EasyMock;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test the RemoteConfigurationMonitor functionality with SASL configured and znode ACLs applied.
+ *
+ * The expected implementation is org.apache.hadoop.gateway.topology.monitor.zk.ZooKeeperConfigMonitor
+ *
+ * Digest-based SASL is used for this test, but since that is dictated solely by the JAAS config, Kerberos-based SASL
+ * should work in exactly the same way, simply by modifying the SASL config.
+ */
+public class RemoteConfigurationMonitorTest {
+
+    private static final String PATH_KNOX = "/knox";
+    private static final String PATH_KNOX_CONFIG = PATH_KNOX + "/config";
+    private static final String PATH_KNOX_PROVIDERS = PATH_KNOX_CONFIG + "/shared-providers";
+    private static final String PATH_KNOX_DESCRIPTORS = PATH_KNOX_CONFIG + "/descriptors";
+
+    private static final String ZK_USERNAME = "testsasluser";
+    private static final String ZK_PASSWORD = "testsaslpwd";
+
+    private static File testTmp;
+    private static File providersDir;
+    private static File descriptorsDir;
+
+    private static TestingCluster zkCluster;
+
+    private static CuratorFramework client;
+
+    @BeforeClass
+    public static void setupSuite() throws Exception {
+        testTmp = TestUtils.createTempDir(RemoteConfigurationMonitorTest.class.getName());
+        File confDir   = TestUtils.createTempDir(testTmp + "/conf");
+        providersDir   = TestUtils.createTempDir(confDir + "/shared-providers");
+        descriptorsDir = TestUtils.createTempDir(confDir + "/descriptors");
+
+        configureAndStartZKCluster();
+    }
+
+    /**
+     * Create and persist a JAAS configuration file, defining the SASL config for both the ZooKeeper cluster instances
+     * and ZooKeeper clients.
+     *
+     * @param username The digest username
+     * @param password The digest password
+     *
+     * @return The JAAS configuration file
+     */
+    private static File setupDigestSaslConfig(String username, String password) throws Exception {
+        File saslConfigFile = new File(testTmp, "server-jaas.conf");
+        FileWriter fw = new FileWriter(saslConfigFile);
+        fw.write("Server {\n" +
+                "    org.apache.zookeeper.server.auth.DigestLoginModule required\n" +
+                "    user_" + username + " =\"" + password + "\";\n" +
+                "};\n"+
+                "Client {\n" +
+                "    org.apache.zookeeper.server.auth.DigestLoginModule required\n" +
+                "    username=\"" + username + "\"\n" +
+                "    password=\"" + password + "\";\n" +
+                "};\n");
+        fw.close();
+        return saslConfigFile;
+    }
+
+    /**
+     * Configure and start the ZooKeeper test cluster, and create the znodes monitored by the RemoteConfigurationMonitor.
+     */
+    private static void configureAndStartZKCluster() throws Exception {
+        // Configure security for the ZK cluster instances
+        Map<String, Object> customInstanceSpecProps = new HashMap<>();
+        customInstanceSpecProps.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
+        customInstanceSpecProps.put("requireClientAuthScheme", "sasl");
+
+        // Define the test cluster
+        List<InstanceSpec> instanceSpecs = new ArrayList<>();
+        for (int i = 0 ; i < 3 ; i++) {
+            InstanceSpec is = new InstanceSpec(null, -1, -1, -1, false, (i+1), -1, -1, customInstanceSpecProps);
+            instanceSpecs.add(is);
+        }
+        zkCluster = new TestingCluster(instanceSpecs);
+
+        // Configure auth for the ZooKeeper servers and the clients
+        File saslConfigFile = setupDigestSaslConfig(ZK_USERNAME, ZK_PASSWORD);
+
+        // This system property is used by the ZooKeeper cluster instances, the test driver client, and the
+        // RemoteConfigurationMonitor implementation for SASL authentication/authorization
+        System.setProperty("java.security.auth.login.config", saslConfigFile.getAbsolutePath());
+
+        // Start the cluster
+        zkCluster.start();
+
+        // Create the client for the test cluster
+        client = CuratorFrameworkFactory.builder()
+                .connectString(zkCluster.getConnectString())
+                .retryPolicy(new ExponentialBackoffRetry(100, 3))
+                .build();
+        assertNotNull(client);
+        client.start();
+
+        // Create the knox config paths with an ACL for the sasl user configured for the client
+        List<ACL> acls = new ArrayList<>();
+        acls.add(new ACL(ZooDefs.Perms.ALL, new Id("sasl", ZK_USERNAME)));
+
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_DESCRIPTORS);
+        assertNotNull("Failed to create node:" + PATH_KNOX_DESCRIPTORS,
+                client.checkExists().forPath(PATH_KNOX_DESCRIPTORS));
+        client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).withACL(acls).forPath(PATH_KNOX_PROVIDERS);
+        assertNotNull("Failed to create node:" + PATH_KNOX_PROVIDERS,
+                client.checkExists().forPath(PATH_KNOX_PROVIDERS));
+    }
+
+    @AfterClass
+    public static void tearDownSuite() throws Exception {
+        // Clean up the ZK nodes, and close the client
+        if (client != null) {
+            client.delete().deletingChildrenIfNeeded().forPath(PATH_KNOX);
+            client.close();
+        }
+
+        // Shutdown the ZK cluster
+        zkCluster.close();
+
+        // Delete the working dir (File.delete() cannot remove a non-empty directory)
+        FileUtils.deleteQuietly(testTmp);
+    }
+
+    @Test
+    public void testZooKeeperConfigMonitorSASL() throws Exception {
+        final String configMonitorName = "zkConfigClient";
+
+        // Setup the base GatewayConfig mock
+        GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
+        EasyMock.expect(gc.getGatewayProvidersConfigDir()).andReturn(providersDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getGatewayDescriptorsDir()).andReturn(descriptorsDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(gc.getRemoteRegistryConfigurationNames())
+                .andReturn(Collections.singletonList(configMonitorName))
+                .anyTimes();
+        final String registryConfig =
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_TYPE + "=" + ZooKeeperClientService.TYPE + ";" +
+                            GatewayConfig.REMOTE_CONFIG_REGISTRY_ADDRESS + "=" + zkCluster.getConnectString();
+        EasyMock.expect(gc.getRemoteRegistryConfiguration(configMonitorName))
+                .andReturn(registryConfig).anyTimes();
+        EasyMock.expect(gc.getRemoteConfigurationMonitorClientName()).andReturn(configMonitorName).anyTimes();
+        EasyMock.replay(gc);
+
+        AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+        EasyMock.replay(aliasService);
+
+        RemoteConfigurationRegistryClientService clientService = (new ZooKeeperClientServiceProvider()).newInstance();
+        clientService.setAliasService(aliasService);
+        clientService.init(gc, Collections.emptyMap());
+        clientService.start();
+
+        RemoteConfigurationMonitorFactory.setClientService(clientService);
+
+        RemoteConfigurationMonitor cm = RemoteConfigurationMonitorFactory.get(gc);
+        assertNotNull("Failed to load RemoteConfigurationMonitor", cm);
+
+        try {
+            cm.start();
+        } catch (Exception e) {
+            fail("Failed to start monitor: " + e.getMessage());
+        }
+
+        try {
+            final String pc_one_znode = getProviderPath("providers-config1.xml");
+            final File pc_one         = new File(providersDir, "providers-config1.xml");
+            final String pc_two_znode = getProviderPath("providers-config2.xml");
+            final File pc_two         = new File(providersDir, "providers-config2.xml");
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(pc_one_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_one.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_one));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(pc_two_znode, TEST_PROVIDERS_CONFIG_2.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_two.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_2, FileUtils.readFileToString(pc_two));
+
+            client.setData().forPath(pc_two_znode, TEST_PROVIDERS_CONFIG_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(pc_two.exists());
+            assertEquals(TEST_PROVIDERS_CONFIG_1, FileUtils.readFileToString(pc_two));
+
+            client.delete().forPath(pc_two_znode);
+            Thread.sleep(100);
+            assertFalse(pc_two.exists());
+
+            client.delete().forPath(pc_one_znode);
+            Thread.sleep(100);
+            assertFalse(pc_one.exists());
+
+            final String desc_one_znode   = getDescriptorPath("test1.json");
+            final String desc_two_znode   = getDescriptorPath("test2.json");
+            final String desc_three_znode = getDescriptorPath("test3.json");
+            final File desc_one           = new File(descriptorsDir, "test1.json");
+            final File desc_two           = new File(descriptorsDir, "test2.json");
+            final File desc_three         = new File(descriptorsDir, "test3.json");
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_one_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_one.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_one));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_two_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_two.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_two));
+
+            client.setData().forPath(desc_two_znode, TEST_DESCRIPTOR_2.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_two.exists());
+            assertEquals(TEST_DESCRIPTOR_2, FileUtils.readFileToString(desc_two));
+
+            client.create().withMode(CreateMode.PERSISTENT).forPath(desc_three_znode, TEST_DESCRIPTOR_1.getBytes());
+            Thread.sleep(100);
+            assertTrue(desc_three.exists());
+            assertEquals(TEST_DESCRIPTOR_1, FileUtils.readFileToString(desc_three));
+
+            client.delete().forPath(desc_two_znode);
+            Thread.sleep(100);
+            assertFalse("Expected test2.json to have been deleted.", desc_two.exists());
+
+            client.delete().forPath(desc_three_znode);
+            Thread.sleep(100);
+            assertFalse(desc_three.exists());
+
+            client.delete().forPath(desc_one_znode);
+            Thread.sleep(100);
+            assertFalse(desc_one.exists());
+        } finally {
+            cm.stop();
+        }
+    }
+
+    private static String getDescriptorPath(String descriptorName) {
+        return PATH_KNOX_DESCRIPTORS + "/" + descriptorName;
+    }
+
+    private static String getProviderPath(String providerConfigName) {
+        return PATH_KNOX_PROVIDERS + "/" + providerConfigName;
+    }
+
+
+    private static final String TEST_PROVIDERS_CONFIG_1 =
+                    "<gateway>\n" +
+                    "    <provider>\n" +
+                    "        <role>identity-assertion</role>\n" +
+                    "        <name>Default</name>\n" +
+                    "        <enabled>true</enabled>\n" +
+                    "    </provider>\n" +
+                    "    <provider>\n" +
+                    "        <role>hostmap</role>\n" +
+                    "        <name>static</name>\n" +
+                    "        <enabled>true</enabled>\n" +
+                    "        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+                    "    </provider>\n" +
+                    "</gateway>\n";
+
+    private static final String TEST_PROVIDERS_CONFIG_2 =
+                    "<gateway>\n" +
+                    "    <provider>\n" +
+                    "        <role>authentication</role>\n" +
+                    "        <name>ShiroProvider</name>\n" +
+                    "        <enabled>true</enabled>\n" +
+                    "        <param>\n" +
+                    "            <name>sessionTimeout</name>\n" +
+                    "            <value>30</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapRealm</name>\n" +
+                    "            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapContextFactory</name>\n" +
+                    "            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapRealm.contextFactory</name>\n" +
+                    "            <value>$ldapContextFactory</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapRealm.userDnTemplate</name>\n" +
+                    "            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapRealm.contextFactory.url</name>\n" +
+                    "            <value>ldap://localhost:33389</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+                    "            <value>simple</value>\n" +
+                    "        </param>\n" +
+                    "        <param>\n" +
+                    "            <name>urls./**</name>\n" +
+                    "            <value>authcBasic</value>\n" +
+                    "        </param>\n" +
+                    "    </provider>\n" +
+                    "</gateway>\n";
+
+    private static final String TEST_DESCRIPTOR_1 =
+                    "{\n" +
+                    "  \"discovery-type\":\"AMBARI\",\n" +
+                    "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
+                    "  \"discovery-user\":\"maria_dev\",\n" +
+                    "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
+                    "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
+                    "  \"cluster\":\"Sandbox\",\n" +
+                    "  \"services\":[\n" +
+                    "    {\"name\":\"NODEUI\"},\n" +
+                    "    {\"name\":\"YARNUI\"},\n" +
+                    "    {\"name\":\"HDFSUI\"},\n" +
+                    "    {\"name\":\"OOZIEUI\"},\n" +
+                    "    {\"name\":\"HBASEUI\"},\n" +
+                    "    {\"name\":\"NAMENODE\"},\n" +
+                    "    {\"name\":\"JOBTRACKER\"},\n" +
+                    "    {\"name\":\"WEBHDFS\"},\n" +
+                    "    {\"name\":\"WEBHCAT\"},\n" +
+                    "    {\"name\":\"OOZIE\"},\n" +
+                    "    {\"name\":\"WEBHBASE\"},\n" +
+                    "    {\"name\":\"RESOURCEMANAGER\"},\n" +
+                    "    {\"name\":\"AMBARI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]},\n" +
+                    "    {\"name\":\"AMBARIUI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]}\n" +
+                    "  ]\n" +
+                    "}\n";
+
+    private static final String TEST_DESCRIPTOR_2 =
+                    "{\n" +
+                    "  \"discovery-type\":\"AMBARI\",\n" +
+                    "  \"discovery-address\":\"http://sandbox.hortonworks.com:8080\",\n" +
+                    "  \"discovery-user\":\"maria_dev\",\n" +
+                    "  \"discovery-pwd-alias\":\"sandbox.ambari.discovery.password\",\n" +
+                    "  \"provider-config-ref\":\"sandbox-providers.xml\",\n" +
+                    "  \"cluster\":\"Sandbox\",\n" +
+                    "  \"services\":[\n" +
+                    "    {\"name\":\"NAMENODE\"},\n" +
+                    "    {\"name\":\"JOBTRACKER\"},\n" +
+                    "    {\"name\":\"WEBHDFS\"},\n" +
+                    "    {\"name\":\"WEBHCAT\"},\n" +
+                    "    {\"name\":\"OOZIE\"},\n" +
+                    "    {\"name\":\"WEBHBASE\"},\n" +
+                    "    {\"name\":\"RESOURCEMANAGER\"}\n" +
+                    "  ]\n" +
+                    "}\n";
+
+}
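
As the class comment notes, only the JAAS configuration distinguishes digest-based SASL from Kerberos-based SASL here. A hedged sketch of a corresponding Kerberos client entry, expressed in the same string-constant style the test uses for the digest config; the keytab path and principal are placeholders, not values from this codebase.

class KerberosJaasConfigSketch {
    // Hypothetical Kerberos variant of the "Client" section written by setupDigestSaslConfig().
    static final String KRB5_CLIENT_JAAS =
            "Client {\n" +
            "    com.sun.security.auth.module.Krb5LoginModule required\n" +
            "    useKeyTab=true\n" +
            "    keyTab=\"/etc/security/keytabs/knox.service.keytab\"\n" +   // placeholder keytab path
            "    storeKey=true\n" +
            "    principal=\"knox/gateway.example.com@EXAMPLE.COM\";\n" +    // placeholder principal
            "};\n";
}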

http://git-wip-us.apache.org/repos/asf/knox/blob/5af2413c/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 51ff1a4..a55acd6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -83,6 +83,7 @@
         <module>gateway-service-tgs</module>
         <module>gateway-service-rm</module>
         <module>gateway-service-storm</module>
+        <module>gateway-service-remoteconfig</module>
         <module>gateway-service-definitions</module>
         <module>gateway-shell</module>
         <module>gateway-shell-launcher</module>
@@ -629,6 +630,11 @@
             </dependency>
             <dependency>
                 <groupId>${gateway-group}</groupId>
+                <artifactId>gateway-service-remoteconfig</artifactId>
+                <version>${gateway-version}</version>
+            </dependency>
+            <dependency>
+                <groupId>${gateway-group}</groupId>
                 <artifactId>gateway-service-test</artifactId>
                 <version>${gateway-version}</version>
             </dependency>
@@ -1136,12 +1142,17 @@
             <dependency>
                 <groupId>org.apache.zookeeper</groupId>
                 <artifactId>zookeeper</artifactId>
-                <version>3.4.6</version>
+                <version>3.4.10</version>
             </dependency>
             <dependency>
                 <groupId>org.apache.curator</groupId>
                 <artifactId>curator-framework</artifactId>
-                <version>2.6.0</version>
+                <version>4.0.0</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.curator</groupId>
+                <artifactId>curator-client</artifactId>
+                <version>4.0.0</version>
             </dependency>
 
             <!-- Html pull parser.  EPLv1 license -->
@@ -1333,11 +1344,10 @@
             <dependency>
                 <groupId>org.apache.curator</groupId>
                 <artifactId>curator-test</artifactId>
-                <version>2.6.0</version>
+                <version>2.12.0</version>
                 <scope>test</scope>
             </dependency>
 
-
         </dependencies>
     </dependencyManagement>
 


[29/49] knox git commit: KNOX-1013 - Monitor Ambari for Cluster Topology changes (Phil Zampino via lmccay)

Posted by mo...@apache.org.
KNOX-1013 - Monitor Ambari for Cluster Topology changes (Phil Zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/a874f399
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/a874f399
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/a874f399

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: a874f399e05835c359ded4ee2c7b822c7baa3231
Parents: 13287d2
Author: Larry McCay <lm...@hortonworks.com>
Authored: Tue Dec 5 15:07:16 2017 -0500
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Tue Dec 5 15:07:32 2017 -0500

----------------------------------------------------------------------
 .../discovery/ambari/AmbariClientCommon.java    | 102 ++++
 .../discovery/ambari/AmbariCluster.java         |   5 +
 ...bariClusterConfigurationMonitorProvider.java |  35 ++
 .../ambari/AmbariConfigurationMonitor.java      | 525 +++++++++++++++++++
 .../ambari/AmbariServiceDiscovery.java          | 228 ++++----
 .../ambari/AmbariServiceDiscoveryMessages.java  |  51 +-
 .../topology/discovery/ambari/RESTInvoker.java  | 136 +++++
 .../ambari/ServiceURLPropertyConfig.java        |   2 +-
 ...iscovery.ClusterConfigurationMonitorProvider |  19 +
 .../ambari/AmbariConfigurationMonitorTest.java  | 319 +++++++++++
 .../ambari/AmbariServiceDiscoveryTest.java      |  28 +-
 gateway-release/home/conf/gateway-site.xml      |  12 +
 .../apache/hadoop/gateway/GatewayMessages.java  |  16 +
 .../gateway/config/impl/GatewayConfigImpl.java  |  15 +
 .../services/DefaultGatewayServices.java        |  10 +
 ...faultClusterConfigurationMonitorService.java |  81 +++
 .../topology/impl/DefaultTopologyService.java   |  57 +-
 .../simple/SimpleDescriptorHandler.java         |   9 +-
 .../hadoop/gateway/config/GatewayConfig.java    |  18 +-
 .../gateway/services/GatewayServices.java       |   2 +
 .../ClusterConfigurationMonitorService.java     |  43 ++
 .../discovery/ClusterConfigurationMonitor.java  |  48 ++
 .../ClusterConfigurationMonitorProvider.java    |  27 +
 .../hadoop/gateway/GatewayTestConfig.java       |  10 +
 24 files changed, 1633 insertions(+), 165 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClientCommon.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClientCommon.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClientCommon.java
new file mode 100644
index 0000000..a2bf4ea
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClientCommon.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.util.HashMap;
+import java.util.Map;
+
+class AmbariClientCommon {
+
+    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+    static final String AMBARI_HOSTROLES_URI =
+                                    AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+    static final String AMBARI_SERVICECONFIGS_URI =
+                                    AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
+    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    private RESTInvoker restClient;
+
+
+    AmbariClientCommon(AliasService aliasService) {
+        this(new RESTInvoker(aliasService));
+    }
+
+
+    AmbariClientCommon(RESTInvoker restInvoker) {
+        this.restClient = restInvoker;
+    }
+
+
+
+    Map<String, Map<String, AmbariCluster.ServiceConfiguration>> getActiveServiceConfigurations(String clusterName,
+                                                                                                ServiceDiscoveryConfig config) {
+        return getActiveServiceConfigurations(config.getAddress(),
+                                              clusterName,
+                                              config.getUser(),
+                                              config.getPasswordAlias());
+    }
+
+
+    Map<String, Map<String, AmbariCluster.ServiceConfiguration>> getActiveServiceConfigurations(String discoveryAddress,
+                                                                                                String clusterName,
+                                                                                                String discoveryUser,
+                                                                                                String discoveryPwdAlias) {
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+
+        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+
+        JSONObject serviceConfigsJSON = restClient.invoke(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+        if (serviceConfigsJSON != null) {
+            // Process the service configurations
+            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+            for (Object serviceConfig : serviceConfigs) {
+                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+                for (Object configuration : configurations) {
+                    String configType = (String) ((JSONObject) configuration).get("type");
+                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+                    Map<String, String> configProps = new HashMap<>();
+                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+                    for (String propertyName : configProperties.keySet()) {
+                        configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
+                    }
+                    if (!serviceConfigurations.containsKey(serviceName)) {
+                        serviceConfigurations.put(serviceName, new HashMap<>());
+                    }
+                    serviceConfigurations.get(serviceName).put(configType,
+                                                               new AmbariCluster.ServiceConfiguration(configType,
+                                                                                                      configVersion,
+                                                                                                      configProps));
+                }
+            }
+        }
+
+        return serviceConfigurations;
+    }
+
+
+}
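
For orientation, a short sketch of consuming the nested map returned by getActiveServiceConfigurations(); the address, cluster name, user, and password alias are placeholders, and the sketch is assumed to live in the same package since AmbariClientCommon and AmbariCluster are package-private.

package org.apache.hadoop.gateway.topology.discovery.ambari;

import java.util.Map;

import org.apache.hadoop.gateway.services.security.AliasService;

class ActiveServiceConfigSketch {
    // Prints the active configuration version for every (service, config type) pair in a cluster.
    static void dumpVersions(AliasService aliasService) {
        AmbariClientCommon ambari = new AmbariClientCommon(aliasService);
        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> configs =
                ambari.getActiveServiceConfigurations("http://ambari.example.com:8080", // placeholder address
                                                      "Sandbox",                        // placeholder cluster name
                                                      "admin",                          // placeholder user
                                                      "ambari.discovery.password");     // placeholder password alias
        for (Map.Entry<String, Map<String, AmbariCluster.ServiceConfiguration>> service : configs.entrySet()) {
            for (AmbariCluster.ServiceConfiguration config : service.getValue().values()) {
                System.out.println(service.getKey() + " / " + config.getType() + " -> version " + config.getVersion());
            }
        }
    }
}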

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
index c841d9c..1d308cc 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -63,6 +63,11 @@ class AmbariCluster implements ServiceDiscovery.Cluster {
     }
 
 
+    Map<String, Map<String, ServiceConfiguration>> getServiceConfigurations() {
+        return serviceConfigurations;
+    }
+
+
     Map<String, AmbariComponent> getComponents() {
         return components;
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java
new file mode 100644
index 0000000..3b31124
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariClusterConfigurationMonitorProvider.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
+import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider;
+
+public class AmbariClusterConfigurationMonitorProvider implements ClusterConfigurationMonitorProvider {
+
+    @Override
+    public String getType() {
+        return AmbariConfigurationMonitor.getType();
+    }
+
+    @Override
+    public ClusterConfigurationMonitor newInstance(GatewayConfig config, AliasService aliasService) {
+        return new AmbariConfigurationMonitor(config, aliasService);
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java
new file mode 100644
index 0000000..e4b5e43
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitor.java
@@ -0,0 +1,525 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+
+class AmbariConfigurationMonitor implements ClusterConfigurationMonitor {
+
+    private static final String TYPE = "Ambari";
+
+    private static final String CLUSTERS_DATA_DIR_NAME = "clusters";
+
+    private static final String PERSISTED_FILE_COMMENT = "Generated File. Do Not Edit!";
+
+    private static final String PROP_CLUSTER_PREFIX = "cluster.";
+    private static final String PROP_CLUSTER_SOURCE = PROP_CLUSTER_PREFIX + "source";
+    private static final String PROP_CLUSTER_NAME   = PROP_CLUSTER_PREFIX + "name";
+    private static final String PROP_CLUSTER_USER   = PROP_CLUSTER_PREFIX + "user";
+    private static final String PROP_CLUSTER_ALIAS  = PROP_CLUSTER_PREFIX + "pwd.alias";
+
+    static final String INTERVAL_PROPERTY_NAME = "org.apache.hadoop.gateway.topology.discovery.ambari.monitor.interval";
+
+
+    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    // Ambari address
+    //    clusterName -> ServiceDiscoveryConfig
+    //
+    Map<String, Map<String, ServiceDiscoveryConfig>> clusterMonitorConfigurations = new HashMap<>();
+
+    // Ambari address
+    //    clusterName
+    //        configType -> version
+    //
+    Map<String, Map<String, Map<String, String>>> ambariClusterConfigVersions = new HashMap<>();
+
+    ReadWriteLock configVersionsLock = new ReentrantReadWriteLock();
+
+    private List<ConfigurationChangeListener> changeListeners = new ArrayList<>();
+
+    private AmbariClientCommon ambariClient;
+
+    PollingConfigAnalyzer internalMonitor;
+
+    GatewayConfig gatewayConfig = null;
+
+    static String getType() {
+        return TYPE;
+    }
+
+    AmbariConfigurationMonitor(GatewayConfig config, AliasService aliasService) {
+        this.gatewayConfig   = config;
+        this.ambariClient    = new AmbariClientCommon(aliasService);
+        this.internalMonitor = new PollingConfigAnalyzer(this);
+
+        // Override the default polling interval if it has been configured
+        int interval = config.getClusterMonitorPollingInterval(getType());
+        if (interval > 0) {
+            setPollingInterval(interval);
+        }
+
+        init();
+    }
+
+    @Override
+    public void setPollingInterval(int interval) {
+        internalMonitor.setInterval(interval);
+    }
+
+    private void init() {
+        loadDiscoveryConfiguration();
+        loadClusterVersionData();
+    }
+
+    /**
+     * Load any previously-persisted service discovery configurations.
+     * This is necessary for checking previously-deployed topologies.
+     */
+    private void loadDiscoveryConfiguration() {
+        File persistenceDir = getPersistenceDir();
+        if (persistenceDir != null) {
+            Collection<File> persistedConfigs = FileUtils.listFiles(persistenceDir, new String[]{"conf"}, false);
+            for (File persisted : persistedConfigs) {
+                Properties props = new Properties();
+                try (FileInputStream in = new FileInputStream(persisted)) {
+                    props.load(in);
+
+                    addDiscoveryConfig(props.getProperty(PROP_CLUSTER_NAME), new ServiceDiscoveryConfig() {
+                                                            public String getAddress() {
+                                                                return props.getProperty(PROP_CLUSTER_SOURCE);
+                                                            }
+
+                                                            public String getUser() {
+                                                                return props.getProperty(PROP_CLUSTER_USER);
+                                                            }
+
+                                                            public String getPasswordAlias() {
+                                                                return props.getProperty(PROP_CLUSTER_ALIAS);
+                                                            }
+                                                        });
+                } catch (IOException e) {
+                    log.failedToLoadClusterMonitorServiceDiscoveryConfig(getType(), e);
+                }
+            }
+        }
+    }
+
+    /**
+     * Load any previously-persisted cluster configuration version records, so the monitor will check
+     * previously-deployed topologies against the current cluster configuration.
+     */
+    private void loadClusterVersionData() {
+        File persistenceDir = getPersistenceDir();
+        if (persistenceDir != null) {
+            Collection<File> persistedConfigs = FileUtils.listFiles(getPersistenceDir(), new String[]{"ver"}, false);
+            for (File persisted : persistedConfigs) {
+                Properties props = new Properties();
+                try (FileInputStream in = new FileInputStream(persisted)) {
+                    props.load(in);
+
+                    String source = props.getProperty(PROP_CLUSTER_SOURCE);
+                    String clusterName = props.getProperty(PROP_CLUSTER_NAME);
+
+                    Map<String, String> configVersions = new HashMap<>();
+                    for (String name : props.stringPropertyNames()) {
+                        if (!name.startsWith(PROP_CLUSTER_PREFIX)) { // Ignore implementation-specific properties
+                            configVersions.put(name, props.getProperty(name));
+                        }
+                    }
+
+                    // Map the config versions to the cluster name
+                    addClusterConfigVersions(source, clusterName, configVersions);
+
+                } catch (IOException e) {
+                    log.failedToLoadClusterMonitorConfigVersions(getType(), e);
+                }
+            }
+        }
+    }
+
+    private void persistDiscoveryConfiguration(String clusterName, ServiceDiscoveryConfig sdc) {
+        File persistenceDir = getPersistenceDir();
+        if (persistenceDir != null) {
+
+            Properties props = new Properties();
+            props.setProperty(PROP_CLUSTER_NAME, clusterName);
+            props.setProperty(PROP_CLUSTER_SOURCE, sdc.getAddress());
+
+            String username = sdc.getUser();
+            if (username != null) {
+                props.setProperty(PROP_CLUSTER_USER, username);
+            }
+            String pwdAlias = sdc.getPasswordAlias();
+            if (pwdAlias != null) {
+                props.setProperty(PROP_CLUSTER_ALIAS, pwdAlias);
+            }
+
+            persist(props, getDiscoveryConfigPersistenceFile(sdc.getAddress(), clusterName));
+        }
+    }
+
+    private void persistClusterVersionData(String address, String clusterName, Map<String, String> configVersions) {
+        File persistenceDir = getPersistenceDir();
+        if (persistenceDir != null) {
+            Properties props = new Properties();
+            props.setProperty(PROP_CLUSTER_NAME, clusterName);
+            props.setProperty(PROP_CLUSTER_SOURCE, address);
+            for (String name : configVersions.keySet()) {
+                props.setProperty(name, configVersions.get(name));
+            }
+
+            persist(props, getConfigVersionsPersistenceFile(address, clusterName));
+        }
+    }
+
+    private void persist(Properties props, File dest) {
+        try (FileOutputStream out = new FileOutputStream(dest)) {
+            props.store(out, PERSISTED_FILE_COMMENT);
+        } catch (Exception e) {
+            log.failedToPersistClusterMonitorData(getType(), dest.getAbsolutePath(), e);
+        }
+    }
+
+    private File getPersistenceDir() {
+        File persistenceDir = null;
+
+        File dataDir = new File(gatewayConfig.getGatewayDataDir());
+        if (dataDir.exists()) {
+            File clustersDir = new File(dataDir, CLUSTERS_DATA_DIR_NAME);
+            if (!clustersDir.exists()) {
+                clustersDir.mkdirs();
+            }
+            persistenceDir = clustersDir;
+        }
+
+        return persistenceDir;
+    }
+
+    private File getDiscoveryConfigPersistenceFile(String address, String clusterName) {
+        return getPersistenceFile(address, clusterName, "conf");
+    }
+
+    private File getConfigVersionsPersistenceFile(String address, String clusterName) {
+        return getPersistenceFile(address, clusterName, "ver");
+    }
+
+    private File getPersistenceFile(String address, String clusterName, String ext) {
+        String fileName = address.replace(":", "_").replace("/", "_") + "-" + clusterName + "." + ext;
+        return new File(getPersistenceDir(), fileName);
+    }
+
+    /**
+     * Add cluster configuration details to the monitor's in-memory record.
+     *
+     * @param address        An Ambari instance address.
+     * @param clusterName    The name of a cluster associated with the Ambari instance.
+     * @param configVersions A Map of configuration types and their corresponding versions.
+     */
+    private void addClusterConfigVersions(String address, String clusterName, Map<String, String> configVersions) {
+        configVersionsLock.writeLock().lock();
+        try {
+            ambariClusterConfigVersions.computeIfAbsent(address, k -> new HashMap<>())
+                                       .put(clusterName, configVersions);
+        } finally {
+            configVersionsLock.writeLock().unlock();
+        }
+    }
+
+    public void start() {
+        (new Thread(internalMonitor, "AmbariConfigurationMonitor")).start();
+    }
+
+    public void stop() {
+        internalMonitor.stop();
+    }
+
+    @Override
+    public void addListener(ConfigurationChangeListener listener) {
+        changeListeners.add(listener);
+    }
+
+    /**
+     * Add discovery configuration details for the specified cluster, so the monitor knows how to connect to check for
+     * changes.
+     *
+     * @param clusterName The name of the cluster.
+     * @param config      The associated service discovery configuration.
+     */
+    void addDiscoveryConfig(String clusterName, ServiceDiscoveryConfig config) {
+        clusterMonitorConfigurations.computeIfAbsent(config.getAddress(), k -> new HashMap<>()).put(clusterName, config);
+    }
+
+
+    /**
+     * Get the service discovery configuration associated with the specified Ambari instance and cluster.
+     *
+     * @param address     An Ambari instance address.
+     * @param clusterName The name of a cluster associated with the Ambari instance.
+     *
+     * @return The associated ServiceDiscoveryConfig object.
+     */
+    ServiceDiscoveryConfig getDiscoveryConfig(String address, String clusterName) {
+        ServiceDiscoveryConfig config = null;
+        if (clusterMonitorConfigurations.containsKey(address)) {
+            config = clusterMonitorConfigurations.get(address).get(clusterName);
+        }
+        return config;
+    }
+
+
+    /**
+     * Add cluster configuration data to the monitor, which it will use when determining if configuration has changed.
+     *
+     * @param cluster         An AmbariCluster object.
+     * @param discoveryConfig The discovery configuration associated with the cluster.
+     */
+    void addClusterConfigVersions(AmbariCluster cluster, ServiceDiscoveryConfig discoveryConfig) {
+
+        String clusterName = cluster.getName();
+
+        // Register the cluster discovery configuration for the monitor connections
+        persistDiscoveryConfiguration(clusterName, discoveryConfig);
+        addDiscoveryConfig(clusterName, discoveryConfig);
+
+        // Build the set of configuration versions
+        Map<String, String> configVersions = new HashMap<>();
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigs = cluster.getServiceConfigurations();
+        for (String serviceName : serviceConfigs.keySet()) {
+            Map<String, AmbariCluster.ServiceConfiguration> configTypeVersionMap = serviceConfigs.get(serviceName);
+            for (AmbariCluster.ServiceConfiguration config : configTypeVersionMap.values()) {
+                String configType = config.getType();
+                String version = config.getVersion();
+                configVersions.put(configType, version);
+            }
+        }
+
+        persistClusterVersionData(discoveryConfig.getAddress(), clusterName, configVersions);
+        addClusterConfigVersions(discoveryConfig.getAddress(), clusterName, configVersions);
+    }
+
+
+    /**
+     * Remove the configuration record for the specified Ambari instance and cluster name.
+     *
+     * @param address     An Ambari instance address.
+     * @param clusterName The name of a cluster associated with the Ambari instance.
+     *
+     * @return The removed data; A Map of configuration types and their corresponding versions.
+     */
+    Map<String, String> removeClusterConfigVersions(String address, String clusterName) {
+        Map<String, String> result = new HashMap<>();
+
+        configVersionsLock.writeLock().lock();
+        try {
+            if (ambariClusterConfigVersions.containsKey(address)) {
+                Map<String, String> removed = ambariClusterConfigVersions.get(address).remove(clusterName);
+                if (removed != null) { // nothing may have been recorded for this cluster
+                    result.putAll(removed);
+                }
+            }
+        } finally {
+            configVersionsLock.writeLock().unlock();
+        }
+
+        // Delete the associated persisted record
+        File persisted = getConfigVersionsPersistenceFile(address, clusterName);
+        if (persisted.exists()) {
+            persisted.delete();
+        }
+
+        return result;
+    }
+
+    /**
+     * Get the cluster configuration details for the specified cluster and Ambari instance.
+     *
+     * @param address     An Ambari instance address.
+     * @param clusterName The name of a cluster associated with the Ambari instance.
+     *
+     * @return A Map of configuration types and their corresponding versions.
+     */
+    Map<String, String> getClusterConfigVersions(String address, String clusterName) {
+        Map<String, String> result = new HashMap<>();
+
+        configVersionsLock.readLock().lock();
+        try {
+            if (ambariClusterConfigVersions.containsKey(address)) {
+                result.putAll(ambariClusterConfigVersions.get(address).get(clusterName));
+            }
+        } finally {
+            configVersionsLock.readLock().unlock();
+        }
+
+        return result;
+    }
+
+
+    /**
+     * Get all the clusters the monitor knows about.
+     *
+     * @return A Map of Ambari instance addresses to associated cluster names.
+     */
+    Map<String, List<String>> getClusterNames() {
+        Map<String, List<String>> result = new HashMap<>();
+
+        configVersionsLock.readLock().lock();
+        try {
+            for (String address : ambariClusterConfigVersions.keySet()) {
+                List<String> clusterNames = new ArrayList<>();
+                clusterNames.addAll(ambariClusterConfigVersions.get(address).keySet());
+                result.put(address, clusterNames);
+            }
+        } finally {
+            configVersionsLock.readLock().unlock();
+        }
+
+        return result;
+
+    }
+
+
+    /**
+     * Notify registered change listeners.
+     *
+     * @param source      The address of the Ambari instance from which the cluster details were determined.
+     * @param clusterName The name of the cluster whose configuration details have changed.
+     */
+    void notifyChangeListeners(String source, String clusterName) {
+        for (ConfigurationChangeListener listener : changeListeners) {
+            listener.onConfigurationChange(source, clusterName);
+        }
+    }
+
+
+    /**
+     * Request the current active configuration version info from Ambari.
+     *
+     * @param address     The Ambari instance address.
+     * @param clusterName The name of the cluster for which the details are desired.
+     *
+     * @return A Map of service configuration types and their corresponding versions.
+     */
+    Map<String, String> getUpdatedConfigVersions(String address, String clusterName) {
+        Map<String, String> configVersions = new HashMap<>();
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigs =
+                    ambariClient.getActiveServiceConfigurations(clusterName, getDiscoveryConfig(address, clusterName));
+
+        for (Map<String, AmbariCluster.ServiceConfiguration> serviceConfig : serviceConfigs.values()) {
+            for (AmbariCluster.ServiceConfiguration config : serviceConfig.values()) {
+                configVersions.put(config.getType(), config.getVersion());
+            }
+        }
+
+        return configVersions;
+    }
+
+
+    /**
+     * The thread that polls Ambari for configuration details for clusters associated with discovered topologies,
+     * compares them with the current recorded values, and notifies any listeners when differences are discovered.
+     */
+    static final class PollingConfigAnalyzer implements Runnable {
+
+        private static final int DEFAULT_POLLING_INTERVAL = 60;
+
+        // Polling interval in seconds
+        private int interval = DEFAULT_POLLING_INTERVAL;
+
+        private AmbariConfigurationMonitor delegate;
+
+        private boolean isActive = false;
+
+        PollingConfigAnalyzer(AmbariConfigurationMonitor delegate) {
+            this.delegate = delegate;
+            this.interval = Integer.getInteger(INTERVAL_PROPERTY_NAME, PollingConfigAnalyzer.DEFAULT_POLLING_INTERVAL);
+        }
+
+        void setInterval(int interval) {
+            this.interval = interval;
+        }
+
+
+        void stop() {
+            isActive = false;
+        }
+
+        @Override
+        public void run() {
+            isActive = true;
+
+            log.startedAmbariConfigMonitor(interval);
+
+            while (isActive) {
+                for (Map.Entry<String, List<String>> entry : delegate.getClusterNames().entrySet()) {
+                    String address = entry.getKey();
+                    for (String clusterName : entry.getValue()) {
+                        Map<String, String> configVersions = delegate.getClusterConfigVersions(address, clusterName);
+                        if (configVersions != null && !configVersions.isEmpty()) {
+                            Map<String, String> updatedVersions = delegate.getUpdatedConfigVersions(address, clusterName);
+                            if (updatedVersions != null && !updatedVersions.isEmpty()) {
+                                boolean configHasChanged = false;
+
+                                // If the config sets don't match in size, then something has changed
+                                if (updatedVersions.size() != configVersions.size()) {
+                                    configHasChanged = true;
+                                } else {
+                                    // Perform the comparison of all the config versions
+                                    for (Map.Entry<String, String> configVersion : configVersions.entrySet()) {
+                                        if (!configVersion.getValue().equals(updatedVersions.get(configVersion.getKey()))) {
+                                            configHasChanged = true;
+                                            break;
+                                        }
+                                    }
+                                }
+
+                                // If a change has occurred, notify the listeners
+                                if (configHasChanged) {
+                                    delegate.notifyChangeListeners(address, clusterName);
+                                }
+                            }
+                        }
+                    }
+                }
+
+                try {
+                    Thread.sleep(interval * 1000);
+                } catch (InterruptedException e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+
+}

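For reference, the change-detection step in the polling loop above reduces to comparing two maps of configuration type to version: a size mismatch or any differing version value counts as a change, and only then are the listeners notified. The following is a minimal, self-contained sketch of that comparison using only JDK types; the class and value names are illustrative, not Knox API.

    import java.util.HashMap;
    import java.util.Map;

    public class ConfigVersionComparisonSketch {

        // True when the recorded and freshly fetched type->version maps differ in size
        // or in any individual version value (null-safe for keys missing from "updated").
        static boolean configHasChanged(Map<String, String> recorded, Map<String, String> updated) {
            if (recorded.size() != updated.size()) {
                return true;
            }
            for (Map.Entry<String, String> entry : recorded.entrySet()) {
                if (!entry.getValue().equals(updated.get(entry.getKey()))) {
                    return true;
                }
            }
            return false;
        }

        public static void main(String[] args) {
            Map<String, String> recorded = new HashMap<>();
            recorded.put("zoo.cfg", "3");
            recorded.put("hive-site", "2");

            Map<String, String> updated = new HashMap<>(recorded);
            updated.put("hive-site", "3"); // a new active service configuration version

            System.out.println(configHasChanged(recorded, updated)); // prints: true
        }
    }
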
http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
index b7f9f53..765a928 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -16,7 +16,7 @@
  */
 package org.apache.hadoop.gateway.topology.discovery.ambari;
 
-import java.io.IOException;
+import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -25,38 +25,32 @@ import java.util.Properties;
 
 import net.minidev.json.JSONArray;
 import net.minidev.json.JSONObject;
-import net.minidev.json.JSONValue;
-import org.apache.hadoop.gateway.config.ConfigurationException;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.AliasServiceException;
+import org.apache.hadoop.gateway.topology.ClusterConfigurationMonitorService;
+import org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitor;
 import org.apache.hadoop.gateway.topology.discovery.GatewayService;
 import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
 import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpStatus;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.message.BasicHeader;
-import org.apache.http.util.EntityUtils;
 
 
 class AmbariServiceDiscovery implements ServiceDiscovery {
 
     static final String TYPE = "AMBARI";
 
-    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+    static final String AMBARI_CLUSTERS_URI = AmbariClientCommon.AMBARI_CLUSTERS_URI;
 
-    static final String AMBARI_HOSTROLES_URI =
-                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+    static final String AMBARI_HOSTROLES_URI = AmbariClientCommon.AMBARI_HOSTROLES_URI;
 
-    static final String AMBARI_SERVICECONFIGS_URI =
-            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+    static final String AMBARI_SERVICECONFIGS_URI = AmbariClientCommon.AMBARI_SERVICECONFIGS_URI;
 
     private static final String COMPONENT_CONFIG_MAPPING_FILE =
                                                         "ambari-service-discovery-component-config-mapping.properties";
 
+    private static final String GATEWAY_SERVICES_ACCESSOR_CLASS  = "org.apache.hadoop.gateway.GatewayServer";
+    private static final String GATEWAY_SERVICES_ACCESSOR_METHOD = "getGatewayServices";
+
     private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
 
     // Map of component names to service configuration types
@@ -69,21 +63,76 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
                 componentServiceConfigs.put(componentName, configMapping.getProperty(componentName));
             }
         } catch (Exception e) {
-            log.failedToLoadServiceDiscoveryConfiguration(COMPONENT_CONFIG_MAPPING_FILE, e);
+            log.failedToLoadServiceDiscoveryURLDefConfiguration(COMPONENT_CONFIG_MAPPING_FILE, e);
         }
     }
 
-    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
-    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
-
     @GatewayService
     private AliasService aliasService;
 
-    private CloseableHttpClient httpClient = null;
+    private RESTInvoker restClient;
+    private AmbariClientCommon ambariClient;
 
+    // This is used to update the monitor when new cluster configuration details are discovered.
+    private AmbariConfigurationMonitor configChangeMonitor;
+
+    private boolean isInitialized = false;
 
     AmbariServiceDiscovery() {
-        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+    }
+
+
+    AmbariServiceDiscovery(RESTInvoker restClient) {
+        this.restClient = restClient;
+    }
+
+
+    /**
+     * Initialization must happen after construction because the AliasService member is not assigned until then.
+     * This method is invoked internally before each discovery request to ensure the clients have been
+     * initialized.
+     */
+    private void init() {
+        if (!isInitialized) {
+            if (this.restClient == null) {
+                this.restClient = new RESTInvoker(aliasService);
+            }
+            this.ambariClient = new AmbariClientCommon(restClient);
+            this.configChangeMonitor = getConfigurationChangeMonitor();
+
+            isInitialized = true;
+        }
+    }
+
+
+    /**
+     * Get the Ambari configuration change monitor from the associated gateway service.
+     */
+    private AmbariConfigurationMonitor getConfigurationChangeMonitor() {
+        AmbariConfigurationMonitor ambariMonitor = null;
+        try {
+            Class clazz = Class.forName(GATEWAY_SERVICES_ACCESSOR_CLASS);
+            if (clazz != null) {
+                Method m = clazz.getDeclaredMethod(GATEWAY_SERVICES_ACCESSOR_METHOD);
+                if (m != null) {
+                    Object obj = m.invoke(null);
+                    if (GatewayServices.class.isAssignableFrom(obj.getClass())) {
+                        ClusterConfigurationMonitorService clusterMonitorService =
+                              ((GatewayServices) obj).getService(GatewayServices.CLUSTER_CONFIGURATION_MONITOR_SERVICE);
+                        ClusterConfigurationMonitor monitor =
+                                                 clusterMonitorService.getMonitor(AmbariConfigurationMonitor.getType());
+                        if (monitor != null) {
+                            if (AmbariConfigurationMonitor.class.isAssignableFrom(monitor.getClass())) {
+                                ambariMonitor = (AmbariConfigurationMonitor) monitor;
+                            }
+                        }
+                    }
+                }
+            }
+        } catch (Exception e) {
+            log.errorAccessingConfigurationChangeMonitor(e);
+        }
+        return ambariMonitor;
     }
 
 
@@ -95,14 +144,16 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
 
     @Override
     public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
-        Map<String, Cluster> clusters = new HashMap<String, Cluster>();
+        Map<String, Cluster> clusters = new HashMap<>();
+
+        init();
 
         String discoveryAddress = config.getAddress();
 
         // Invoke Ambari REST API to discover the available clusters
         String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
 
-        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
+        JSONObject json = restClient.invoke(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
 
         // Parse the cluster names from the response, and perform the cluster discovery
         JSONArray clusterItems = (JSONArray) json.get("items");
@@ -126,13 +177,15 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
 
         Map<String, String> serviceComponents = new HashMap<>();
 
+        init();
+
         String discoveryAddress = config.getAddress();
         String discoveryUser = config.getUser();
         String discoveryPwdAlias = config.getPasswordAlias();
 
         Map<String, List<String>> componentHostNames = new HashMap<>();
         String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
-        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
+        JSONObject hostRolesJSON = restClient.invoke(hostRolesURL, discoveryUser, discoveryPwdAlias);
         if (hostRolesJSON != null) {
             // Process the host roles JSON
             JSONArray items = (JSONArray) hostRolesJSON.get("items");
@@ -158,7 +211,7 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
                         if (hostName != null) {
                             log.discoveredServiceHost(serviceName, hostName);
                             if (!componentHostNames.containsKey(componentName)) {
-                                componentHostNames.put(componentName, new ArrayList<String>());
+                                componentHostNames.put(componentName, new ArrayList<>());
                             }
                             componentHostNames.get(componentName).add(hostName);
                         }
@@ -167,31 +220,15 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
             }
         }
 
+        // Service configurations
         Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
-                                                 new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
-        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
-        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
-        if (serviceConfigsJSON != null) {
-            // Process the service configurations
-            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
-            for (Object serviceConfig : serviceConfigs) {
-                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
-                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
-                for (Object configuration : configurations) {
-                    String configType = (String) ((JSONObject) configuration).get("type");
-                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
-
-                    Map<String, String> configProps = new HashMap<String, String>();
-                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
-                    for (String propertyName : configProperties.keySet()) {
-                        configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
-                    }
-                    if (!serviceConfigurations.containsKey(serviceName)) {
-                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
-                    }
-                    serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
-                    cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
-                }
+                                                        ambariClient.getActiveServiceConfigurations(discoveryAddress,
+                                                                                                    clusterName,
+                                                                                                    discoveryUser,
+                                                                                                    discoveryPwdAlias);
+        for (String serviceName : serviceConfigurations.keySet()) {
+            for (Map.Entry<String, AmbariCluster.ServiceConfiguration> serviceConfig : serviceConfigurations.get(serviceName).entrySet()) {
+                cluster.addServiceConfiguration(serviceName, serviceConfig.getKey(), serviceConfig.getValue());
             }
         }
 
@@ -214,93 +251,12 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
             }
         }
 
-        return cluster;
-    }
-
-
-    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
-        JSONObject result = null;
-
-        CloseableHttpResponse response = null;
-        try {
-            HttpGet request = new HttpGet(url);
-
-            // If no configured username, then use default username alias
-            String password = null;
-            if (username == null) {
-                if (aliasService != null) {
-                    try {
-                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
-                        if (defaultUser != null) {
-                            username = new String(defaultUser);
-                        }
-                    } catch (AliasServiceException e) {
-                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
-                    }
-                }
-
-                // If username is still null
-                if (username == null) {
-                    log.aliasServiceUserNotFound();
-                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
-                }
-            }
-
-            if (aliasService != null) {
-                // If no password alias is configured, then try the default alias
-                if (passwordAlias == null) {
-                    passwordAlias = DEFAULT_PWD_ALIAS;
-                }
-
-                try {
-                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
-                    if (pwd != null) {
-                        password = new String(pwd);
-                    }
-
-                } catch (AliasServiceException e) {
-                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
-                }
-            }
-
-            // If the password could not be determined
-            if (password == null) {
-                log.aliasServicePasswordNotFound();
-                throw new ConfigurationException("No password is configured for Ambari service discovery.");
-            }
-
-            // Add an auth header if credentials are available
-            String encodedCreds =
-                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
-            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
-
-            response = httpClient.execute(request);
-
-            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
-                HttpEntity entity = response.getEntity();
-                if (entity != null) {
-                    result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
-                    log.debugJSON(result.toJSONString());
-                } else {
-                    log.noJSON(url);
-                }
-            } else {
-                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
-            }
-
-        } catch (IOException e) {
-            log.restInvocationError(url, e);
-        } finally {
-            if(response != null) {
-                try {
-                    response.close();
-                } catch (IOException e) {
-                    // Ignore
-                }
-            }
+        if (configChangeMonitor != null) {
+            // Notify the cluster config monitor about these cluster configuration details
+            configChangeMonitor.addClusterConfigVersions(cluster, config);
         }
-        return result;
-    }
 
+        return cluster;
+    }
 
 }

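The getConfigurationChangeMonitor() method in the diff above avoids a compile-time dependency on the gateway server module by resolving a static accessor through reflection and invoking it with no target instance. Below is a self-contained sketch of that pattern; the ServicesHolder class and getServices method are hypothetical stand-ins for the real accessor resolved by name in the code above.

    import java.lang.reflect.Method;

    public class ReflectiveAccessorSketch {

        // Stand-in for the class whose static accessor is looked up by name at runtime.
        public static class ServicesHolder {
            public static String getServices() {
                return "services-registry";
            }
        }

        public static void main(String[] args) throws Exception {
            // Resolve the class and its static accessor by name, then invoke with a null target.
            Class<?> clazz = Class.forName(ReflectiveAccessorSketch.class.getName() + "$ServicesHolder");
            Method accessor = clazz.getDeclaredMethod("getServices");
            Object services = accessor.invoke(null);
            System.out.println(services); // prints: services-registry
        }
    }
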
http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
index 0661224..51bbe0e 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -25,24 +25,44 @@ import org.apache.hadoop.gateway.i18n.messages.StackTrace;
 public interface AmbariServiceDiscoveryMessages {
 
     @Message(level = MessageLevel.ERROR,
-            text = "Failed to load service discovery configuration: {1}")
-    void failedToLoadServiceDiscoveryConfiguration(@StackTrace(level = MessageLevel.ERROR) Exception e);
+             text = "Failed to persist data for cluster configuration monitor {0} {1}: {2}")
+    void failedToPersistClusterMonitorData(final String monitor,
+                                           final String filename,
+                                           @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
     @Message(level = MessageLevel.ERROR,
-             text = "Failed to load service discovery configuration {0}: {1}")
-    void failedToLoadServiceDiscoveryConfiguration(final String configuration,
-                               @StackTrace(level = MessageLevel.ERROR) Exception e);
+             text = "Failed to load persisted service discovery configuration for cluster monitor {0} : {1}")
+    void failedToLoadClusterMonitorServiceDiscoveryConfig(final String monitor,
+                                                          @StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Failed to load persisted cluster configuration version data for cluster monitor {0} : {1}")
+    void failedToLoadClusterMonitorConfigVersions(final String monitor,
+                                                  @StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+    @Message(level = MessageLevel.ERROR,
+             text = "Unable to access the Ambari Configuration Change Monitor: {0}")
+    void errorAccessingConfigurationChangeMonitor(@StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+    @Message(level = MessageLevel.ERROR,
+             text = "Failed to load service discovery URL definition configuration: {1}")
+    void failedToLoadServiceDiscoveryURLDefConfiguration(@StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+    @Message(level = MessageLevel.ERROR,
+             text = "Failed to load service discovery URL definition configuration {0}: {1}")
+    void failedToLoadServiceDiscoveryURLDefConfiguration(final String configuration,
+                                                         @StackTrace(level = MessageLevel.ERROR) Exception e);
 
     @Message(level = MessageLevel.ERROR,
              text = "Encountered an error during cluster {0} discovery: {1}")
     void clusterDiscoveryError(final String clusterName,
-                               @StackTrace(level = MessageLevel.ERROR) Exception e);
+                               @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
 
     @Message(level = MessageLevel.DEBUG,
              text = "REST invocation {0} failed: {1}")
     void restInvocationError(final String url,
-                             @StackTrace(level = MessageLevel.ERROR) Exception e);
+                             @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
 
     @Message(level = MessageLevel.ERROR,
@@ -75,20 +95,23 @@ public interface AmbariServiceDiscoveryMessages {
     void noJSON(final String url);
 
 
-    @Message(level = MessageLevel.DEBUG,
+    @Message(level = MessageLevel.TRACE,
              text = "REST invocation result: {0}")
     void debugJSON(final String json);
 
+
     @Message(level = MessageLevel.DEBUG,
-            text = "Loaded component configuration mappings: {0}")
+             text = "Loaded component configuration mappings: {0}")
     void loadedComponentConfigMappings(final String mappings);
 
+
     @Message(level = MessageLevel.ERROR,
              text = "Failed to load component configuration property mappings {0}: {1}")
     void failedToLoadComponentConfigMappings(final String mappings,
-                                             @StackTrace(level = MessageLevel.ERROR) Exception e);
+                                             @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
-    @Message(level = MessageLevel.DEBUG,
+
+    @Message(level = MessageLevel.TRACE,
              text = "Discovered: Service: {0}, Host: {1}")
     void discoveredServiceHost(final String serviceName, final String hostName);
 
@@ -114,8 +137,12 @@ public interface AmbariServiceDiscoveryMessages {
 
 
     @Message(level = MessageLevel.DEBUG,
-            text = "Determined the service URL mapping property {0} value: {1}")
+             text = "Determined the service URL mapping property {0} value: {1}")
     void determinedPropertyValue(final String propertyName, final String propertyValue);
 
 
+    @Message(level = MessageLevel.INFO,
+             text = "Started Ambari cluster configuration monitor (checking every {0} seconds)")
+    void startedAmbariConfigMonitor(final long pollingInterval);
+
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/RESTInvoker.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/RESTInvoker.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/RESTInvoker.java
new file mode 100644
index 0000000..6a6fad8
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/RESTInvoker.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.config.ConfigurationException;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.AliasServiceException;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
+import java.io.IOException;
+
+class RESTInvoker {
+
+    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
+
+    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    private AliasService aliasService = null;
+
+    private CloseableHttpClient httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+
+
+    RESTInvoker(AliasService aliasService) {
+        this.aliasService = aliasService;
+    }
+
+
+    JSONObject invoke(String url, String username, String passwordAlias) {
+        JSONObject result = null;
+
+        CloseableHttpResponse response = null;
+        try {
+            HttpGet request = new HttpGet(url);
+
+            // If no configured username, then use default username alias
+            String password = null;
+            if (username == null) {
+                if (aliasService != null) {
+                    try {
+                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+                        if (defaultUser != null) {
+                            username = new String(defaultUser);
+                        }
+                    } catch (AliasServiceException e) {
+                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+                    }
+                }
+
+                // If username is still null
+                if (username == null) {
+                    log.aliasServiceUserNotFound();
+                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
+                }
+            }
+
+            if (aliasService != null) {
+                // If no password alias is configured, then try the default alias
+                if (passwordAlias == null) {
+                    passwordAlias = DEFAULT_PWD_ALIAS;
+                }
+
+                try {
+                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+                    if (pwd != null) {
+                        password = new String(pwd);
+                    }
+
+                } catch (AliasServiceException e) {
+                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+                }
+            }
+
+            // If the password could not be determined
+            if (password == null) {
+                log.aliasServicePasswordNotFound();
+                throw new ConfigurationException("No password is configured for Ambari service discovery.");
+            }
+
+            // Add an auth header if credentials are available
+            String encodedCreds =
+                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+            response = httpClient.execute(request);
+
+            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+                HttpEntity entity = response.getEntity();
+                if (entity != null) {
+                    result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
+                    log.debugJSON(result.toJSONString());
+                } else {
+                    log.noJSON(url);
+                }
+            } else {
+                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+            }
+
+        } catch (IOException e) {
+            log.restInvocationError(url, e);
+        } finally {
+            if(response != null) {
+                try {
+                    response.close();
+                } catch (IOException e) {
+                    // Ignore
+                }
+            }
+        }
+        return result;
+    }
+
+}

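The Authorization header built in invoke() above is standard HTTP Basic authentication: the username and password are joined with a colon and Base64-encoded. A minimal sketch using the JDK encoder follows (the commons-codec call in the diff is equivalent); the credentials shown are placeholders, since Knox resolves the real ones through the AliasService.

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class BasicAuthHeaderSketch {

        // Build the value of an HTTP "Authorization" header for Basic authentication.
        static String basicAuthHeader(String username, String password) {
            String credentials = username + ":" + password;
            return "Basic " + Base64.getEncoder().encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
        }

        public static void main(String[] args) {
            System.out.println(basicAuthHeader("discovery-user", "discovery-password"));
        }
    }
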
http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
index 3330cc3..deb5bb3 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
@@ -110,7 +110,7 @@ class ServiceURLPropertyConfig {
                 }
             }
         } catch (Exception e) {
-            log.failedToLoadServiceDiscoveryConfiguration(e);
+            log.failedToLoadServiceDiscoveryURLDefConfiguration(e);
         } finally {
             try {
                 source.close();

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider
new file mode 100644
index 0000000..d9b2b05
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ClusterConfigurationMonitorProvider
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.discovery.ambari.AmbariClusterConfigurationMonitorProvider
\ No newline at end of file

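A META-INF/services entry like the one above is the standard java.util.ServiceLoader registration convention: the file is named after the provider interface and lists implementation classes, which consumers discover by iterating the loader. The sketch below is self-contained and illustrative only; the MonitorProvider interface stands in for the real ClusterConfigurationMonitorProvider.

    import java.util.ServiceLoader;

    public class ServiceLoaderSketch {

        // Stand-in for the provider interface named by the META-INF/services file.
        public interface MonitorProvider {
            String getType();
        }

        public static void main(String[] args) {
            // Iterates every implementation listed in META-INF/services/<interface-name> on the classpath.
            ServiceLoader<MonitorProvider> providers = ServiceLoader.load(MonitorProvider.class);
            for (MonitorProvider provider : providers) {
                System.out.println("Found provider for monitor type: " + provider.getType());
            }
            // Prints nothing unless a services file on the classpath names an implementation.
        }
    }
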
http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java
new file mode 100644
index 0000000..2d8b276
--- /dev/null
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariConfigurationMonitorTest.java
@@ -0,0 +1,319 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+public class AmbariConfigurationMonitorTest {
+
+    private File dataDir = null;
+
+    @Before
+    public void setup() throws Exception {
+        File targetDir = new File( System.getProperty("user.dir"), "target");
+        File tempDir = new File(targetDir, this.getClass().getName() + "__data__" + UUID.randomUUID());
+        FileUtils.forceMkdir(tempDir);
+        dataDir = tempDir;
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        FileUtils.deleteQuietly(dataDir);
+    }
+
+    @Test
+    public void testPollingMonitor() throws Exception {
+        final String addr1 = "http://host1:8080";
+        final String addr2 = "http://host2:8080";
+        final String cluster1Name = "Cluster_One";
+        final String cluster2Name = "Cluster_Two";
+
+
+        GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+        EasyMock.expect(config.getGatewayDataDir()).andReturn(dataDir.getAbsolutePath()).anyTimes();
+        EasyMock.expect(config.getClusterMonitorPollingInterval(AmbariConfigurationMonitor.getType()))
+                .andReturn(10)
+                .anyTimes();
+        EasyMock.replay(config);
+
+        // Create the monitor
+        TestableAmbariConfigurationMonitor monitor = new TestableAmbariConfigurationMonitor(config);
+
+        // Clear the system property now that the monitor has been initialized
+        System.clearProperty(AmbariConfigurationMonitor.INTERVAL_PROPERTY_NAME);
+
+
+        // Sequence of config changes for testing monitoring for updates
+        Map<String, Map<String, List<List<AmbariCluster.ServiceConfiguration>>>> updateConfigurations = new HashMap<>();
+
+        updateConfigurations.put(addr1, new HashMap<>());
+        updateConfigurations.get(addr1).put(cluster1Name, Arrays.asList(Arrays.asList(createTestServiceConfig("zoo.cfg", "3"),
+                                                                                      createTestServiceConfig("hive-site", "2")),
+                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "3"),
+                                                                                      createTestServiceConfig("hive-site", "3")),
+                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "2"),
+                                                                                      createTestServiceConfig("hive-site", "1"))));
+
+        updateConfigurations.put(addr2, new HashMap<>());
+        updateConfigurations.get(addr2).put(cluster2Name, Arrays.asList(Arrays.asList(createTestServiceConfig("zoo.cfg", "1"),
+                                                                                      createTestServiceConfig("hive-site", "1")),
+                                                                        Collections.singletonList(createTestServiceConfig("zoo.cfg", "1")),
+                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "1"),
+                                                                                      createTestServiceConfig("hive-site", "2"))));
+
+        updateConfigurations.get(addr2).put(cluster1Name, Arrays.asList(Arrays.asList(createTestServiceConfig("zoo.cfg", "2"),
+                                                                                      createTestServiceConfig("hive-site", "4")),
+                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "3"),
+                                                                                      createTestServiceConfig("hive-site", "4"),
+                                                                                      createTestServiceConfig("yarn-site", "1")),
+                                                                        Arrays.asList(createTestServiceConfig("zoo.cfg", "1"),
+                                                                                      createTestServiceConfig("hive-site", "2"))));
+
+        Map<String, Map<String, Integer>> configChangeIndex = new HashMap<>();
+        configChangeIndex.put(addr1, new HashMap<>());
+        configChangeIndex.get(addr1).put(cluster1Name, 0);
+        configChangeIndex.get(addr1).put(cluster2Name, 0);
+        configChangeIndex.put(addr2, new HashMap<>());
+        configChangeIndex.get(addr2).put(cluster2Name, 0);
+
+        // Setup the initial test update data
+        // Cluster 1 data change
+        monitor.addTestConfigVersion(addr1, cluster1Name, "zoo.cfg", "2");
+        monitor.addTestConfigVersion(addr1, cluster1Name, "hive-site", "1");
+
+        // Cluster 2 NO data change
+        monitor.addTestConfigVersion(addr2, cluster1Name, "zoo.cfg", "1");
+        monitor.addTestConfigVersion(addr2, cluster1Name, "hive-site", "1");
+
+        // Cluster 3 data change
+        monitor.addTestConfigVersion(addr2, cluster2Name, "zoo.cfg", "1");
+        monitor.addTestConfigVersion(addr2, cluster2Name, "hive-site", "2");
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> initialAmbariClusterConfigs = new HashMap<>();
+
+        Map<String, AmbariCluster.ServiceConfiguration> cluster1Configs = new HashMap<>();
+        AmbariCluster.ServiceConfiguration zooCfg = createTestServiceConfig("zoo.cfg", "1");
+        cluster1Configs.put("ZOOKEEPER", zooCfg);
+
+        AmbariCluster.ServiceConfiguration hiveSite = createTestServiceConfig("hive-site", "1");
+        cluster1Configs.put("Hive", hiveSite);
+
+        initialAmbariClusterConfigs.put(cluster1Name, cluster1Configs);
+        AmbariCluster cluster1 = createTestCluster(cluster1Name, initialAmbariClusterConfigs);
+
+        // Tell the monitor about the cluster configurations
+        monitor.addClusterConfigVersions(cluster1, createTestDiscoveryConfig(addr1));
+
+        monitor.addClusterConfigVersions(createTestCluster(cluster2Name, initialAmbariClusterConfigs),
+                                         createTestDiscoveryConfig(addr2));
+
+        monitor.addClusterConfigVersions(createTestCluster(cluster1Name, initialAmbariClusterConfigs),
+                                         createTestDiscoveryConfig(addr2));
+
+        final Map<String, Map<String, Integer>> changeNotifications = new HashMap<>();
+        monitor.addListener((src, cname) -> {
+//            System.out.println("Cluster config changed: " + cname + " @ " + src);
+            // Record the notification
+            Integer notificationCount  = changeNotifications.computeIfAbsent(src, s -> new HashMap<>())
+                                                            .computeIfAbsent(cname, c -> Integer.valueOf(0));
+            changeNotifications.get(src).put(cname, (notificationCount+=1));
+
+            // Update the config version
+            int changeIndex = configChangeIndex.get(src).get(cname);
+            if (changeIndex < updateConfigurations.get(src).get(cname).size()) {
+                List<AmbariCluster.ServiceConfiguration> changes = updateConfigurations.get(src).get(cname).get(changeIndex);
+
+//                System.out.println("Applying config update " + changeIndex + " to " + cname + " @ " + src + " ...");
+                for (AmbariCluster.ServiceConfiguration change : changes) {
+                    monitor.updateConfigState(src, cname, change.getType(), change.getVersion());
+//                    System.out.println("    Updated " + change.getType() + " to version " + change.getVersion());
+                }
+
+                // Increment the change index
+                configChangeIndex.get(src).replace(cname, changeIndex + 1);
+
+//                System.out.println("Monitor config updated for " + cname + " @ " + src + " : " + changeIndex );
+            }
+        });
+
+        try {
+            monitor.start();
+
+            long expiration = System.currentTimeMillis() + (1000 * 30);
+            while (!areChangeUpdatesExhausted(updateConfigurations, configChangeIndex)
+                                                                        && (System.currentTimeMillis() < expiration)) {
+                try {
+                    Thread.sleep(5);
+                } catch (InterruptedException e) {
+                    //
+                }
+            }
+
+        } finally {
+            monitor.stop();
+        }
+
+        assertNotNull("Expected changes to have been reported for source 1.",
+                      changeNotifications.get(addr1));
+
+        assertEquals("Expected changes to have been reported.",
+                     3, changeNotifications.get(addr1).get(cluster1Name).intValue());
+
+        assertNotNull("Expected changes to have been reported for source 2.",
+                      changeNotifications.get(addr2));
+
+        assertEquals("Expected changes to have been reported.",
+                     3, changeNotifications.get(addr2).get(cluster2Name).intValue());
+
+        assertNull("Expected changes to have been reported.",
+                   changeNotifications.get(addr2).get(cluster1Name));
+    }
+
+
+    private static boolean areChangeUpdatesExhausted(Map<String, Map<String, List<List<AmbariCluster.ServiceConfiguration>>>> updates,
+                                              Map<String, Map<String, Integer>> configChangeIndices) {
+        boolean isExhausted = true;
+
+        for (String address : updates.keySet()) {
+            Map<String, List<List<AmbariCluster.ServiceConfiguration>>> clusterConfigs = updates.get(address);
+            for (String clusterName : clusterConfigs.keySet()) {
+                Integer configChangeCount = clusterConfigs.get(clusterName).size();
+                if (configChangeIndices.get(address).containsKey(clusterName)) {
+                    if (configChangeIndices.get(address).get(clusterName) < configChangeCount) {
+                        isExhausted = false;
+                        break;
+                    }
+                }
+            }
+        }
+
+        return isExhausted;
+    }
+
+    /**
+     * Create a mock AmbariCluster with the specified name and service configurations.
+     * @param name           The cluster name
+     * @param serviceConfigs A map of service configurations (keyed by service name)
+     *
+     * @return A mock AmbariCluster backed by the specified name and service configurations.
+     */
+    private AmbariCluster createTestCluster(String name,
+                                            Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigs) {
+        AmbariCluster c = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(c.getName()).andReturn(name).anyTimes();
+        EasyMock.expect(c.getServiceConfigurations()).andReturn(serviceConfigs).anyTimes();
+        EasyMock.replay(c);
+        return c;
+    }
+
+    private AmbariCluster.ServiceConfiguration createTestServiceConfig(String name, String version) {
+        AmbariCluster.ServiceConfiguration sc = EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+        EasyMock.expect(sc.getType()).andReturn(name).anyTimes();
+        EasyMock.expect(sc.getVersion()).andReturn(version).anyTimes();
+        EasyMock.replay(sc);
+        return sc;
+    }
+
+    private ServiceDiscoveryConfig createTestDiscoveryConfig(String address) {
+        return createTestDiscoveryConfig(address, null, null);
+    }
+
+    private ServiceDiscoveryConfig createTestDiscoveryConfig(String address, String username, String pwdAlias) {
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(address).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(username).anyTimes();
+        EasyMock.expect(sdc.getPasswordAlias()).andReturn(pwdAlias).anyTimes();
+        EasyMock.replay(sdc);
+        return sdc;
+    }
+
+    /**
+     * AmbariConfigurationMonitor extension that replaces the collection of updated configuration data with a static
+     * mechanism rather than the REST invocation mechanism.
+     */
+    private static final class TestableAmbariConfigurationMonitor extends AmbariConfigurationMonitor {
+
+        Map<String, Map<String, Map<String, String>>> configVersionData = new HashMap<>();
+
+        TestableAmbariConfigurationMonitor(GatewayConfig config) {
+            super(config, null);
+        }
+
+        void addTestConfigVersion(String address, String clusterName, String configType, String configVersion) {
+            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
+                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
+                             .put(configType, configVersion);
+        }
+
+        void addTestConfigVersions(String address, String clusterName, Map<String, String> configVersions) {
+            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
+                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
+                             .putAll(configVersions);
+        }
+
+        void updateTestConfigVersion(String address, String clusterName, String configType, String updatedVersions) {
+            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
+                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
+                             .replace(configType, updatedVersions);
+        }
+
+        void updateTestConfigVersions(String address, String clusterName, Map<String, String> updatedVersions) {
+            configVersionData.computeIfAbsent(address, a -> new HashMap<>())
+                             .computeIfAbsent(clusterName, cl -> new HashMap<>())
+                             .replaceAll((k,v) -> updatedVersions.get(k));
+        }
+
+        void updateConfigState(String address, String clusterName, String configType, String configVersion) {
+            configVersionsLock.writeLock().lock();
+            try {
+                if (ambariClusterConfigVersions.containsKey(address)) {
+                    ambariClusterConfigVersions.get(address).get(clusterName).replace(configType, configVersion);
+                }
+            } finally {
+                configVersionsLock.writeLock().unlock();
+            }
+        }
+
+        @Override
+        Map<String, String> getUpdatedConfigVersions(String address, String clusterName) {
+            Map<String, Map<String, String>> clusterConfigVersions = configVersionData.get(address);
+            if (clusterConfigVersions != null) {
+                return clusterConfigVersions.get(clusterName);
+            }
+            return null;
+        }
+    }
+
+}

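The testable monitor above keeps its canned version data in a nested address -> cluster -> (config type -> version) map, built with computeIfAbsent so the intermediate maps are created on demand. A minimal sketch of that idiom with placeholder values:

    import java.util.HashMap;
    import java.util.Map;

    public class NestedMapSketch {

        public static void main(String[] args) {
            Map<String, Map<String, Map<String, String>>> versions = new HashMap<>();

            // Intermediate maps are created only when missing, so repeated puts for the
            // same address and cluster reuse the existing entries.
            versions.computeIfAbsent("http://host1:8080", address -> new HashMap<>())
                    .computeIfAbsent("Cluster_One", cluster -> new HashMap<>())
                    .put("zoo.cfg", "2");

            System.out.println(versions); // {http://host1:8080={Cluster_One={zoo.cfg=2}}}
        }
    }
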
http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
index f7f0553..d4dad95 100644
--- a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -119,26 +119,38 @@ public class AmbariServiceDiscoveryTest {
      */
     private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
 
+        final static String CLUSTER_PLACEHOLDER = TestRESTInvoker.CLUSTER_PLACEHOLDER;
+
+        TestAmbariServiceDiscovery(String clusterName) {
+            super(new TestRESTInvoker(clusterName));
+        }
+
+    }
+
+    private static final class TestRESTInvoker extends RESTInvoker {
+
         final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
 
         private Map<String, JSONObject> cannedResponses = new HashMap<>();
 
-        TestAmbariServiceDiscovery(String clusterName) {
+        TestRESTInvoker(String clusterName) {
+            super(null);
+
             cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
-                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                               clusterName)));
+                    (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                            clusterName)));
 
             cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
-                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                clusterName)));
+                    (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                            clusterName)));
 
             cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
-                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                     clusterName)));
+                    (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                            clusterName)));
         }
 
         @Override
-        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+        JSONObject invoke(String url, String username, String passwordAlias) {
             return cannedResponses.get(url.substring(url.indexOf("/api")));
         }
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-release/home/conf/gateway-site.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/gateway-site.xml b/gateway-release/home/conf/gateway-site.xml
index e06db72..fec5e87 100644
--- a/gateway-release/home/conf/gateway-site.xml
+++ b/gateway-release/home/conf/gateway-site.xml
@@ -73,4 +73,16 @@ limitations under the License.
         <description>Enable/Disable cookie scoping feature.</description>
     </property>
 
+    <property>
+        <name>gateway.cluster.config.monitor.ambari.enabled</name>
+        <value>false</value>
+        <description>Enable/disable Ambari cluster configuration monitoring.</description>
+    </property>
+
+    <property>
+        <name>gateway.cluster.config.monitor.ambari.interval</name>
+        <value>60</value>
+        <description>The interval (in seconds) for polling Ambari for cluster configuration changes.</description>
+    </property>
+
 </configuration>

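The interval property added above is expressed in seconds, and the polling thread converts it to milliseconds before sleeping between checks. The sketch below only illustrates that relationship with java.util.Properties and the property name from the diff; it is not the Knox GatewayConfig API.

    import java.util.Properties;

    public class PollingIntervalSketch {

        public static void main(String[] args) {
            // Placeholder for the gateway-site.xml settings shown above.
            Properties siteConfig = new Properties();
            siteConfig.setProperty("gateway.cluster.config.monitor.ambari.interval", "60");

            int intervalSeconds = Integer.parseInt(
                    siteConfig.getProperty("gateway.cluster.config.monitor.ambari.interval", "60"));
            long sleepMillis = intervalSeconds * 1000L; // the poller sleeps this long between checks

            System.out.println("Polling every " + sleepMillis + " ms");
        }
    }
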
http://git-wip-us.apache.org/repos/asf/knox/blob/a874f399/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
index ab0ab39..92b02ea 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -596,4 +596,20 @@ public interface GatewayMessages {
             text = "Correcting the suspect permissions for the remote configuration registry entry \"{0}\"." )
   void correctingSuspectWritableRemoteConfigurationEntry(String entryPath);
 
+  @Message(level = MessageLevel.INFO,
+           text = "A cluster configuration change was noticed for {1} @ {0}")
+  void noticedClusterConfigurationChange(final String source, final String clusterName);
+
+
+  @Message(level = MessageLevel.INFO,
+           text = "Triggering topology regeneration for descriptor {2} because of change to the {1} @ {0} configuration.")
+  void triggeringTopologyRegeneration(final String source, final String clusterName, final String affected);
+
+
+  @Message(level = MessageLevel.ERROR,
+           text = "Encountered an error while responding to {1} @ {0} configuration change: {2}")
+  void errorRespondingToConfigChange(final String source,
+                                     final String clusterName,
+                                     @StackTrace(level = MessageLevel.DEBUG) Exception e);
+
 }
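The three messages added above follow Knox's annotated-logging convention: each @Message method, when invoked, emits its templated text at the declared level with the arguments substituted for the {n} placeholders. Below is a hedged sketch of a caller, assuming the usual MessagesFactory lookup; the ClusterConfigChangeHandler class is a hypothetical stand-in, not part of this patch.

    import org.apache.hadoop.gateway.GatewayMessages;
    import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;

    // Hypothetical caller, shown only to illustrate how the new @Message methods are used.
    public class ClusterConfigChangeHandler {

        private static final GatewayMessages LOG = MessagesFactory.get(GatewayMessages.class);

        void onConfigurationChange(String ambariAddress, String clusterName) {
            // Emits "A cluster configuration change was noticed for {clusterName} @ {ambariAddress}"
            // at INFO level, per the annotation above.
            LOG.noticedClusterConfigurationChange(ambariAddress, clusterName);
        }
    }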


[37/49] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-server/src/main/java/org/apache/knox/gateway/util/KnoxCLI.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/util/KnoxCLI.java
index 9071a1d,0000000..9a87dd0
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/util/KnoxCLI.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/util/KnoxCLI.java
@@@ -1,1777 -1,0 +1,2154 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.util;
 +
 +import java.io.BufferedReader;
 +import java.io.Console;
 +import java.io.File;
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.io.InputStreamReader;
 +import java.io.PrintStream;
 +import java.net.InetAddress;
 +import java.net.UnknownHostException;
 +import java.security.cert.Certificate;
 +import java.util.Arrays;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Properties;
 +import java.util.UUID;
 +import javax.net.ssl.SSLContext;
 +import javax.net.ssl.SSLException;
 +
 +import org.apache.commons.codec.binary.Base64;
 +import org.apache.commons.io.FileUtils;
 +import org.apache.hadoop.conf.Configuration;
 +import org.apache.hadoop.conf.Configured;
 +import org.apache.knox.gateway.GatewayCommandLine;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.config.impl.GatewayConfigImpl;
 +import org.apache.knox.gateway.deploy.DeploymentFactory;
 +import org.apache.knox.gateway.services.CLIGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.Service;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
++import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClient;
++import org.apache.knox.gateway.services.config.client.RemoteConfigurationRegistryClientService;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.knox.gateway.services.security.KeystoreService;
 +import org.apache.knox.gateway.services.security.KeystoreServiceException;
 +import org.apache.knox.gateway.services.security.MasterService;
 +import org.apache.knox.gateway.services.security.impl.X509CertificateUtil;
 +import org.apache.knox.gateway.services.topology.TopologyService;
 +import org.apache.knox.gateway.topology.Provider;
 +import org.apache.knox.gateway.topology.Topology;
 +import org.apache.knox.gateway.topology.validation.TopologyValidator;
 +import org.apache.hadoop.util.Tool;
 +import org.apache.hadoop.util.ToolRunner;
 +import org.apache.http.client.ClientProtocolException;
 +import org.apache.http.client.methods.CloseableHttpResponse;
 +import org.apache.http.client.methods.HttpGet;
 +import org.apache.http.conn.ssl.SSLContexts;
 +import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
 +import org.apache.http.impl.client.CloseableHttpClient;
 +import org.apache.http.impl.client.HttpClients;
 +import org.apache.log4j.PropertyConfigurator;
 +import org.apache.shiro.SecurityUtils;
 +import org.apache.shiro.authc.AuthenticationException;
 +import org.apache.shiro.authc.UsernamePasswordToken;
 +import org.apache.shiro.config.ConfigurationException;
 +import org.apache.shiro.config.Ini;
 +import org.apache.shiro.config.IniSecurityManagerFactory;
 +import org.apache.shiro.subject.Subject;
 +import org.apache.shiro.util.Factory;
 +import org.apache.shiro.util.ThreadContext;
 +import org.eclipse.persistence.oxm.MediaType;
 +import org.jboss.shrinkwrap.api.exporter.ExplodedExporter;
 +import org.jboss.shrinkwrap.api.spec.EnterpriseArchive;
++
 +/**
 + *
 + */
 +public class KnoxCLI extends Configured implements Tool {
 +
 +  private static final String USAGE_PREFIX = "KnoxCLI {cmd} [options]";
 +  static final private String COMMANDS =
 +      "   [--help]\n" +
 +      "   [" + VersionCommand.USAGE + "]\n" +
 +      "   [" + MasterCreateCommand.USAGE + "]\n" +
 +      "   [" + CertCreateCommand.USAGE + "]\n" +
 +      "   [" + CertExportCommand.USAGE + "]\n" +
 +      "   [" + AliasCreateCommand.USAGE + "]\n" +
 +      "   [" + AliasDeleteCommand.USAGE + "]\n" +
 +      "   [" + AliasListCommand.USAGE + "]\n" +
 +      "   [" + RedeployCommand.USAGE + "]\n" +
 +      "   [" + ListTopologiesCommand.USAGE + "]\n" +
 +      "   [" + ValidateTopologyCommand.USAGE + "]\n" +
 +      "   [" + LDAPAuthCommand.USAGE + "]\n" +
 +      "   [" + LDAPSysBindCommand.USAGE + "]\n" +
-       "   [" + ServiceTestCommand.USAGE + "]\n";
++      "   [" + ServiceTestCommand.USAGE + "]\n" +
++      "   [" + RemoteRegistryClientsListCommand.USAGE + "]\n" +
++      "   [" + RemoteRegistryUploadProviderConfigCommand.USAGE + "]\n" +
++      "   [" + RemoteRegistryUploadDescriptorCommand.USAGE + "]\n" +
++      "   [" + RemoteRegistryDeleteProviderConfigCommand.USAGE + "]\n" +
++      "   [" + RemoteRegistryDeleteDescriptorCommand.USAGE + "]\n" +
++      "   [" + RemoteRegistryGetACLCommand.USAGE + "]\n";
 +
 +  /** allows stdout to be captured if necessary */
 +  public PrintStream out = System.out;
 +  /** allows stderr to be captured if necessary */
 +  public PrintStream err = System.err;
 +
 +  private static GatewayServices services = new CLIGatewayServices();
 +  private Command command;
 +  private String value = null;
 +  private String cluster = null;
 +  private String path = null;
 +  private String generate = "false";
 +  private String hostname = null;
 +  private String port = null;
 +  private boolean force = false;
 +  private boolean debug = false;
 +  private String user = null;
 +  private String pass = null;
 +  private boolean groups = false;
 +
++  private String remoteRegistryClient = null;
++  private String remoteRegistryEntryName = null;
++
 +  // For testing only
 +  private String master = null;
 +  private String type = null;
 +
 +  /* (non-Javadoc)
 +   * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
 +   */
 +  @Override
 +  public int run(String[] args) throws Exception {
 +    int exitCode = 0;
 +    try {
 +      exitCode = init(args);
 +      if (exitCode != 0) {
 +        return exitCode;
 +      }
 +      if (command != null && command.validate()) {
 +        initializeServices( command instanceof MasterCreateCommand );
 +        command.execute();
 +      } else if (!(command instanceof MasterCreateCommand)){
 +        out.println("ERROR: Invalid Command" + "\n" + "Unrecognized option:" +
 +            args[0] + "\n" +
 +            "A fatal exception has occurred. Program will exit.");
 +        exitCode = -2;
 +      }
 +    } catch (ServiceLifecycleException sle) {
 +      out.println("ERROR: Internal Error: Please refer to the knoxcli.log " +
 +          "file for details. " + sle.getMessage());
 +    } catch (Exception e) {
 +      e.printStackTrace( err );
 +      err.flush();
 +      return -3;
 +    }
 +    return exitCode;
 +  }
 +
 +  GatewayServices getGatewayServices() {
 +    return services;
 +  }
 +
 +  private void initializeServices(boolean persisting) throws ServiceLifecycleException {
 +    GatewayConfig config = getGatewayConfig();
 +    Map<String,String> options = new HashMap<>();
 +    options.put(GatewayCommandLine.PERSIST_LONG, Boolean.toString(persisting));
 +    if (master != null) {
 +      options.put("master", master);
 +    }
 +    services.init(config, options);
 +  }
 +
 +  /**
 +   * Parse the command line arguments and initialize the data
 +   * <pre>
 +   * % knoxcli version
 +   * % knoxcli list-topologies
 +   * % knoxcli master-create keyName [--size size] [--generate]
 +   * % knoxcli create-alias alias [--cluster clustername] [--generate] [--value v]
 +   * % knoxcli list-alias [--cluster clustername]
 +   * % knoxcli delete-alias alias [--cluster clustername]
 +   * % knoxcli create-cert alias [--hostname h]
 +   * % knoxcli redeploy [--cluster clustername]
 +   * % knoxcli validate-topology [--cluster clustername] | [--path <path/to/file>]
 +   * % knoxcli user-auth-test [--cluster clustername] [--u username] [--p password]
 +   * % knoxcli system-user-auth-test [--cluster clustername] [--d]
 +   * % knoxcli service-test [--u user] [--p password] [--cluster clustername] [--hostname name] [--port port]
-    *
++   * % knoxcli list-registry-clients
++   * % knoxcli get-registry-acl entryName --registry-client name
++   * % knoxcli upload-provider-config filePath --registry-client name [--entry-name entryName]
++   * % knoxcli upload-descriptor filePath --registry-client name [--entry-name entryName]
++   * % knoxcli delete-provider-config providerConfig --registry-client name
++   * % knoxcli delete-descriptor descriptor --registry-client name
 +   * </pre>
 +   * @param args
 +   * @return
 +   * @throws IOException
 +   */
 +  private int init(String[] args) throws IOException {
 +    if (args.length == 0) {
 +      printKnoxShellUsage();
 +      return -1;
 +    }
 +    for (int i = 0; i < args.length; i++) { // parse command line
 +      if (args[i].equals("create-master")) {
 +        command = new MasterCreateCommand();
 +        if ((args.length > i + 1) && args[i + 1].equals("--help")) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +      } else if (args[i].equals("delete-alias")) {
 +        String alias = null;
 +        if (args.length >= 2) {
 +          alias = args[++i];
 +        }
 +        command = new AliasDeleteCommand(alias);
 +        if (alias == null || alias.equals("--help")) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +      } else if (args[i].equals("create-alias")) {
 +        String alias = null;
 +        if (args.length >= 2) {
 +          alias = args[++i];
 +        }
 +        command = new AliasCreateCommand(alias);
 +        if (alias == null || alias.equals("--help")) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +      } else if (args[i].equals("create-cert")) {
 +        command = new CertCreateCommand();
 +        if ((args.length > i + 1) && args[i + 1].equals("--help")) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +      } else if (args[i].equals("export-cert")) {
 +        command = new CertExportCommand();
 +        if ((args.length > i + 1) && args[i + 1].equals("--help")) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +      }else if(args[i].equals("user-auth-test")) {
 +        if(i + 1 >= args.length) {
 +          printKnoxShellUsage();
 +          return -1;
 +        } else {
 +          command = new LDAPAuthCommand();
 +        }
 +      } else if(args[i].equals("system-user-auth-test")) {
 +        if (i + 1 >= args.length){
 +          printKnoxShellUsage();
 +          return -1;
 +        } else {
 +          command = new LDAPSysBindCommand();
 +        }
 +      } else if (args[i].equals("list-alias")) {
 +        command = new AliasListCommand();
 +      } else if (args[i].equals("--value")) {
 +        if( i+1 >= args.length || args[i+1].startsWith( "-" ) ) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +        this.value = args[++i];
 +        if ( command != null && command instanceof MasterCreateCommand ) {
 +          this.master = this.value;
 +        }
 +      } else if ( args[i].equals("version") ) {
 +        command = new VersionCommand();
 +      } else if ( args[i].equals("redeploy") ) {
 +        command = new RedeployCommand();
 +      } else if ( args[i].equals("validate-topology") ) {
 +        if(i + 1 >= args.length) {
 +          printKnoxShellUsage();
 +          return -1;
 +        } else {
 +          command = new ValidateTopologyCommand();
 +        }
 +      } else if( args[i].equals("list-topologies") ){
 +        command = new ListTopologiesCommand();
 +      }else if ( args[i].equals("--cluster") || args[i].equals("--topology") ) {
 +        if( i+1 >= args.length || args[i+1].startsWith( "-" ) ) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +        this.cluster = args[++i];
 +      } else if (args[i].equals("service-test")) {
-         if( i + 1 >= args[i].length()) {
++        if( i + 1 >= args.length) {
 +          printKnoxShellUsage();
 +          return -1;
 +        } else {
 +          command = new ServiceTestCommand();
 +        }
 +      } else if (args[i].equals("--generate")) {
 +        if ( command != null && command instanceof MasterCreateCommand ) {
 +          this.master = UUID.randomUUID().toString();
 +        } else {
 +          this.generate = "true";
 +        }
 +      } else if(args[i].equals("--type")) {
 +        if( i+1 >= args.length || args[i+1].startsWith( "-" ) ) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +        this.type = args[++i];
 +      } else if(args[i].equals("--path")) {
 +        if( i+1 >= args.length || args[i+1].startsWith( "-" ) ) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +        this.path = args[++i];
 +      }else if (args[i].equals("--hostname")) {
 +        if( i+1 >= args.length || args[i+1].startsWith( "-" ) ) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +        this.hostname = args[++i];
 +      } else if (args[i].equals("--port")) {
 +        if( i+1 >= args.length || args[i+1].startsWith( "-" ) ) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +        this.port = args[++i];
 +      } else if (args[i].equals("--master")) {
 +        // For testing only
 +        if( i+1 >= args.length || args[i+1].startsWith( "-" ) ) {
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +        this.master = args[++i];
 +      } else if (args[i].equals("--force")) {
 +        this.force = true;
 +      } else if (args[i].equals("--help")) {
 +        printKnoxShellUsage();
 +        return -1;
 +      } else if(args[i].equals("--d")) {
 +        this.debug = true;
 +      } else if(args[i].equals("--u")) {
 +        if(i + 1 <= args.length) {
 +          this.user = args[++i];
 +        } else{
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +      } else if(args[i].equals("--p")) {
 +        if(i + 1 <= args.length) {
 +          this.pass = args[++i];
 +        } else{
 +          printKnoxShellUsage();
 +          return -1;
 +        }
 +      } else if (args[i].equals("--g")) {
 +        this.groups = true;
++      } else if (args[i].equals("list-registry-clients")) {
++        command = new RemoteRegistryClientsListCommand();
++      } else if (args[i].equals("--registry-client")) {
++        if (i + 1 >= args.length || args[i + 1].startsWith("-")) {
++          printKnoxShellUsage();
++          return -1;
++        }
++        this.remoteRegistryClient = args[++i];
++      } else if (args[i].equalsIgnoreCase("upload-provider-config")) {
++        String fileName;
++        if (i <= (args.length - 1)) {
++          fileName = args[++i];
++          command = new RemoteRegistryUploadProviderConfigCommand(fileName);
++        } else {
++          printKnoxShellUsage();
++          return -1;
++        }
++      } else if (args[i].equals("upload-descriptor")) {
++        String fileName;
++        if (i <= (args.length - 1)) {
++          fileName = args[++i];
++          command = new RemoteRegistryUploadDescriptorCommand(fileName);
++        } else {
++          printKnoxShellUsage();
++          return -1;
++        }
++      } else if (args[i].equals("--entry-name")) {
++        if (i <= (args.length - 1)) {
++          remoteRegistryEntryName = args[++i];
++        } else {
++          printKnoxShellUsage();
++          return -1;
++        }
++      } else if (args[i].equals("delete-descriptor")) {
++        if (i <= (args.length - 1)) {
++          String entry = args[++i];
++          command = new RemoteRegistryDeleteDescriptorCommand(entry);
++        } else {
++          printKnoxShellUsage();
++          return -1;
++        }
++      } else if (args[i].equals("delete-provider-config")) {
++        if (i <= (args.length - 1)) {
++          String entry = args[++i];
++          command = new RemoteRegistryDeleteProviderConfigCommand(entry);
++        } else {
++          printKnoxShellUsage();
++          return -1;
++        }
++      } else if (args[i].equalsIgnoreCase("get-registry-acl")) {
++        if (i <= (args.length - 1)) {
++          String entry = args[++i];
++          command = new RemoteRegistryGetACLCommand(entry);
++        } else {
++          printKnoxShellUsage();
++          return -1;
++        }
 +      } else {
 +        printKnoxShellUsage();
 +        //ToolRunner.printGenericCommandUsage(System.err);
 +        return -1;
 +      }
 +    }
 +    return 0;
 +  }
 +
 +  private void printKnoxShellUsage() {
 +    out.println( USAGE_PREFIX + "\n" + COMMANDS );
 +    if ( command != null ) {
 +      out.println(command.getUsage());
 +    } else {
 +      char[] chars = new char[79];
 +      Arrays.fill( chars, '=' );
 +      String div = new String( chars );
 +
 +      out.println( div );
 +      out.println( VersionCommand.USAGE + "\n\n" + VersionCommand.DESC );
 +      out.println();
 +      out.println( div );
 +      out.println( MasterCreateCommand.USAGE + "\n\n" + MasterCreateCommand.DESC );
 +      out.println();
 +      out.println( div );
 +      out.println( CertCreateCommand.USAGE + "\n\n" + CertCreateCommand.DESC );
 +      out.println();
 +      out.println( div );
 +      out.println( CertExportCommand.USAGE + "\n\n" + CertExportCommand.DESC );
 +      out.println();
 +      out.println( div );
 +      out.println( AliasCreateCommand.USAGE + "\n\n" + AliasCreateCommand.DESC );
 +      out.println();
 +      out.println( div );
 +      out.println( AliasDeleteCommand.USAGE + "\n\n" + AliasDeleteCommand.DESC );
 +      out.println();
 +      out.println( div );
 +      out.println( AliasListCommand.USAGE + "\n\n" + AliasListCommand.DESC );
 +      out.println();
 +      out.println( div );
 +      out.println( RedeployCommand.USAGE + "\n\n" + RedeployCommand.DESC );
 +      out.println();
 +      out.println( div );
 +      out.println(ValidateTopologyCommand.USAGE + "\n\n" + ValidateTopologyCommand.DESC);
 +      out.println();
 +      out.println( div );
 +      out.println(ListTopologiesCommand.USAGE + "\n\n" + ListTopologiesCommand.DESC);
 +      out.println();
 +      out.println( div );
 +      out.println(LDAPAuthCommand.USAGE + "\n\n" + LDAPAuthCommand.DESC);
 +      out.println();
 +      out.println( div );
 +      out.println(LDAPSysBindCommand.USAGE + "\n\n" + LDAPSysBindCommand.DESC);
 +      out.println();
 +      out.println( div );
 +      out.println(ServiceTestCommand.USAGE + "\n\n" + ServiceTestCommand.DESC);
 +      out.println();
 +      out.println( div );
++      out.println(RemoteRegistryClientsListCommand.USAGE + "\n\n" + RemoteRegistryClientsListCommand.DESC);
++      out.println();
++      out.println( div );
++      out.println(RemoteRegistryGetACLCommand.USAGE + "\n\n" + RemoteRegistryGetACLCommand.DESC);
++      out.println();
++      out.println( div );
++      out.println(RemoteRegistryUploadProviderConfigCommand.USAGE + "\n\n" + RemoteRegistryUploadProviderConfigCommand.DESC);
++      out.println();
++      out.println( div );
++      out.println(RemoteRegistryUploadDescriptorCommand.USAGE + "\n\n" + RemoteRegistryUploadDescriptorCommand.DESC);
++      out.println();
++      out.println( div );
++      out.println(RemoteRegistryDeleteProviderConfigCommand.USAGE + "\n\n" + RemoteRegistryDeleteProviderConfigCommand.DESC);
++      out.println();
++      out.println( div );
++      out.println(RemoteRegistryDeleteDescriptorCommand.USAGE + "\n\n" + RemoteRegistryDeleteDescriptorCommand.DESC);
++      out.println();
++      out.println( div );
 +    }
 +  }
 +
 +  private abstract class Command {
 +
 +    public boolean validate() {
 +      return true;
 +    }
 +
 +    protected Service getService(String serviceName) {
 +      Service service = null;
 +
 +      return service;
 +    }
 +
 +    public abstract void execute() throws Exception;
 +
 +    public abstract String getUsage();
 +
 +    protected AliasService getAliasService() {
 +      AliasService as = services.getService(GatewayServices.ALIAS_SERVICE);
 +      return as;
 +    }
 +
 +    protected KeystoreService getKeystoreService() {
 +      KeystoreService ks = services.getService(GatewayServices.KEYSTORE_SERVICE);
 +      return ks;
 +    }
 +
 +    protected TopologyService getTopologyService()  {
 +      TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
 +      return ts;
 +    }
++
++    protected RemoteConfigurationRegistryClientService getRemoteConfigRegistryClientService() {
++      return services.getService(GatewayServices.REMOTE_REGISTRY_CLIENT_SERVICE);
++    }
++
 +  }
 +
 + private class AliasListCommand extends Command {
 +
 +  public static final String USAGE = "list-alias [--cluster clustername]";
 +  public static final String DESC = "The list-alias command lists all of the aliases\n" +
 +                                    "for the given hadoop --cluster. The default\n" +
 +                                    "--cluster being the gateway itself.";
 +
 +   /* (non-Javadoc)
 +    * @see KnoxCLI.Command#execute()
 +    */
 +   @Override
 +   public void execute() throws Exception {
 +     AliasService as = getAliasService();
 +      KeystoreService keystoreService = getKeystoreService();
 +
 +     if (cluster == null) {
 +       cluster = "__gateway";
 +     }
 +      boolean credentialStoreForClusterAvailable =
 +          keystoreService.isCredentialStoreForClusterAvailable(cluster);
 +      if (credentialStoreForClusterAvailable) {
 +        out.println("Listing aliases for: " + cluster);
 +        List<String> aliases = as.getAliasesForCluster(cluster);
 +        for (String alias : aliases) {
 +          out.println(alias);
 +        }
 +        out.println("\n" + aliases.size() + " items.");
 +      } else {
 +        out.println("Invalid cluster name provided: " + cluster);
 +      }
 +   }
 +
 +   /* (non-Javadoc)
-     * @see KnoxCLI.Command#getUsage()
++    * @see org.apache.knox.gateway.util.KnoxCLI.Command#getUsage()
 +    */
 +   @Override
 +   public String getUsage() {
 +     return USAGE + ":\n\n" + DESC;
 +   }
 + }
 +
 + public class CertExportCommand extends Command {
 +
 +   public static final String USAGE = "export-cert";
 +   public static final String DESC = "The export-cert command exports the public certificate\n" +
 +                                     "from the a gateway.jks keystore with the alias of gateway-identity.";
 +   private static final String GATEWAY_CREDENTIAL_STORE_NAME = "__gateway";
 +   private static final String GATEWAY_IDENTITY_PASSPHRASE = "gateway-identity-passphrase";
 +
 +    public CertExportCommand() {
 +    }
 +
 +    private GatewayConfig getGatewayConfig() {
 +      GatewayConfig result;
 +      Configuration conf = getConf();
 +      if( conf != null && conf instanceof GatewayConfig ) {
 +        result = (GatewayConfig)conf;
 +      } else {
 +        result = new GatewayConfigImpl();
 +      }
 +      return result;
 +    }
 +
 +    /* (non-Javadoc)
-      * @see KnoxCLI.Command#execute()
++     * @see org.apache.knox.gateway.util.KnoxCLI.Command#execute()
 +     */
 +    @Override
 +    public void execute() throws Exception {
 +      KeystoreService ks = getKeystoreService();
 +
 +      AliasService as = getAliasService();
 +
 +      if (ks != null) {
 +        try {
 +          if (!ks.isKeystoreForGatewayAvailable()) {
 +            out.println("No keystore has been created for the gateway. Please use the create-cert command or populate with a CA signed cert of your own.");
 +          }
 +          char[] passphrase = as.getPasswordFromAliasForCluster(GATEWAY_CREDENTIAL_STORE_NAME, GATEWAY_IDENTITY_PASSPHRASE);
 +          if (passphrase == null) {
 +            MasterService ms = services.getService("MasterService");
 +            passphrase = ms.getMasterSecret();
 +          }
 +          Certificate cert = ks.getKeystoreForGateway().getCertificate("gateway-identity");
 +          String keyStoreDir = getGatewayConfig().getGatewaySecurityDir() + File.separator + "keystores" + File.separator;
 +          File ksd = new File(keyStoreDir);
 +          if (!ksd.exists()) {
 +            if( !ksd.mkdirs() ) {
 +              // certainly should not happen if the keystore is known to be available
 +              throw new ServiceLifecycleException("Unable to create keystores directory" + ksd.getAbsolutePath());
 +            }
 +          }
 +          if ("PEM".equals(type) || type == null) {
 +            X509CertificateUtil.writeCertificateToFile(cert, new File(keyStoreDir + "gateway-identity.pem"));
 +            out.println("Certificate gateway-identity has been successfully exported to: " + keyStoreDir + "gateway-identity.pem");
 +          }
 +          else if ("JKS".equals(type)) {
 +            X509CertificateUtil.writeCertificateToJKS(cert, new File(keyStoreDir + "gateway-client-trust.jks"));
 +            out.println("Certificate gateway-identity has been successfully exported to: " + keyStoreDir + "gateway-client-trust.jks");
 +          }
 +          else {
 +            out.println("Invalid type for export file provided. Export has not been done. Please use: [PEM|JKS] default value is PEM.");
 +          }
 +        } catch (KeystoreServiceException e) {
 +          throw new ServiceLifecycleException("Keystore was not loaded properly - the provided (or persisted) master secret may not match the password for the keystore.", e);
 +        }
 +      }
 +    }
 +
 +    /* (non-Javadoc)
-      * @see KnoxCLI.Command#getUsage()
++     * @see org.apache.knox.gateway.util.KnoxCLI.Command#getUsage()
 +     */
 +    @Override
 +    public String getUsage() {
 +      return USAGE + ":\n\n" + DESC;
 +    }
 +  }
 +
 + public class CertCreateCommand extends Command {
 +
 +  public static final String USAGE = "create-cert [--hostname h]";
 +  public static final String DESC = "The create-cert command creates and populates\n" +
 +                                    "a gateway.jks keystore with a self-signed certificate\n" +
 +                                    "to be used as the gateway identity. It also adds an alias\n" +
 +                                    "to the __gateway-credentials.jceks credential store for the\n" +
 +                                    "key passphrase.";
 +  private static final String GATEWAY_CREDENTIAL_STORE_NAME = "__gateway";
 +  private static final String GATEWAY_IDENTITY_PASSPHRASE = "gateway-identity-passphrase";
 +
 +   public CertCreateCommand() {
 +   }
 +
 +   /* (non-Javadoc)
-     * @see KnoxCLI.Command#execute()
++    * @see org.apache.knox.gateway.util.KnoxCLI.Command#execute()
 +    */
 +   @Override
 +   public void execute() throws Exception {
 +     KeystoreService ks = getKeystoreService();
 +
 +     AliasService as = getAliasService();
 +
 +     if (ks != null) {
 +       try {
 +         if (!ks.isCredentialStoreForClusterAvailable(GATEWAY_CREDENTIAL_STORE_NAME)) {
 +//           log.creatingCredentialStoreForGateway();
 +           ks.createCredentialStoreForCluster(GATEWAY_CREDENTIAL_STORE_NAME);
 +         }
 +         else {
 +//           log.credentialStoreForGatewayFoundNotCreating();
 +         }
 +         // LET'S NOT GENERATE A DIFFERENT KEY PASSPHRASE BY DEFAULT ANYMORE
 +         // IF A DEPLOYMENT WANTS TO CHANGE THE KEY PASSPHRASE TO MAKE IT MORE SECURE THEN
 +         // THEY CAN ADD THE ALIAS EXPLICITLY WITH THE CLI
 +         //as.generateAliasForCluster(GATEWAY_CREDENTIAL_STORE_NAME, GATEWAY_IDENTITY_PASSPHRASE);
 +       } catch (KeystoreServiceException e) {
 +         throw new ServiceLifecycleException("Keystore was not loaded properly - the provided (or persisted) master secret may not match the password for the keystore.", e);
 +       }
 +
 +       try {
 +         if (!ks.isKeystoreForGatewayAvailable()) {
 +//           log.creatingKeyStoreForGateway();
 +           ks.createKeystoreForGateway();
 +         }
 +         else {
 +//           log.keyStoreForGatewayFoundNotCreating();
 +         }
 +         char[] passphrase = as.getPasswordFromAliasForCluster(GATEWAY_CREDENTIAL_STORE_NAME, GATEWAY_IDENTITY_PASSPHRASE);
 +         if (passphrase == null) {
 +           MasterService ms = services.getService("MasterService");
 +           passphrase = ms.getMasterSecret();
 +         }
 +         ks.addSelfSignedCertForGateway("gateway-identity", passphrase, hostname);
 +//         logAndValidateCertificate();
 +         out.println("Certificate gateway-identity has been successfully created.");
 +       } catch (KeystoreServiceException e) {
 +         throw new ServiceLifecycleException("Keystore was not loaded properly - the provided (or persisted) master secret may not match the password for the keystore.", e);
 +       }
 +     }
 +   }
 +
 +   /* (non-Javadoc)
-     * @see KnoxCLI.Command#getUsage()
++    * @see org.apache.knox.gateway.util.KnoxCLI.Command#getUsage()
 +    */
 +   @Override
 +   public String getUsage() {
 +     return USAGE + ":\n\n" + DESC;
 +   }
 +
 + }
 +
 + public class AliasCreateCommand extends Command {
 +
 +  public static final String USAGE = "create-alias aliasname [--cluster clustername] " +
 +                                     "[ (--value v) | (--generate) ]";
 +  public static final String DESC = "The create-alias command will create an alias\n"
 +                                       + "and secret pair within the credential store for the\n"
 +                                       + "indicated --cluster otherwise within the gateway\n"
 +                                       + "credential store. The actual secret may be specified via\n"
 +                                       + "the --value option or --generate (will create a random secret\n"
 +                                       + "for you) or user will be prompt to provide password.";
 +
 +  private String name = null;
 +
 +  /**
 +    * @param alias
 +    */
 +   public AliasCreateCommand(String alias) {
 +     name = alias;
 +   }
 +
 +   /* (non-Javadoc)
-     * @see KnoxCLI.Command#execute()
++    * @see org.apache.knox.gateway.util.KnoxCLI.Command#execute()
 +    */
 +   @Override
 +   public void execute() throws Exception {
 +     AliasService as = getAliasService();
 +     if (cluster == null) {
 +       cluster = "__gateway";
 +     }
 +     if (value != null) {
 +       as.addAliasForCluster(cluster, name, value);
 +       out.println(name + " has been successfully created.");
 +     }
 +     else {
 +       if ("true".equals(generate)) {
 +         as.generateAliasForCluster(cluster, name);
 +         out.println(name + " has been successfully generated.");
 +       }
 +       else {
 +          value = new String(promptUserForPassword());
 +          as.addAliasForCluster(cluster, name, value);
 +          out.println(name + " has been successfully created.");
 +       }
 +     }
 +   }
 +
 +   /* (non-Javadoc)
-     * @see KnoxCLI.Command#getUsage()
++    * @see org.apache.knox.gateway.util.KnoxCLI.Command#getUsage()
 +    */
 +   @Override
 +   public String getUsage() {
 +     return USAGE + ":\n\n" + DESC;
 +   }
 +
 +    protected char[] promptUserForPassword() {
 +      char[] password = null;
 +      Console c = System.console();
 +      if (c == null) {
 +        System.err
 +            .println("No console to fetch password from user.Consider setting via --generate or --value.");
 +        System.exit(1);
 +      }
 +
 +      boolean noMatch;
 +      do {
 +        char[] newPassword1 = c.readPassword("Enter password: ");
 +        char[] newPassword2 = c.readPassword("Enter password again: ");
 +        noMatch = !Arrays.equals(newPassword1, newPassword2);
 +        if (noMatch) {
 +          c.format("Passwords don't match. Try again.%n");
 +        } else {
 +          password = Arrays.copyOf(newPassword1, newPassword1.length);
 +        }
 +        Arrays.fill(newPassword1, ' ');
 +        Arrays.fill(newPassword2, ' ');
 +      } while (noMatch);
 +      return password;
 +    }
 +
 + }
 +
 + /**
 +  *
 +  */
 + public class AliasDeleteCommand extends Command {
 +  public static final String USAGE = "delete-alias aliasname [--cluster clustername]";
 +  public static final String DESC = "The delete-alias command removes the\n" +
 +                                    "indicated alias from the --cluster specific\n" +
 +                                    "credential store or the gateway credential store.";
 +
 +  private String name = null;
 +
 +  /**
 +    * @param alias
 +    */
 +   public AliasDeleteCommand(String alias) {
 +     name = alias;
 +   }
 +
 +   /* (non-Javadoc)
-     * @see KnoxCLI.Command#execute()
++    * @see org.apache.knox.gateway.util.KnoxCLI.Command#execute()
 +    */
 +   @Override
 +   public void execute() throws Exception {
 +     AliasService as = getAliasService();
 +      KeystoreService keystoreService = getKeystoreService();
 +     if (as != null) {
 +       if (cluster == null) {
 +         cluster = "__gateway";
 +       }
 +        boolean credentialStoreForClusterAvailable =
 +            keystoreService.isCredentialStoreForClusterAvailable(cluster);
 +        if (credentialStoreForClusterAvailable) {
 +          List<String> aliasesForCluster = as.getAliasesForCluster(cluster);
 +          if (null == aliasesForCluster || !aliasesForCluster.contains(name)) {
 +            out.println("Deletion of Alias: " + name + " from cluster: " + cluster + " Failed. "
 +                + "\n" + "No such alias exists in the cluster.");
 +          } else {
 +            as.removeAliasForCluster(cluster, name);
 +            out.println(name + " has been successfully deleted.");
 +          }
 +        } else {
 +          out.println("Invalid cluster name provided: " + cluster);
 +        }
 +     }
 +   }
 +
 +   /* (non-Javadoc)
-     * @see KnoxCLI.Command#getUsage()
++    * @see org.apache.knox.gateway.util.KnoxCLI.Command#getUsage()
 +    */
 +   @Override
 +   public String getUsage() {
 +     return USAGE + ":\n\n" + DESC;
 +   }
 +
 + }
 +
 + /**
 +  *
 +  */
 + public class MasterCreateCommand extends Command {
 +  public static final String USAGE = "create-master [--force]";
 +  public static final String DESC = "The create-master command persists the\n" +
 +                                    "master secret in a file located at:\n" +
 +                                    "{GATEWAY_HOME}/data/security/master. It\n" +
 +                                    "will prompt the user for the secret to persist.\n" +
 +                                    "Use --force to overwrite the master secret.";
 +
 +   public MasterCreateCommand() {
 +   }
 +
 +   private GatewayConfig getGatewayConfig() {
 +     GatewayConfig result;
 +     Configuration conf = getConf();
 +     if( conf != null && conf instanceof GatewayConfig ) {
 +       result = (GatewayConfig)conf;
 +     } else {
 +       result = new GatewayConfigImpl();
 +     }
 +     return result;
 +   }
 +
 +   public boolean validate() {
 +     boolean valid = true;
 +     GatewayConfig config = getGatewayConfig();
 +     File dir = new File( config.getGatewaySecurityDir() );
 +     File file = new File( dir, "master" );
 +     if( file.exists() ) {
 +       if( force ) {
 +         if( !file.canWrite() ) {
 +           out.println(
 +               "This command requires write permissions on the master secret file: " +
 +                   file.getAbsolutePath() );
 +           valid = false;
 +         } else if( !file.canWrite() ) {
 +           out.println(
 +               "This command requires write permissions on the master secret file: " +
 +                   file.getAbsolutePath() );
 +           valid = false;
 +         } else {
 +           valid = file.delete();
 +           if( !valid ) {
 +             out.println(
 +                 "Unable to delete the master secret file: " +
 +                     file.getAbsolutePath() );
 +           }
 +         }
 +       } else {
 +         out.println(
 +             "Master secret is already present on disk. " +
 +                 "Please be aware that overwriting it will require updating other security artifacts. " +
 +                 " Use --force to overwrite the existing master secret." );
 +         valid = false;
 +       }
 +     } else if( dir.exists() && !dir.canWrite() ) {
 +       out.println(
 +           "This command requires write permissions on the security directory: " +
 +               dir.getAbsolutePath() );
 +       valid = false;
 +     }
 +     return valid;
 +   }
 +
 +   /* (non-Javadoc)
-     * @see KnoxCLI.Command#execute()
++    * @see org.apache.knox.gateway.util.KnoxCLI.Command#execute()
 +    */
 +   @Override
 +   public void execute() throws Exception {
 +     out.println("Master secret has been persisted to disk.");
 +   }
 +
 +   /* (non-Javadoc)
-     * @see KnoxCLI.Command#getUsage()
++    * @see org.apache.knox.gateway.util.KnoxCLI.Command#getUsage()
 +    */
 +   @Override
 +   public String getUsage() {
 +     return USAGE + ":\n\n" + DESC;
 +   }
 + }
 +
 +  private class VersionCommand extends Command {
 +
 +    public static final String USAGE = "version";
 +    public static final String DESC = "Displays Knox version information.";
 +
 +    @Override
 +    public void execute() throws Exception {
 +      Properties buildProperties = loadBuildProperties();
 +      System.out.println(
 +          String.format(
 +              "Apache Knox: %s (%s)",
 +              buildProperties.getProperty( "build.version", "unknown" ),
 +              buildProperties.getProperty( "build.hash", "unknown" ) ) );
 +    }
 +
 +    @Override
 +    public String getUsage() {
 +      return USAGE + ":\n\n" + DESC;
 +    }
 +
 +  }
 +
 +  private class RedeployCommand extends Command {
 +
 +    public static final String USAGE = "redeploy [--cluster clustername]";
 +    public static final String DESC =
 +        "Redeploys one or all of the gateway's clusters (a.k.a topologies).";
 +
 +    @Override
 +    public void execute() throws Exception {
 +      TopologyService ts = getTopologyService();
 +      ts.reloadTopologies();
 +      if (cluster != null) {
 +        if (validateClusterName(cluster, ts)) {
 +          ts.redeployTopologies(cluster);
 +        }
 +        else {
 +          out.println("Invalid cluster name provided. Nothing to redeploy.");
 +        }
 +      }
 +    }
 +
 +    /**
 +     * @param cluster
 +     * @param ts
 +     */
 +    private boolean validateClusterName(String cluster, TopologyService ts) {
 +      boolean valid = false;
 +      for (Topology t : ts.getTopologies() ) {
 +        if (t.getName().equals(cluster)) {
 +          valid = true;
 +          break;
 +        }
 +      }
 +      return valid;
 +    }
 +
 +    @Override
 +    public String getUsage() {
 +      return USAGE + ":\n\n" + DESC;
 +    }
 +
 +  }
 +
 +  private class ValidateTopologyCommand extends Command {
 +
 +    public static final String USAGE = "validate-topology [--cluster clustername] | [--path \"path/to/file\"]";
 +    public static final String DESC = "Ensures that a cluster's description (a.k.a topology) \n" +
 +        "follows the correct formatting rules.\n" +
 +        "use the list-topologies command to get a list of available cluster names";
 +    private String file = "";
 +
 +    @Override
 +    public String getUsage() {
 +      return USAGE + ":\n\n" + DESC;
 +    }
 +
 +    public void execute() throws Exception {
 +      GatewayConfig gc = getGatewayConfig();
 +      String topDir = gc.getGatewayTopologyDir();
 +
 +      if(path != null) {
 +        file = path;
 +      } else if(cluster == null) {
 +        // The following block of code retrieves the list of files in the topologies directory
 +        File tops = new File(topDir + "/topologies");
 +        if(tops.isDirectory()) {
 +          out.println("List of files available in the topologies directory");
 +          for (File f : tops.listFiles()) {
 +            if(f.getName().endsWith(".xml")) {
 +              String fName = f.getName().replace(".xml", "");
 +              out.println(fName);
 +            }
 +          }
 +          return;
 +        } else {
 +          out.println("Could not locate topologies directory");
 +          return;
 +        }
 +
 +      } else {
 +        file = topDir + "/" + cluster + ".xml";
 +      }
 +
 +      // The following block checks a topology against the XSD
 +      out.println();
 +      out.println("File to be validated: ");
 +      out.println(file);
 +      out.println("==========================================");
 +
 +      if(new File(file).exists()) {
 +        TopologyValidator tv = new TopologyValidator(file);
 +
 +        if(tv.validateTopology()) {
 +          out.println("Topology file validated successfully");
 +        } else {
 +          out.println(tv.getErrorString()) ;
 +          out.println("Topology validation unsuccessful");
 +        }
 +      } else {
 +        out.println("The topology file specified does not exist.");
 +      }
 +    }
 +
 +  }
 +
 +  private class ListTopologiesCommand extends Command {
 +
 +    public static final String USAGE = "list-topologies";
 +    public static final String DESC = "Retrieves a list of the available topologies within the\n" +
 +        "default topologies directory. Will return topologies that may not be deployed due\n" +
 +        "errors in file formatting.";
 +
 +    @Override
 +    public String getUsage() {
 +      return USAGE + ":\n\n" + DESC;
 +    }
 +
 +    @Override
 +    public void execute() {
 +
 +      String confDir = getGatewayConfig().getGatewayConfDir();
 +      File tops = new File(confDir + "/topologies");
 +      out.println("List of files available in the topologies directory");
 +      out.println(tops.toString());
 +      if(tops.isDirectory()) {
 +        for (File f : tops.listFiles()) {
 +          if(f.getName().endsWith(".xml")) {
 +            String fName = f.getName().replace(".xml", "");
 +            out.println(fName);
 +          }
 +        }
 +        return;
 +      } else {
 +        out.println("ERR: Topologies directory does not exist.");
 +        return;
 +      }
 +
 +    }
 +
 +  }
 +
 +  private class LDAPCommand extends Command {
 +
 +    public static final String USAGE = "ldap-command";
 +    public static final String DESC = "This is an internal command. It should not be used.";
 +    protected String username = null;
 +    protected char[] password = null;
 +    protected static final String debugMessage = "For more information use --d for debug output.";
 +    protected Topology topology;
 +
 +    @Override
 +    public String getUsage() {
 +      return USAGE + ":\n\n" + DESC;
 +    }
 +
 +    @Override
 +    public void execute() {
 +      out.println("This command does not have any functionality.");
 +    }
 +
 +
 +//    First define a few Exceptions
 +    protected class NoSuchTopologyException extends Exception {
 +      public NoSuchTopologyException() {}
 +      public NoSuchTopologyException(String message) { super(message); }
 +    }
 +    protected class MissingPasswordException extends Exception {
 +      public MissingPasswordException() {}
 +      public MissingPasswordException(String message) { super(message); }
 +    }
 +
 +    protected class MissingUsernameException extends Exception {
 +      public MissingUsernameException() {}
 +      public MissingUsernameException(String message) { super(message); }
 +    }
 +
 +    protected class BadSubjectException extends Exception {
 +      public BadSubjectException() {}
 +      public BadSubjectException(String message) { super(message); }
 +    }
 +
 +    protected class NoSuchProviderException extends Exception {
 +      public NoSuchProviderException() {}
 +      public NoSuchProviderException(String name, String role, String topology) {
 +        super("Could not find provider with role: " + role + ", name: " + name + " inside of topology: " + topology);
 +      }
 +    }
 +
 +    //    returns false if any errors are printed
 +    protected boolean hasShiroProviderErrors(Topology topology, boolean groupLookup) {
 +//      First let's define the variables that represent the ShiroProvider params
 +      String mainLdapRealm = "main.ldapRealm";
 +      String contextFactory = mainLdapRealm + ".contextFactory";
 +      String groupContextFactory = "main.ldapGroupContextFactory";
 +      String authorizationEnabled = mainLdapRealm + ".authorizationEnabled";
 +      String userSearchAttributeName = mainLdapRealm + ".userSearchAttributeName";
 +      String userObjectClass = mainLdapRealm + ".userObjectClass";
 +      String authenticationMechanism = mainLdapRealm + ".authenticationMechanism"; // Should not be used up to v0.6.0
 +      String searchBase = mainLdapRealm + ".searchBase";
 +      String groupSearchBase = mainLdapRealm + ".groupSearchBase";
 +      String userSearchBase = mainLdapRealm + ".userSearchBase";
 +      String groupObjectClass = mainLdapRealm + ".groupObjectClass";
 +      String memberAttribute = mainLdapRealm + ".memberAttribute";
 +      String memberAttributeValueTemplate = mainLdapRealm + ".memberAttributeValueTemplate";
 +      String systemUsername = contextFactory + ".systemUsername";
 +      String systemPassword = contextFactory + ".systemPassword";
 +      String url = contextFactory + ".url";
 +      String userDnTemplate = mainLdapRealm + ".userDnTemplate";
 +
 +
 +      Provider shiro = topology.getProvider("authentication", "ShiroProvider");
 +      if(shiro != null) {
 +        Map<String, String> params = shiro.getParams();
 +        int errs = 0;
 +        if(groupLookup) {
 +          int errors = 0;
 +          errors += hasParam(params, groupContextFactory, true) ? 0 : 1;
 +          errors += hasParam(params, groupObjectClass, true) ? 0 : 1;
 +          errors += hasParam(params, memberAttributeValueTemplate, true) ? 0 : 1;
 +          errors += hasParam(params, memberAttribute, true) ? 0 : 1;
 +          errors += hasParam(params, authorizationEnabled, true) ? 0 : 1;
 +          errors += hasParam(params, systemUsername, true) ? 0 : 1;
 +          errors += hasParam(params, systemPassword, true) ? 0 : 1;
 +          errors += hasParam(params, userSearchBase, true) ? 0 : 1;
 +          errors += hasParam(params, groupSearchBase, true) ? 0 : 1;
 +          errs += errors;
 +
 +        } else {
 +
 +//        Realm + Url is always required.
 +          errs += hasParam(params, mainLdapRealm, true) ? 0 : 1;
 +          errs += hasParam(params, url, true) ? 0 : 1;
 +
 +          if(hasParam(params, authorizationEnabled, false)) {
 +            int errors = 0;
 +            int searchBaseErrors = 0;
 +            errors += hasParam(params, systemUsername, true) ? 0 : 1;
 +            errors += hasParam(params, systemPassword, true) ? 0 : 1;
 +            searchBaseErrors += hasParam(params, searchBase, false) ? 0 : hasParam(params, userSearchBase, false) ? 0 : 1;
 +            if (searchBaseErrors > 0) {
 +              out.println("Warn: Both " + searchBase + " and " + userSearchBase + " are missing from the topology");
 +            }
 +            errors += searchBaseErrors;
 +            errs += errors;
 +          }
 +
 +//        If any one of these is present they must all be present
 +          if( hasParam(params, userSearchAttributeName, false) ||
 +              hasParam(params, userObjectClass, false) ||
 +              hasParam(params, searchBase, false) ||
 +              hasParam(params, userSearchBase, false)) {
 +
 +            int errors = 0;
 +            errors += hasParam(params, userSearchAttributeName, true) ? 0 : 1;
 +            errors += hasParam(params, userObjectClass, true) ? 0 : 1;
 +            errors += hasParam(params, searchBase, false) ? 0 : hasParam(params, userSearchBase, false) ? 0 : 1;
 +            errors += hasParam(params, systemUsername, true) ? 0 : 1;
 +            errors += hasParam(params, systemPassword, true) ? 0 : 1;
 +
 +            if(errors > 0) {
 +              out.println(userSearchAttributeName + " or " + userObjectClass + " or " + searchBase + " or " + userSearchBase + " was found in the topology");
 +              out.println("If any one of the above params is present then " + userSearchAttributeName + 
 +                  " and " + userObjectClass + " must both be present and either " + searchBase + " or " + userSearchBase + " must also be present.");
 +            }
 +            errs += errors;
 +          } else {
 +            errs += hasParam(params, userDnTemplate, true) ?  0 : 1;
 +
 +          }
 +        }
 +        return (errs > 0);
 +      } else {
 +        out.println("Could not obtain ShiroProvider");
 +        return true;
 +      }
 +    }
 +
 +    // Checks to see if the param name is present. If not, notify the user
 +    protected boolean hasParam(Map<String, String> params, String key, boolean notifyUser){
 +      if(params.get(key) == null){
 +        if(notifyUser) { out.println("Warn: " + key + " is not present in topology"); }
 +        return false;
 +      } else { return true; }
 +    }
 +
 +    /**
 +     *
 +     * @param ini - the path to the shiro.ini file within a topology deployment.
 +     * @param token - token for username and password
 +     * @return - true/false whether a user was successfully able to authenticate or not.
 +     */
 +    protected boolean authenticateUser(Ini ini, UsernamePasswordToken token){
 +      boolean result = false;
 +      try {
 +        Subject subject = getSubject(ini);
 +        try{
 +          subject.login(token);
 +          if(subject.isAuthenticated()){
 +            result = true;
 +          }
 +        } catch (AuthenticationException e){
 +          out.println(e.toString());
 +          out.println(e.getCause().getMessage());
 +          if (debug) {
 +            e.printStackTrace(out);
 +          } else {
 +            out.println(debugMessage);
 +          }
 +        } finally {
 +          subject.logout();
 +        }
 +      } catch (BadSubjectException e) {
 +        out.println(e.toString());
 +        if (debug){
 +          e.printStackTrace();
 +        } else {
 +          out.println(debugMessage);
 +        }
 +      } catch (ConfigurationException e) {
 +        out.println(e.toString());
 +      } catch ( Exception e ) {
 +        out.println(e.getCause());
 +        out.println(e.toString());
 +      }
 +      return result;
 +    }
 +
 +    protected boolean authenticateUser(String config, UsernamePasswordToken token) throws ConfigurationException {
 +      Ini ini = new Ini();
 +      try {
 +        ini.loadFromPath(config);
 +        return authenticateUser(ini, token);
 +      } catch (ConfigurationException e) {
 +        throw e;
 +      }
 +    }
 +
 +    /**
 +     *
 +     * @param userDn - fully qualified userDn used for LDAP authentication
 +     * @return - returns the principal found in the userDn after "uid="
 +     */
 +    protected String getPrincipal(String userDn){
 +      String result = "";
 +
 +//      Need to determine whether we are using AD or LDAP?
 +//      LDAP userDn usually starts with "uid="
 +//      AD userDn usually starts with cn/CN
 +//      Find the userDN template
 +
 +      try {
 +        Topology t = getTopology(cluster);
 +        Provider shiro = t.getProvider("authentication", "ShiroProvider");
 +
 +        String p1 = shiro.getParams().get("main.ldapRealm.userDnTemplate");
 +
 +//        We know everything between first "=" and "," will be part of the principal.
 +        int eq = userDn.indexOf("=");
 +        int com = userDn.indexOf(",");
 +        if(eq != -1 && com > eq && com != -1) {
 +          result = userDn.substring(eq + 1, com);
 +        } else {
 +          result = "";
 +        }
 +      } catch (NoSuchTopologyException e) {
 +        out.println(e.toString());
 +        result = userDn;
 +      } finally {
 +        return result;
 +      }
 +    }
 +
 +    /**
 +     *
 +     * @param t - topology configuration to use
 +     * @param config - the path to the shiro.ini file from the topology deployment.
 +     * @return - true/false whether LDAP successfully authenticated with system credentials.
 +     */
 +    protected boolean testSysBind(Topology t, String config) {
 +      boolean result = false;
 +      String username;
 +      char[] password;
 +
 +      try {
 +//        Pull out contextFactory.url param for light shiro config
 +        Provider shiro = t.getProvider("authentication", "ShiroProvider");
 +        Map<String, String> params = shiro.getParams();
 +        String url = params.get("main.ldapRealm.contextFactory.url");
 +
 +//        Build the Ini with minimum requirements
 +        Ini ini = new Ini();
 +        ini.addSection("main");
 +        ini.setSectionProperty("main", "ldapRealm", "org.apache.knox.gateway.shirorealm.KnoxLdapRealm");
 +        ini.setSectionProperty("main", "ldapContextFactory", "org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory");
 +        ini.setSectionProperty("main", "ldapRealm.contextFactory.url", url);
 +
 +        username = getSystemUsername(t);
 +        password = getSystemPassword(t);
 +        result = authenticateUser(ini, new UsernamePasswordToken(username, password));
 +      } catch (MissingUsernameException | NoSuchProviderException | MissingPasswordException e) {
 +        out.println(e.toString());
 +      } catch (NullPointerException e) {
 +        out.println(e.toString());
 +      }
 +      return result;
 +    }
 +
 +    /**
 +     *
 +     * @param t - topology configuration to use
 +     * @return - the principal of the systemUsername specified in topology. null if non-existent
 +     */
 +    private String getSystemUsername(Topology t) throws MissingUsernameException, NoSuchProviderException {
 +      final String SYSTEM_USERNAME = "main.ldapRealm.contextFactory.systemUsername";
 +      String user = null;
 +      Provider shiroProvider = t.getProvider("authentication", "ShiroProvider");
 +      if(shiroProvider != null){
 +        Map<String, String> params = shiroProvider.getParams();
 +        String userDn = params.get(SYSTEM_USERNAME);
 +        user = userDn;
 +      } else {
 +        throw new NoSuchProviderException("ShiroProvider", "authentication", t.getName());
 +      }
 +      return user;
 +    }
 +
 +    /**
 +     *
 +     * @param t - topology configuration to use
 +     * @return - the systemPassword specified in topology. null if non-existent
 +     */
 +    private char[] getSystemPassword(Topology t) throws NoSuchProviderException, MissingPasswordException{
 +      final String SYSTEM_PASSWORD = "main.ldapRealm.contextFactory.systemPassword";
 +      String pass = null;
 +      Provider shiro = t.getProvider("authentication", "ShiroProvider");
 +      if(shiro != null){
 +        Map<String, String> params = shiro.getParams();
 +        pass = params.get(SYSTEM_PASSWORD);
 +      } else {
 +        throw new NoSuchProviderException("ShiroProvider", "authentication", t.getName());
 +      }
 +
 +      if(pass != null) {
 +        return pass.toCharArray();
 +      } else {
 +        throw new MissingPasswordException("ShiroProvider did not contain param: " + SYSTEM_PASSWORD);
 +      }
 +    }
 +
 +    /**
 +     *
 +     * @param config - the shiro.ini config file created in topology deployment.
 +     * @return - the Subject given by the shiro config's settings.
 +     */
 +    protected Subject getSubject(Ini config) throws BadSubjectException {
 +      try {
 +        ThreadContext.unbindSubject();
 +        Factory factory = new IniSecurityManagerFactory(config);
 +        org.apache.shiro.mgt.SecurityManager securityManager = (org.apache.shiro.mgt.SecurityManager) factory.getInstance();
 +        SecurityUtils.setSecurityManager(securityManager);
 +        Subject subject = SecurityUtils.getSubject();
 +        if( subject != null) {
 +          return subject;
 +        } else {
 +          out.println("Error Creating Subject from config at: " + config);
 +        }
 +      } catch (Exception e){
 +        out.println(e.toString());
 +      }
 +      throw new BadSubjectException("Subject could not be created with Shiro Config at " + config);
 +    }
 +
 +    protected Subject getSubject(String config) throws ConfigurationException {
 +      Ini ini = new Ini();
 +      ini.loadFromPath(config);
 +      try {
 +        return getSubject(ini);
 +      } catch (BadSubjectException e) {
 +        throw new ConfigurationException("Could not get Subject with Ini at " + config);
 +      }
 +    }
 +
 +    /**
 +     * Prompts the user for credentials on the command line, if necessary,
 +     * and populates the username and password members.
 +     */
 +    protected void promptCredentials() {
 +      if(this.username == null){
 +        Console c = System.console();
 +        if( c != null) {
 +          this.username = c.readLine("Username: ");
 +        }else{
 +          try {
 +            BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
 +            out.println("Username: ");
 +            this.username = reader.readLine();
 +            reader.close();
 +          } catch (IOException e){
 +            out.println(e.toString());
 +            this.username = "";
 +          }
 +        }
 +      }
 +
 +      if(this.password == null){
 +        Console c = System.console();
 +        if( c != null) {
 +          this.password = c.readPassword("Password: ");
 +        }else{
 +          try {
 +            BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
 +            out.println("Password: ");
 +            String pw = reader.readLine();
 +            if(pw != null){
 +              this.password = pw.toCharArray();
 +            } else {
 +              this.password = new char[0];
 +            }
 +            reader.close();
 +          } catch (IOException e){
 +            out.println(e.toString());
 +            this.password = new char[0];
 +          }
 +        }
 +      }
 +    }
 +
 +    /**
 +     *
 +     * @param topologyName - the name of the topology to retrieve
 +     * @return - Topology object with the specified name; a NoSuchTopologyException is thrown if it does not exist in the TopologyService
 +     */
 +    protected Topology getTopology(String topologyName) throws NoSuchTopologyException {
 +      TopologyService ts = getTopologyService();
 +      ts.reloadTopologies();
 +      for (Topology t : ts.getTopologies()) {
 +        if(t.getName().equals(topologyName)) {
 +          return t;
 +        }
 +      }
 +      throw new  NoSuchTopologyException("Topology " + topologyName + " does not" +
 +          " exist in the topologies directory.");
 +    }
 +
 +    /**
 +     *
 +     * @param t - Topology to use for config
 +     * @return - path of shiro.ini config file.
 +     */
 +    protected String getConfig(Topology t){
 +      File tmpDir = new File(System.getProperty("java.io.tmpdir"));
 +      DeploymentFactory.setGatewayServices(services);
 +      EnterpriseArchive archive = DeploymentFactory.createDeployment(getGatewayConfig(), t);
 +      File war = archive.as(ExplodedExporter.class).exportExploded(tmpDir, t.getName() + "_deploy.tmp");
 +      war.deleteOnExit();
 +      String config = war.getAbsolutePath() + "/%2F/WEB-INF/shiro.ini";
 +      try{
 +        FileUtils.forceDeleteOnExit(war);
 +      } catch (IOException e) {
 +        out.println(e.toString());
 +        war.deleteOnExit();
 +      }
 +      return config;
 +    }
 +
 +    /**
 +     * Populates username and password if they were passed as arguments; otherwise prompts the user for them.
 +     */
 +    void acquireCredentials(){
 +      if(user != null){
 +        this.username = user;
 +      }
 +      if(pass != null){
 +        this.password = pass.toCharArray();
 +      }
 +      promptCredentials();
 +    }
 +
 +    /**
 +     *
 +     * @return - true if the topology was acquired from the topology service and populated in the topology
 +     *           field; false otherwise.
 +     */
 +    protected boolean acquireTopology(){
 +      try {
 +        topology = getTopology(cluster);
 +      } catch (NoSuchTopologyException e) {
 +        out.println(e.toString());
 +        return false;
 +      }
 +      return true;
 +    }
 +  }
 +
 +  private class LDAPAuthCommand extends LDAPCommand {
 +
 +    public static final String USAGE = "user-auth-test [--cluster clustername] [--u username] [--p password] [--g]";
 +    public static final String DESC = "This command tests a cluster configuration's ability to\n " +
 +        "authenticate a user with a cluster's ShiroProvider settings.\n Use \"--g\" if you want to list the groups a" +
 +        " user is a member of. \nOptional: [--u username]: Provide a username argument to the command\n" +
 +        "Optional: [--p password]: Provide a password argument to the command.\n" +
 +        "If a username and password argument are not supplied, the terminal will prompt you for one.";
 +
 +    private static final String  SUBJECT_USER_GROUPS = "subject.userGroups";
 +    private HashSet<String> groupSet = new HashSet<>();
 +
 +    @Override
 +    public String getUsage() {
 +      return USAGE + ":\n\n" + DESC;
 +    }
 +
 +    @Override
 +    public void execute() {
 +      if(!acquireTopology()){
 +        return;
 +      }
 +      acquireCredentials();
 +
 +      if(topology.getProvider("authentication", "ShiroProvider") == null) {
 +        out.println("ERR: This tool currently only works with Shiro as the authentication provider.");
 +        out.println("Please update the topology to use \"ShiroProvider\" as the authentication provider.");
 +        return;
 +      }
 +
 +      String config = getConfig(topology);
 +
 +      if(new File(config).exists()) {
 +          if(authenticateUser(config, new UsernamePasswordToken(username, password))) {
 +            out.println("LDAP authentication successful!");
 +            if(groups) {
 +              if(testSysBind(topology, config)) {
 +                groupSet = getGroups(topology, new UsernamePasswordToken(username, password));
 +                if(groupSet == null || groupSet.isEmpty()) {
 +                  out.println(username + " does not belong to any groups");
 +                  if(groups) {
 +                    hasShiroProviderErrors(topology, true);
 +                    out.println("You were looking for this user's groups but this user does not belong to any.");
 +                    out.println("Your topology file may be incorrectly configured for group lookup.");
 +                  }
 +                } else {
 +                  for (Object o : groupSet.toArray()) {
 +                    out.println(username + " is a member of: " + o.toString());
 +                  }
 +                }
 +              }
 +            }
 +          } else {
 +            out.println("ERR: Unable to authenticate user: " + username);
 +          }
 +      } else {
 +        out.println("ERR: No shiro config file found.");
 +      }
 +    }
 +
 +    private HashSet<String> getGroups(Topology t, UsernamePasswordToken token){
 +      HashSet<String> groups = null;
 +      try {
 +        Subject subject = getSubject(getConfig(t));
 +        if(!subject.isAuthenticated()) {
 +          subject.login(token);
 +        }
 +        subject.hasRole(""); //Populate subject groups
 +        groups = (HashSet) subject.getSession().getAttribute(SUBJECT_USER_GROUPS);
 +        subject.logout();
 +      } catch (AuthenticationException e) {
 +        out.println("Error retrieving groups");
 +        out.println(e.toString());
 +        if(debug) {
 +          e.printStackTrace();
 +        } else {
 +          out.println(debugMessage);
 +        }
 +      } catch (ConfigurationException e) {
 +        out.println(e.toString());
 +        if(debug){
 +          e.printStackTrace();
 +        }
 +      }
 +      return groups;
 +    }
 +
 +  }
 +
 +  public class LDAPSysBindCommand extends LDAPCommand {
 +
 +    public static final String USAGE = "system-user-auth-test [--cluster clustername] [--d]";
 +    public static final String DESC = "This command tests a cluster configuration's ability to\n " +
 +        "authenticate a user with a cluster's ShiroProvider settings.";
 +
 +    @Override
 +    public String getUsage() {
 +      return USAGE + ":\n\n" + DESC;
 +    }
 +
 +    @Override
 +    public void execute() {
 +
 +      if(!acquireTopology()) {
 +        return;
 +      }
 +
 +      if(hasShiroProviderErrors(topology, false)) {
 +        out.println("Topology warnings present. SystemUser may not bind.");
 +      }
 +
 +      if(testSysBind(topology, getConfig(topology))) {
 +        out.println("System LDAP Bind successful.");
 +      } else {
 +        out.println("Unable to successfully bind to LDAP server with topology credentials. Are your parameters correct?");
 +      }
 +    }
 +  }
 +
 +  private GatewayConfig getGatewayConfig() {
 +    GatewayConfig result;
 +    Configuration conf = getConf();
 +    if(conf != null && conf instanceof GatewayConfig) {
 +      result = (GatewayConfig) conf;
 +    } else {
 +      result = new GatewayConfigImpl();
 +    }
 +    return result;
 +  }
 +
 +  public class ServiceTestCommand extends Command {
 +    public static final String USAGE = "service-test [--u username] [--p password] [--cluster clustername] [--hostname name] " +
 +        "[--port port]";
-     public static final String DESC = "This command requires a running instance of Knox to be present on the same " +
-         "machine. It will execute a test to make sure all services are accessible through the gateway URLs. Errors are " +
-         "reported and suggestions to resolve any problems are returned. JSON formatted.";
++    public static final String DESC =
++                        "This command requires a running instance of Knox to be present on the same machine.\n" +
++                        "It will execute a test to make sure all services are accessible through the gateway URLs.\n" +
++                        "Errors are reported and suggestions to resolve any problems are returned. JSON formatted.\n";
 +
 +    private boolean ssl = true;
 +    private int attempts = 0;
 +
 +    @Override
 +    public String getUsage() { return USAGE + ":\n\n" + DESC; };
 +
 +    @Override
 +    public void execute() {
 +      attempts++;
 +      SSLContext ctx = null;
 +      CloseableHttpClient client;
 +      String http = "http://";
 +      String https = "https://";
 +      GatewayConfig conf = getGatewayConfig();
 +      String gatewayPort;
 +      String host;
 +
 +
 +      if(cluster == null) {
 +        printKnoxShellUsage();
 +        out.println("A --cluster argument is required.");
 +        return;
 +      }
 +
 +      if(hostname != null) {
 +        host = hostname;
 +      } else {
 +        try {
 +          host = InetAddress.getLocalHost().getHostAddress();
 +        } catch (UnknownHostException e) {
 +          out.println(e.toString());
 +          out.println("Defaulting address to localhost. Use --hostname option to specify a different hostname");
 +          host = "localhost";
 +        }
 +      }
 +
 +      if (port != null) {
 +        gatewayPort = port;
 +      } else if (conf.getGatewayPort() > -1) {
 +        gatewayPort = Integer.toString(conf.getGatewayPort());
 +      } else {
 +        out.println("Could not get port. Please supply it using the --port option");
 +        return;
 +      }
 +
 +
 +      String path = "/" + conf.getGatewayPath();
 +      String topology = "/" + cluster;
 +      String httpServiceTestURL = http + host + ":" + gatewayPort + path + topology + "/service-test";
 +      String httpsServiceTestURL = https + host + ":" + gatewayPort + path + topology + "/service-test";
 +
 +      String authString = "";
 +//    Create Authorization String
 +      if( user != null && pass != null) {
 +        authString = "Basic " + Base64.encodeBase64String((user + ":" + pass).getBytes());
 +      } else {
 +        out.println("Username and/or password not supplied. Expect HTTP 401 Unauthorized responses.");
 +      }
 +
 +//    Attempt to build SSL context for HTTP client.
 +      try {
 +        ctx = SSLContexts.custom().loadTrustMaterial(null, new TrustSelfSignedStrategy()).build();
 +      } catch (Exception e) {
 +        out.println(e.toString());
 +      }
 +
 +//    Initialize the HTTP client
 +      if(ctx == null) {
 +        client = HttpClients.createDefault();
 +      } else {
 +        client = HttpClients.custom().setSslcontext(ctx).build();
 +      }
 +
 +      HttpGet request;
 +      if(ssl) {
 +        request = new HttpGet(httpsServiceTestURL);
 +      } else {
 +        request = new HttpGet(httpServiceTestURL);
 +      }
 +
 +
 +      request.setHeader("Authorization", authString);
 +      request.setHeader("Accept", MediaType.APPLICATION_JSON.getMediaType());
 +      try {
 +        out.println(request.toString());
 +        CloseableHttpResponse response = client.execute(request);
 +
 +        switch (response.getStatusLine().getStatusCode()) {
 +
 +          case 200:
 +            response.getEntity().writeTo(out);
 +            break;
 +          case 404:
 +            out.println("Could not find service-test resource");
 +            out.println("Make sure you have configured the SERVICE-TEST service in your topology.");
 +            break;
 +          case 500:
 +            out.println("HTTP 500 Server error");
 +            break;
 +
 +          default:
 +            out.println("Unexpected HTTP response code.");
 +            out.println(response.getStatusLine().toString());
 +            response.getEntity().writeTo(out);
 +            break;
 +        }
 +
 +        response.close();
 +        request.releaseConnection();
 +
 +      } catch (ClientProtocolException e) {
 +        out.println(e.toString());
 +        if (debug) {
 +          e.printStackTrace(out);
 +        }
 +      } catch (SSLException e) {
 +        out.println(e.toString());
 +        retryRequest();
 +      } catch (IOException e) {
 +        out.println(e.toString());
 +        retryRequest();
 +        if(debug) {
 +          e.printStackTrace(out);
 +        }
 +      } finally {
 +        try {
 +          client.close();
 +        } catch (IOException e) {
 +          out.println(e.toString());
 +        }
 +      }
 +
 +    }
 +
 +    public void retryRequest(){
 +      if(attempts < 2) {
 +        if(ssl) {
 +          ssl = false;
 +          out.println("Attempting request without SSL.");
 +        } else {
 +          ssl = true;
 +          out.println("Attempting request with SSL ");
 +        }
 +        execute();
 +      } else {
 +        out.println("Unable to successfully make request. Try using the API with cURL.");
 +      }
 +    }
 +
 +  }
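
For reference, a minimal sketch (not part of the patch) of how the service-test request above is assembled: the command concatenates the scheme, host, gateway port, gateway path, cluster name and the /service-test suffix, and sends a Basic Authorization header built from the --u/--p credentials. The host, port, gateway path, cluster name and credentials below are illustrative values, not defaults taken from the code.

    import org.apache.commons.codec.binary.Base64;

    public class ServiceTestUrlSketch {
        public static void main(String[] args) {
            // Illustrative values; the command derives these from --hostname/--port,
            // the gateway configuration and the --cluster argument.
            String host = "localhost";
            String gatewayPort = "8443";
            String path = "/gateway";      // "/" + conf.getGatewayPath()
            String topology = "/sandbox";  // "/" + cluster

            String httpsServiceTestURL = "https://" + host + ":" + gatewayPort + path + topology + "/service-test";

            // Basic auth header, as built from the supplied username and password.
            String authString = "Basic " + Base64.encodeBase64String("admin:admin-password".getBytes());

            System.out.println("GET " + httpsServiceTestURL);
            System.out.println("Authorization: " + authString);
        }
    }
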
 +
++  public class RemoteRegistryClientsListCommand extends Command {
++
++    static final String USAGE = "list-registry-clients";
++    static final String DESC = "Lists all of the remote configuration registry clients defined in gateway-site.xml.\n";
++
++    /* (non-Javadoc)
++     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
++     */
++    @Override
++    public void execute() throws Exception {
++      GatewayConfig config = getGatewayConfig();
++      List<String> remoteConfigRegistryClientNames = config.getRemoteRegistryConfigurationNames();
++      if (!remoteConfigRegistryClientNames.isEmpty()) {
++        out.println("Listing remote configuration registry clients:");
++        for (String name : remoteConfigRegistryClientNames) {
++          out.println(name);
++        }
++      }
++    }
++
++    /* (non-Javadoc)
++     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
++     */
++    @Override
++    public String getUsage() {
++      return USAGE + ":\n\n" + DESC;
++    }
++ }
++
++
++  /**
++   * Base class for remote config registry upload commands
++   */
++  public abstract class RemoteRegistryUploadCommand extends Command {
++    protected static final String ROOT_ENTRY = "/knox";
++    protected static final String CONFIG_ENTRY = ROOT_ENTRY + "/config";
++    protected static final String PROVIDER_CONFIG_ENTRY = CONFIG_ENTRY + "/shared-providers";
++    protected static final String DESCRIPTORS__ENTRY = CONFIG_ENTRY + "/descriptors";
++
++    private File sourceFile = null;
++    protected String filename = null;
++
++    protected RemoteRegistryUploadCommand(String sourceFileName) {
++      this.filename = sourceFileName;
++    }
++
++    private void upload(RemoteConfigurationRegistryClient client, String entryPath, File source) throws Exception {
++      String content = FileUtils.readFileToString(source);
++      if (client.entryExists(entryPath)) {
++        // If it exists, then we're going to set the data
++        client.setEntryData(entryPath, content);
++      } else {
++        // If it does not exist, then create it and set the data
++        client.createEntry(entryPath, content);
++      }
++    }
++
++    File getSourceFile() {
++      if (sourceFile == null) {
++        sourceFile = new File(filename);
++      }
++      return sourceFile;
++    }
++
++    String getEntryName(String prefixPath) {
++      String entryName = remoteRegistryEntryName;
++      if (entryName == null) {
++        File sourceFile = getSourceFile();
++        if (sourceFile.exists()) {
++          String path = sourceFile.getAbsolutePath();
++          entryName = path.substring(path.lastIndexOf(File.separator) + 1);
++        } else {
++          out.println("Could not locate source file: " + filename);
++        }
++      }
++      return prefixPath + "/" + entryName;
++    }
++
++    protected void execute(String entryName, File sourceFile) throws Exception {
++      if (remoteRegistryClient != null) {
++        RemoteConfigurationRegistryClientService cs = getRemoteConfigRegistryClientService();
++        RemoteConfigurationRegistryClient client = cs.get(remoteRegistryClient);
++        if (client != null) {
++          if (entryName != null) {
++            upload(client, entryName, sourceFile);
++          }
++        } else {
++          out.println("No remote configuration registry identified by '" + remoteRegistryClient + "' could be found.");
++        }
++      } else {
++        out.println("Missing required argument : --registry-client\n");
++      }
++    }
++
++  }
++
++
++  public class RemoteRegistryUploadProviderConfigCommand extends RemoteRegistryUploadCommand {
++
++    static final String USAGE = "upload-provider-config providerConfigFile --registry-client name [--entry-name entryName]";
++    static final String DESC = "Uploads a provider configuration to the specified remote registry client, optionally " +
++                               "renaming the entry.\nIf the entry name is not specified, the name of the uploaded " +
++                               "file is used.\n";
++
++    RemoteRegistryUploadProviderConfigCommand(String fileName) {
++      super(fileName);
++    }
++
++    /* (non-Javadoc)
++     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
++     */
++    @Override
++    public void execute() throws Exception {
++      super.execute(getEntryName(PROVIDER_CONFIG_ENTRY), getSourceFile());
++    }
++
++    /* (non-Javadoc)
++     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
++     */
++    @Override
++    public String getUsage() {
++      return USAGE + ":\n\n" + DESC;
++    }
++  }
++
++
++  public class RemoteRegistryUploadDescriptorCommand extends RemoteRegistryUploadCommand {
++
++    static final String USAGE = "upload-descriptor descriptorFile --registry-client name [--entry-name entryName]";
++    static final String DESC = "Uploads a simple descriptor using the specified remote registry client, optionally " +
++                               "renaming the entry.\nIf the entry name is not specified, the name of the uploaded " +
++                               "file is used.\n";
++
++    RemoteRegistryUploadDescriptorCommand(String fileName) {
++      super(fileName);
++    }
++
++    /* (non-Javadoc)
++     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
++     */
++    @Override
++    public void execute() throws Exception {
++      super.execute(getEntryName(DESCRIPTORS__ENTRY), getSourceFile());
++    }
++
++    /* (non-Javadoc)
++     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
++     */
++    @Override
++    public String getUsage() {
++      return USAGE + ":\n\n" + DESC;
++    }
++  }
++
++
++  public class RemoteRegistryGetACLCommand extends Command {
++
++    static final String USAGE = "get-registry-acl entry --registry-client name";
++    static final String DESC = "Presents the ACL settings for the specified remote registry entry.\n";
++
++    private String entry = null;
++
++    RemoteRegistryGetACLCommand(String entry) {
++      this.entry = entry;
++    }
++
++    /* (non-Javadoc)
++     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#execute()
++     */
++    @Override
++    public void execute() throws Exception {
++      if (remoteRegistryClient != null) {
++        RemoteConfigurationRegistryClientService cs = getRemoteConfigRegistryClientService();
++        RemoteConfigurationRegistryClient client = cs.get(remoteRegistryClient);
++        if (client != null) {
++          if (entry != null) {
++            List<RemoteConfigurationRegistryClient.EntryACL> acls = client.getACL(entry);
++            for (RemoteConfigurationRegistryClient.EntryACL acl : acls) {
++              out.println(acl.getType() + ":" + acl.getId() + ":" + acl.getPermissions());
++            }
++          }
++        } else {
++          out.println("No remote configuration registry identified by '" + remoteRegistryClient + "' could be found.");
++        }
++      } else {
++        out.println("Missing required argument : --registry-client\n");
++      }
++    }
++
++    /* (non-Javadoc)
++     * @see org.apache.hadoop.gateway.util.KnoxCLI.Command#getUsage()
++     */
++    @Override
++    public String getUsage() {
++      return USAGE + ":\n\n" + DESC;
++    }
++  }
++
++
++  /**
++   * Base class for remote config registry delete commands
++   */
++  public abstract class RemoteRegistryDeleteCommand extends Command {
++    protected static final String ROOT_ENTRY = "/knox";
++    protected static final String CONFIG_ENTRY = ROOT_ENTRY + "/config";
++    protected static final String PROVIDER_CONFIG_ENTRY = CONFIG_ENTRY + "/shared-providers";
++    protected static final String DESCRIPTORS__ENTRY = CONFIG_ENTRY + "/descriptors";
++
++    protected String entryName = null;
++
++    protected RemoteRegistryDeleteCommand(String entryName) {
++      this.entryName = entryName;
++    }
++
++    private void delete(RemoteConfigurationRegistryClient client, String entryPath) throws Exception {
++      if (client.entryExists(entryPath)) {
++        // If it exists, then delete it
++        client.deleteEntry(entryPath);
++      }
++    }
++
++    protected void execute(String entryName) throws Exception {
++      if (remoteRegistryClient != null) {
++        RemoteConfigurationRegistryClientService cs = getRemoteConfigRegistryClientService();
++        RemoteConfigurationRegistryClient client = cs.get(remoteRegistryClient);
++        if (client != null) {
++          if (entryName != null) {
++            delete(client, entryName);
++          }
++        } else {
++          out.println("No remote configuration registry identified by '" + remoteRegistryClient + "' could be found.");
++        }
++      } else {
++        out.println("Missing required argument : --registry-client\n");
++      }
++    }
++  }
++
++
++  public class RemoteRegistryDeleteProviderConfigCommand extends RemoteRegistryDeleteCommand {
++    static final String USAGE = "delete-provider-config providerConfig --registry-client name";
++    static final String DESC = "Deletes a shared provider configuration from the specified remote registry.\n";
++
++    public RemoteRegistryDeleteProviderConfigCommand(String entryName) {
++      super(entryName);
++    }
++
++    @Override
++    public void execute() throws Exception {
++      execute(PROVIDER_CONFIG_ENTRY + "/" + entryName);
++    }
++
++    @Override
++    public String getUsage() {
++      return USAGE + ":\n\n" + DESC;
++    }
++  }
++
++
++  public class RemoteRegistryDeleteDescriptorCommand extends RemoteRegistryDeleteCommand {
++    static final String USAGE = "delete-descriptor descriptor --registry-client name";
++    static final String DESC = "Deletes a simple descriptor from the specified remote registry.\n";
++
++    public RemoteRegistryDeleteDescriptorCommand(String entryName) {
++      super(entryName);
++    }
++
++    @Override
++    public void execute() throws Exception {
++      execute(DESCRIPTORS__ENTRY + "/" + entryName);
++    }
++
++    @Override
++    public String getUsage() {
++      return USAGE + ":\n\n" + DESC;
++    }
++  }
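
A minimal sketch (not part of the patch) of how the upload and delete commands above compose registry entry paths: entries live under /knox/config, split into shared-providers and descriptors, and when no --entry-name is supplied the simple file name of the uploaded file becomes the entry name. The file and entry names below are illustrative.

    public class RegistryEntryPathSketch {
        static final String ROOT_ENTRY = "/knox";
        static final String CONFIG_ENTRY = ROOT_ENTRY + "/config";
        static final String PROVIDER_CONFIG_ENTRY = CONFIG_ENTRY + "/shared-providers";
        static final String DESCRIPTORS_ENTRY = CONFIG_ENTRY + "/descriptors";

        public static void main(String[] args) {
            // upload-provider-config with no --entry-name: the file's simple name becomes the entry name.
            String providerConfigFile = "sandbox-providers.xml";
            System.out.println(PROVIDER_CONFIG_ENTRY + "/" + providerConfigFile);

            // delete-descriptor removes the named entry under the descriptors node.
            String descriptorEntry = "sandbox.json";
            System.out.println(DESCRIPTORS_ENTRY + "/" + descriptorEntry);
        }
    }
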
++
++
 +  private static Properties loadBuildProperties() {
 +    Properties properties = new Properties();
 +    InputStream inputStream = KnoxCLI.class.getClassLoader().getResourceAsStream( "build.properties" );
 +    if( inputStream != null ) {
 +      try {
 +        properties.load( inputStream );
 +        inputStream.close();
 +      } catch( IOException e ) {
 +        // Ignore.
 +      }
 +    }
 +    return properties;
 +  }
 +
 +  /**
 +   * @param args
 +   * @throws Exception
 +   */
 +  public static void main(String[] args) throws Exception {
 +    PropertyConfigurator.configure( System.getProperty( "log4j.configuration" ) );
 +    int res = ToolRunner.run(new GatewayConfigImpl(), new KnoxCLI(), args);
 +    System.exit(res);
 +  }
 +}
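
A minimal sketch (not part of the patch) showing how the commands defined above can be driven programmatically, mirroring KnoxCLI.main(): arguments are parsed and dispatched through Hadoop's ToolRunner. The package names are assumed to follow the org.apache.knox restructuring on this branch, and the cluster name and credentials are illustrative.

    import org.apache.hadoop.util.ToolRunner;
    import org.apache.knox.gateway.config.impl.GatewayConfigImpl;  // assumed package after the restructuring
    import org.apache.knox.gateway.util.KnoxCLI;                   // assumed package after the restructuring

    public class KnoxCliInvocationSketch {
        public static void main(String[] args) throws Exception {
            // Roughly equivalent to: knoxcli.sh service-test --cluster sandbox --u admin --p admin-password
            String[] cliArgs = {"service-test", "--cluster", "sandbox", "--u", "admin", "--p", "admin-password"};
            int res = ToolRunner.run(new GatewayConfigImpl(), new KnoxCLI(), cliArgs);
            System.exit(res);
        }
    }
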


[41/49] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
index 21627ad,0000000..05fc4eb
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
+++ b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@@ -1,858 -1,0 +1,870 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.discovery.ambari;
 +
 +import net.minidev.json.JSONObject;
 +import net.minidev.json.JSONValue;
 +import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
 +import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
 +import org.easymock.EasyMock;
 +import org.junit.Test;
 +
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
 +import static org.junit.Assert.assertNotNull;
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.assertTrue;
 +
 +
 +/**
 + * Test the Ambari ServiceDiscovery implementation.
 + *
 + * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
 + *      treatment of the responses as they were observed at the time the tests were developed.
 + */
 +public class AmbariServiceDiscoveryTest {
 +
 +    @Test
 +    public void testSingleClusterDiscovery() throws Exception {
 +        final String discoveryAddress = "http://ambarihost:8080";
 +        final String clusterName = "testCluster";
 +        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
 +
 +        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
 +        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
 +        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
 +        EasyMock.replay(sdc);
 +
 +        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
 +        assertNotNull(cluster);
 +        assertEquals(clusterName, cluster.getName());
 +        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
 +        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
 +
 +//        printServiceURLs(cluster);
 +    }
 +
 +
 +    @Test
 +    public void testBulkClusterDiscovery() throws Exception {
 +        final String discoveryAddress = "http://ambarihost:8080";
 +        final String clusterName = "anotherCluster";
 +        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
 +
 +        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
 +        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
 +        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
 +        EasyMock.replay(sdc);
 +
 +        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
 +        assertNotNull(clusters);
 +        assertEquals(1, clusters.size());
 +        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
 +        assertNotNull(cluster);
 +        assertEquals(clusterName, cluster.getName());
 +        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
 +        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
 +
 +//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
 +    }
 +
 +
 +    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
 +        final String[] services = new String[]{"NAMENODE",
 +                                               "JOBTRACKER",
 +                                               "WEBHDFS",
 +                                               "WEBHCAT",
 +                                               "OOZIE",
 +                                               "WEBHBASE",
 +                                               "HIVE",
 +                                               "RESOURCEMANAGER"};
 +        printServiceURLs(cluster, services);
 +    }
 +
 +
 +    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
 +        for (String name : services) {
 +            StringBuilder sb = new StringBuilder();
 +            List<String> urls = cluster.getServiceURLs(name);
 +            if (urls != null && !urls.isEmpty()) {
 +                for (String url : urls) {
 +                    sb.append(url);
 +                    sb.append(" ");
 +                }
 +            }
 +            System.out.println(String.format("%18s: %s", name, sb.toString()));
 +        }
 +    }
 +
 +
 +    /**
 +     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, using a test RESTInvoker whose invoke
 +     * method is overridden to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
 +     */
 +    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
 +
++        final static String CLUSTER_PLACEHOLDER = TestRESTInvoker.CLUSTER_PLACEHOLDER;
++
++        TestAmbariServiceDiscovery(String clusterName) {
++            super(new TestRESTInvoker(clusterName));
++        }
++
++    }
++
++    private static final class TestRESTInvoker extends RESTInvoker {
++
 +        final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
 +
 +        private Map<String, JSONObject> cannedResponses = new HashMap<>();
 +
-         TestAmbariServiceDiscovery(String clusterName) {
-             cannedResponses.put(AMBARI_CLUSTERS_URI,
-                                 (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                clusterName)));
++        TestRESTInvoker(String clusterName) {
++            super(null);
++
++            cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
++                    (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
++                            clusterName)));
 +
-             cannedResponses.put(String.format(AMBARI_HOSTROLES_URI, clusterName),
-                                 (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                 clusterName)));
++            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
++                    (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
++                            clusterName)));
 +
-             cannedResponses.put(String.format(AMBARI_SERVICECONFIGS_URI, clusterName),
-                                 (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                      clusterName)));
++            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
++                    (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
++                            clusterName)));
 +        }
 +
 +        @Override
-         protected JSONObject invokeREST(String url, String username, String passwordAlias) {
++        JSONObject invoke(String url, String username, String passwordAlias) {
 +            return cannedResponses.get(url.substring(url.indexOf("/api")));
 +        }
 +    }
 +
 +
 +    ////////////////////////////////////////////////////////////////////////
 +    //  JSON response templates, based on actual response content excerpts
 +    ////////////////////////////////////////////////////////////////////////
 +
 +    private static final String CLUSTERS_JSON_TEMPLATE =
 +    "{\n" +
 +    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
 +    "  \"items\" : [\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "      \"Clusters\" : {\n" +
 +    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "        \"version\" : \"HDP-2.6\"\n" +
 +    "      }\n" +
 +    "    }\n" +
 +    "  ]" +
 +    "}";
 +
 +
 +    private static final String HOSTROLES_JSON_TEMPLATE =
 +    "{\n" +
 +    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
 +    "  \"items\" : [\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
 +    "      \"ServiceInfo\" : {\n" +
 +    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
 +    "      },\n" +
 +    "      \"components\" : [\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
 +    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
 +    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\",\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"HBASE_MASTER\",\n" +
 +    "            \"service_name\" : \"HBASE\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"HBASE_MASTER\",\n" +
 +    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"HBASE\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\",\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        }\n" +
 +    "      ]\n" +
 +    "    },\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
 +    "      \"ServiceInfo\" : {\n" +
 +    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "        \"service_name\" : \"HDFS\"\n" +
 +    "      },\n" +
 +    "      \"components\" : [\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"NAMENODE\",\n" +
 +    "            \"service_name\" : \"HDFS\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"NAMENODE\",\n" +
 +    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"HDFS\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\",\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
 +    "            \"service_name\" : \"HDFS\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
 +    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"HDFS\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\",\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        }\n" +
 +    "      ]\n" +
 +    "    },\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
 +    "      \"ServiceInfo\" : {\n" +
 +    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "        \"service_name\" : \"HIVE\"\n" +
 +    "      },\n" +
 +    "      \"components\" : [\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"HCAT\",\n" +
 +    "            \"service_name\" : \"HIVE\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"HCAT\",\n" +
 +    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"HIVE\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\",\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        }\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
 +    "            \"service_name\" : \"HIVE\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
 +    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"HIVE\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\",\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"HIVE_SERVER\",\n" +
 +    "            \"service_name\" : \"HIVE\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"HIVE_SERVER\",\n" +
 +    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"HIVE\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\",\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
 +    "            \"service_name\" : \"HIVE\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
 +    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"HIVE\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\",\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        }\n" +
 +    "      ]\n" +
 +    "    },\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
 +    "      \"ServiceInfo\" : {\n" +
 +    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "        \"service_name\" : \"OOZIE\"\n" +
 +    "      },\n" +
 +    "      \"components\" : [\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
 +    "            \"service_name\" : \"OOZIE\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
 +    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"OOZIE\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\"\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        }\n" +
 +    "      ]\n" +
 +    "    },\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
 +    "      \"ServiceInfo\" : {\n" +
 +    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "        \"service_name\" : \"YARN\"\n" +
 +    "      },\n" +
 +    "      \"components\" : [\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
 +    "            \"service_name\" : \"YARN\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
 +    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"YARN\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\"\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"NODEMANAGER\",\n" +
 +    "            \"service_name\" : \"YARN\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"NODEMANAGER\",\n" +
 +    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"YARN\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\"\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
 +    "            \"service_name\" : \"YARN\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
 +    "                \"ha_state\" : \"ACTIVE\",\n" +
 +    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"YARN\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\"\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        }\n" +
 +    "      ]\n" +
 +    "    },\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
 +    "      \"ServiceInfo\" : {\n" +
 +    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "        \"service_name\" : \"ZOOKEEPER\"\n" +
 +    "      },\n" +
 +    "      \"components\" : [\n" +
 +    "        {\n" +
 +    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
 +    "          \"ServiceComponentInfo\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
 +    "            \"service_name\" : \"ZOOKEEPER\"\n" +
 +    "          },\n" +
 +    "          \"host_components\" : [\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
 +    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"ZOOKEEPER\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\"\n" +
 +    "              }\n" +
 +    "            },\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
 +    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"ZOOKEEPER\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\"\n" +
 +    "              }\n" +
 +    "            },\n" +
 +    "            {\n" +
 +    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
 +    "              \"HostRoles\" : {\n" +
 +    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
 +    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
 +    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
 +    "                \"service_name\" : \"ZOOKEEPER\",\n" +
 +    "                \"stack_id\" : \"HDP-2.6\"\n" +
 +    "              }\n" +
 +    "            }\n" +
 +    "          ]\n" +
 +    "        }\n" +
 +    "      ]\n" +
 +    "    }\n" +
 +    "  ]\n" +
 +    "}\n";
 +
 +
 +    private static final String SERVICECONFIGS_JSON_TEMPLATE =
 +    "{\n" +
 +    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
 +    "  \"items\" : [\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
 +    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "      \"configurations\" : [\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"hbase-site\",\n" +
 +    "          \"tag\" : \"version1503410563715\",\n" +
 +    "          \"version\" : 1,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
 +    "            \"hbase.master.info.port\" : \"16010\",\n" +
 +    "            \"hbase.master.port\" : \"16000\",\n" +
 +    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
 +    "            \"hbase.regionserver.port\" : \"16020\",\n" +
 +    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
 +    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
 +    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
 +    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : { }\n" +
 +    "        },\n" +
 +    "      ],\n" +
 +    "      \"is_current\" : true,\n" +
 +    "      \"service_config_version\" : 1,\n" +
 +    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
 +    "      \"service_name\" : \"HBASE\",\n" +
 +    "      \"stack_id\" : \"HDP-2.6\",\n" +
 +    "      \"user\" : \"admin\"\n" +
 +    "    },\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
 +    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "      \"configurations\" : [\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"hdfs-site\",\n" +
 +    "          \"tag\" : \"version1\",\n" +
 +    "          \"version\" : 1,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
 +    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
 +    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
 +    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
 +    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
 +    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
 +    "            \"dfs.https.port\" : \"50470\",\n" +
 +    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
 +    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
 +    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
 +    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
 +    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
 +    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
 +    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : {\n" +
 +    "            \"final\" : {\n" +
 +    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
 +    "              \"dfs.namenode.http-address\" : \"true\",\n" +
 +    "              \"dfs.support.append\" : \"true\",\n" +
 +    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
 +    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
 +    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
 +    "            }\n" +
 +    "          }\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"core-site\",\n" +
 +    "          \"tag\" : \"version1502131215159\",\n" +
 +    "          \"version\" : 2,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
 +    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : {\n" +
 +    "            \"final\" : {\n" +
 +    "              \"fs.defaultFS\" : \"true\"\n" +
 +    "            }\n" +
 +    "          }\n" +
 +    "        }\n" +
 +    "      ],\n" +
 +    "      \"is_current\" : true,\n" +
 +    "      \"service_config_version\" : 2,\n" +
 +    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
 +    "      \"service_name\" : \"HDFS\",\n" +
 +    "      \"stack_id\" : \"HDP-2.6\",\n" +
 +    "      \"user\" : \"admin\"\n" +
 +    "    },\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
 +    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "      \"configurations\" : [\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"hive-env\",\n" +
 +    "          \"tag\" : \"version1\",\n" +
 +    "          \"version\" : 1,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"hive_security_authorization\" : \"None\",\n" +
 +    "            \"webhcat_user\" : \"hcat\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : { }\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"hiveserver2-site\",\n" +
 +    "          \"tag\" : \"version1\",\n" +
 +    "          \"version\" : 1,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
 +    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
 +    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
 +    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : { }\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"hive-interactive-site\",\n" +
 +    "          \"tag\" : \"version1\",\n" +
 +    "          \"version\" : 1,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
 +    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
 +    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
 +    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
 +    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
 +    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
 +    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
 +    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
 +    "            \"hive.server2.webui.port\" : \"10502\",\n" +
 +    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
 +    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : { }\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"tez-interactive-site\",\n" +
 +    "          \"tag\" : \"version1\",\n" +
 +    "          \"version\" : 1,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
 +    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
 +    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : { }\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"hive-site\",\n" +
 +    "          \"tag\" : \"version1502130841736\",\n" +
 +    "          \"version\" : 2,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
 +    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
 +    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
 +    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
 +    "            \"hive.server2.authentication\" : \"NONE\",\n" +
 +    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
 +    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
 +    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
 +    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
 +    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
 +    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
 +    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
 +    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
 +    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
 +    "            \"hive.server2.transport.mode\" : \"http\",\n" +
 +    "            \"hive.server2.use.SSL\" : \"false\",\n" +
 +    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : {\n" +
 +    "            \"hidden\" : {\n" +
 +    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
 +    "            }\n" +
 +    "          }\n" +
 +    "        },\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"webhcat-site\",\n" +
 +    "          \"tag\" : \"version1502131111746\",\n" +
 +    "          \"version\" : 2,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"templeton.port\" : \"50111\",\n" +
 +    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
 +    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
 +    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
 +    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
 +    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : { }\n" +
 +    "        }\n" +
 +    "      ],\n" +
 +    "      \"createtime\" : 1502131110745,\n" +
 +    "      \"group_id\" : -1,\n" +
 +    "      \"group_name\" : \"Default\",\n" +
 +    "      \"hosts\" : [ ],\n" +
 +    "      \"is_cluster_compatible\" : true,\n" +
 +    "      \"is_current\" : true,\n" +
 +    "      \"service_config_version\" : 3,\n" +
 +    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
 +    "      \"service_name\" : \"HIVE\",\n" +
 +    "      \"stack_id\" : \"HDP-2.6\",\n" +
 +    "      \"user\" : \"admin\"\n" +
 +    "    },\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
 +    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "      \"configurations\" : [\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"oozie-site\",\n" +
 +    "          \"tag\" : \"version1502131137103\",\n" +
 +    "          \"version\" : 3,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : { }\n" +
 +    "        }\n" +
 +    "      ],\n" +
 +    "      \"is_current\" : true,\n" +
 +    "      \"service_config_version\" : 3,\n" +
 +    "      \"service_name\" : \"OOZIE\",\n" +
 +    "      \"stack_id\" : \"HDP-2.6\",\n" +
 +    "      \"user\" : \"admin\"\n" +
 +    "    },\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
 +    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "      \"configurations\" : [\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"tez-site\",\n" +
 +    "          \"tag\" : \"version1\",\n" +
 +    "          \"version\" : 1,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : { }\n" +
 +    "        }\n" +
 +    "      ],\n" +
 +    "      \"createtime\" : 1502122253525,\n" +
 +    "      \"group_id\" : -1,\n" +
 +    "      \"group_name\" : \"Default\",\n" +
 +    "      \"hosts\" : [ ],\n" +
 +    "      \"is_cluster_compatible\" : true,\n" +
 +    "      \"is_current\" : true,\n" +
 +    "      \"service_config_version\" : 1,\n" +
 +    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
 +    "      \"service_name\" : \"TEZ\",\n" +
 +    "      \"stack_id\" : \"HDP-2.6\",\n" +
 +    "      \"user\" : \"admin\"\n" +
 +    "    },\n" +
 +    "    {\n" +
 +    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
 +    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "      \"configurations\" : [\n" +
 +    "        {\n" +
 +    "          \"Config\" : {\n" +
 +    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
 +    "            \"stack_id\" : \"HDP-2.6\"\n" +
 +    "          },\n" +
 +    "          \"type\" : \"yarn-site\",\n" +
 +    "          \"tag\" : \"version1\",\n" +
 +    "          \"version\" : 1,\n" +
 +    "          \"properties\" : {\n" +
 +    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
 +    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
 +    "            \"yarn.acl.enable\" : \"false\",\n" +
 +    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
 +    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
 +    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
 +    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
 +    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
 +    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
 +    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
 +    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
 +    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
 +    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
 +    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
 +    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
 +    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
 +    "          },\n" +
 +    "          \"properties_attributes\" : { }\n" +
 +    "        }\n" +
 +    "      ],\n" +
 +    "      \"is_current\" : true,\n" +
 +    "      \"service_config_version\" : 1,\n" +
 +    "      \"service_name\" : \"YARN\",\n" +
 +    "      \"stack_id\" : \"HDP-2.6\",\n" +
 +    "      \"user\" : \"admin\"\n" +
 +    "    }\n" +
 +    "  ]\n" +
 +    "}";
 +
 +}
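
The JSON constants above are mock Ambari REST responses with a cluster-name placeholder baked into the string literals. As a side note only, here is a minimal sketch (not the test's actual API; the token and helper names are assumptions) of how such a template can be specialized per test cluster before being handed to the code under test:

    // Hypothetical helper: substitute the cluster-name token in a mock Ambari
    // response template. The constant mirrors the placeholder idea used above
    // but is an assumption here, not the test's real field.
    public class MockResponseSketch {
        static final String CLUSTER_PLACEHOLDER = "CLUSTER_PLACEHOLDER";

        static String render(String template, String clusterName) {
            return template.replace(CLUSTER_PLACEHOLDER, clusterName);
        }

        public static void main(String[] args) {
            String template = "{ \"cluster_name\" : \"" + CLUSTER_PLACEHOLDER + "\" }";
            System.out.println(render(template, "Sandbox"));
            // prints: { "cluster_name" : "Sandbox" }
        }
    }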

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-provider-security-pac4j/src/main/java/org/apache/knox/gateway/pac4j/filter/Pac4jDispatcherFilter.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-pac4j/src/main/java/org/apache/knox/gateway/pac4j/filter/Pac4jDispatcherFilter.java
index fe39f25,0000000..6e04932
mode 100644,000000..100644
--- a/gateway-provider-security-pac4j/src/main/java/org/apache/knox/gateway/pac4j/filter/Pac4jDispatcherFilter.java
+++ b/gateway-provider-security-pac4j/src/main/java/org/apache/knox/gateway/pac4j/filter/Pac4jDispatcherFilter.java
@@@ -1,214 -1,0 +1,215 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.pac4j.filter;
 +
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.apache.knox.gateway.pac4j.Pac4jMessages;
 +import org.apache.knox.gateway.pac4j.session.KnoxSessionStore;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.security.KeystoreService;
 +import org.apache.knox.gateway.services.security.MasterService;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.knox.gateway.services.security.AliasServiceException;
 +import org.apache.knox.gateway.services.security.CryptoService;
 +import org.pac4j.config.client.PropertiesConfigFactory;
 +import org.pac4j.core.client.Client;
 +import org.pac4j.core.config.Config;
- import org.pac4j.core.config.ConfigSingleton;
- import org.pac4j.core.context.J2EContext;
 +import org.pac4j.core.util.CommonHelper;
 +import org.pac4j.http.client.indirect.IndirectBasicAuthClient;
 +import org.pac4j.http.credentials.authenticator.test.SimpleTestUsernamePasswordAuthenticator;
 +import org.pac4j.j2e.filter.CallbackFilter;
 +import org.pac4j.j2e.filter.SecurityFilter;
 +
 +import javax.servlet.*;
 +import javax.servlet.http.HttpServletRequest;
 +import javax.servlet.http.HttpServletResponse;
 +import java.io.IOException;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
 +/**
 + * <p>This is the main filter for the pac4j provider. The pac4j provider module heavily relies on the j2e-pac4j library (https://github.com/pac4j/j2e-pac4j).</p>
 + * <p>This filter dispatches the HTTP calls between the j2e-pac4j filters:</p>
 + * <ul>
 + *     <li>to the {@link CallbackFilter} if the <code>pac4jCallback</code> parameter exists: it finishes the authentication process</li>
 + *     <li>to the {@link SecurityFilter} otherwise: it starts the authentication process (redirection to the identity provider) if the user is not authenticated</li>
 + * </ul>
 + * <p>It uses the {@link KnoxSessionStore} to manage session data. The generated cookies are defined on a domain name
 + * which can be configured via the domain suffix parameter: <code>pac4j.cookie.domain.suffix</code>.</p>
 + * <p>The callback url must be defined to the current protected url (KnoxSSO service for example) via the parameter: <code>pac4j.callbackUrl</code>.</p>
 + *
 + * @since 0.8.0
 + */
 +public class Pac4jDispatcherFilter implements Filter {
 +
 +  private static Pac4jMessages log = MessagesFactory.get(Pac4jMessages.class);
 +
 +  public static final String TEST_BASIC_AUTH = "testBasicAuth";
 +
 +  public static final String PAC4J_CALLBACK_URL = "pac4j.callbackUrl";
 +
 +  public static final String PAC4J_CALLBACK_PARAMETER = "pac4jCallback";
 +
 +  private static final String PAC4J_COOKIE_DOMAIN_SUFFIX_PARAM = "pac4j.cookie.domain.suffix";
 +
++  private static final String PAC4J_CONFIG = "pac4j.config";
++
 +  private CallbackFilter callbackFilter;
 +
 +  private SecurityFilter securityFilter;
 +  private MasterService masterService = null;
 +  private KeystoreService keystoreService = null;
 +  private AliasService aliasService = null;
 +
 +  @Override
 +  public void init( FilterConfig filterConfig ) throws ServletException {
 +    // JWT service
 +    final ServletContext context = filterConfig.getServletContext();
 +    CryptoService cryptoService = null;
 +    String clusterName = null;
 +    if (context != null) {
 +      GatewayServices services = (GatewayServices) context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
 +      clusterName = (String) context.getAttribute(GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE);
 +      if (services != null) {
 +        keystoreService = (KeystoreService) services.getService(GatewayServices.KEYSTORE_SERVICE);
 +        cryptoService = (CryptoService) services.getService(GatewayServices.CRYPTO_SERVICE);
 +        aliasService = (AliasService) services.getService(GatewayServices.ALIAS_SERVICE);
 +        masterService = (MasterService) services.getService("MasterService");
 +      }
 +    }
 +    // crypto service, alias service and cluster name are mandatory
 +    if (cryptoService == null || aliasService == null || clusterName == null) {
 +      log.cryptoServiceAndAliasServiceAndClusterNameRequired();
 +      throw new ServletException("The crypto service, alias service and cluster name are required.");
 +    }
 +    try {
 +      aliasService.getPasswordFromAliasForCluster(clusterName, KnoxSessionStore.PAC4J_PASSWORD, true);
 +    } catch (AliasServiceException e) {
 +      log.unableToGenerateAPasswordForEncryption(e);
 +      throw new ServletException("Unable to generate a password for encryption.");
 +    }
 +
 +    // url to SSO authentication provider
 +    String pac4jCallbackUrl = filterConfig.getInitParameter(PAC4J_CALLBACK_URL);
 +    if (pac4jCallbackUrl == null) {
 +      log.ssoAuthenticationProviderUrlRequired();
 +      throw new ServletException("Required pac4j callback URL is missing.");
 +    }
 +    // add the callback parameter to know it's a callback
 +    pac4jCallbackUrl = CommonHelper.addParameter(pac4jCallbackUrl, PAC4J_CALLBACK_PARAMETER, "true");
 +
 +    final Config config;
 +    final String clientName;
 +    // client name from servlet parameter (mandatory)
 +    final String clientNameParameter = filterConfig.getInitParameter("clientName");
 +    if (clientNameParameter == null) {
 +      log.clientNameParameterRequired();
 +      throw new ServletException("Required pac4j clientName parameter is missing.");
 +    }
 +    if (TEST_BASIC_AUTH.equalsIgnoreCase(clientNameParameter)) {
 +      // test configuration
 +      final IndirectBasicAuthClient indirectBasicAuthClient = new IndirectBasicAuthClient(new SimpleTestUsernamePasswordAuthenticator());
 +      indirectBasicAuthClient.setRealmName("Knox TEST");
 +      config = new Config(pac4jCallbackUrl, indirectBasicAuthClient);
 +      clientName = "IndirectBasicAuthClient";
 +    } else {
 +      // get clients from the init parameters
 +      final Map<String, String> properties = new HashMap<>();
 +      final Enumeration<String> names = filterConfig.getInitParameterNames();
 +      addDefaultConfig(clientNameParameter, properties);
 +      while (names.hasMoreElements()) {
 +        final String key = names.nextElement();
 +        properties.put(key, filterConfig.getInitParameter(key));
 +      }
 +      final PropertiesConfigFactory propertiesConfigFactory = new PropertiesConfigFactory(pac4jCallbackUrl, properties);
 +      config = propertiesConfigFactory.build();
 +      final List<Client> clients = config.getClients().getClients();
 +      if (clients == null || clients.size() == 0) {
 +        log.atLeastOnePac4jClientMustBeDefined();
 +        throw new ServletException("At least one pac4j client must be defined.");
 +      }
 +      if (CommonHelper.isBlank(clientNameParameter)) {
 +        clientName = clients.get(0).getName();
 +      } else {
 +        clientName = clientNameParameter;
 +      }
 +    }
 +
 +    callbackFilter = new CallbackFilter();
++    callbackFilter.setConfigOnly(config);
 +    securityFilter = new SecurityFilter();
 +    securityFilter.setClients(clientName);
-     securityFilter.setConfig(config);
++    securityFilter.setConfigOnly(config);
 +
 +    final String domainSuffix = filterConfig.getInitParameter(PAC4J_COOKIE_DOMAIN_SUFFIX_PARAM);
 +    config.setSessionStore(new KnoxSessionStore(cryptoService, clusterName, domainSuffix));
-     ConfigSingleton.setConfig(config);
 +  }
 +
 +  private void addDefaultConfig(String clientNameParameter, Map<String, String> properties) {
 +    // add default saml params
 +    if (clientNameParameter.contains("SAML2Client")) {
 +      properties.put(PropertiesConfigFactory.SAML_KEYSTORE_PATH,
 +          keystoreService.getKeystorePath());
 +
 +      properties.put(PropertiesConfigFactory.SAML_KEYSTORE_PASSWORD,
 +          new String(masterService.getMasterSecret()));
 +
 +      // check for provisioned alias for private key
 +      char[] gip = null;
 +      try {
 +        gip = aliasService.getGatewayIdentityPassphrase();
 +      }
 +      catch(AliasServiceException ase) {
 +        log.noPrivateKeyPasshraseProvisioned(ase);
 +      }
 +      if (gip != null) {
 +        properties.put(PropertiesConfigFactory.SAML_PRIVATE_KEY_PASSWORD,
 +            new String(gip));
 +      }
 +      else {
 +        // no alias provisioned then use the master
 +        properties.put(PropertiesConfigFactory.SAML_PRIVATE_KEY_PASSWORD,
 +            new String(masterService.getMasterSecret()));
 +      }
 +    }
 +  }
 +
 +  @Override
 +  public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
 +
 +    final HttpServletRequest request = (HttpServletRequest) servletRequest;
 +    final HttpServletResponse response = (HttpServletResponse) servletResponse;
-     final J2EContext context = new J2EContext(request, response, ConfigSingleton.getConfig().getSessionStore());
++    request.setAttribute(PAC4J_CONFIG, securityFilter.getConfig());
++//    final J2EContext context = new J2EContext(request, response, securityFilter.getConfig().getSessionStore());
 +
 +    // it's a callback from an identity provider
 +    if (request.getParameter(PAC4J_CALLBACK_PARAMETER) != null) {
 +      // apply CallbackFilter
 +      callbackFilter.doFilter(servletRequest, servletResponse, filterChain);
 +    } else {
 +      // otherwise just apply security and require authentication
 +      // apply SecurityFilter
 +      securityFilter.doFilter(servletRequest, servletResponse, filterChain);
 +    }
 +  }
 +
 +  @Override
 +  public void destroy() { }
 +}
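
The notable change in Pac4jDispatcherFilter above is dropping pac4j's ConfigSingleton: the filter keeps the Config itself and publishes it on the request (the "pac4j.config" attribute) so the downstream identity adapter can read it. Purely as an illustration of that hand-off pattern, and not the Knox classes themselves (class and attribute names below are invented), two servlet filters sharing an object through a request attribute could look like this:

    import javax.servlet.Filter;
    import javax.servlet.FilterChain;
    import javax.servlet.FilterConfig;
    import javax.servlet.ServletException;
    import javax.servlet.ServletRequest;
    import javax.servlet.ServletResponse;
    import java.io.IOException;

    public class AttributeHandoffSketch {

        static final String CONFIG_ATTRIBUTE = "example.config"; // hypothetical attribute key

        // Upstream filter publishes the per-deployment object on the request.
        public static class ProducerFilter implements Filter {
            private final Object config = new Object(); // stands in for the shared config object
            @Override public void init(FilterConfig filterConfig) { }
            @Override public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
                    throws IOException, ServletException {
                req.setAttribute(CONFIG_ATTRIBUTE, config);
                chain.doFilter(req, res);
            }
            @Override public void destroy() { }
        }

        // Downstream filter reads the object back; no static singleton is required.
        public static class ConsumerFilter implements Filter {
            @Override public void init(FilterConfig filterConfig) { }
            @Override public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
                    throws IOException, ServletException {
                Object config = req.getAttribute(CONFIG_ATTRIBUTE);
                if (config == null) {
                    throw new ServletException("upstream filter did not publish the shared config");
                }
                chain.doFilter(req, res);
            }
            @Override public void destroy() { }
        }
    }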

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-provider-security-pac4j/src/main/java/org/apache/knox/gateway/pac4j/filter/Pac4jIdentityAdapter.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-pac4j/src/main/java/org/apache/knox/gateway/pac4j/filter/Pac4jIdentityAdapter.java
index 6387a0b,0000000..bc66003
mode 100644,000000..100644
--- a/gateway-provider-security-pac4j/src/main/java/org/apache/knox/gateway/pac4j/filter/Pac4jIdentityAdapter.java
+++ b/gateway-provider-security-pac4j/src/main/java/org/apache/knox/gateway/pac4j/filter/Pac4jIdentityAdapter.java
@@@ -1,146 -1,0 +1,161 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.pac4j.filter;
 +
- import org.apache.knox.gateway.audit.api.Action;
- import org.apache.knox.gateway.audit.api.ActionOutcome;
- import org.apache.knox.gateway.audit.api.AuditService;
- import org.apache.knox.gateway.audit.api.AuditServiceFactory;
- import org.apache.knox.gateway.audit.api.Auditor;
- import org.apache.knox.gateway.audit.api.ResourceType;
++import org.apache.knox.gateway.audit.api.*;
 +import org.apache.knox.gateway.audit.log4j.audit.AuditConstants;
 +import org.apache.knox.gateway.filter.AbstractGatewayFilter;
 +import org.apache.knox.gateway.security.PrimaryPrincipal;
- import org.pac4j.core.config.ConfigSingleton;
++import org.pac4j.core.config.Config;
 +import org.pac4j.core.context.J2EContext;
 +import org.pac4j.core.profile.CommonProfile;
 +import org.pac4j.core.profile.ProfileManager;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import javax.security.auth.Subject;
 +import javax.servlet.Filter;
 +import javax.servlet.FilterChain;
 +import javax.servlet.FilterConfig;
 +import javax.servlet.ServletException;
 +import javax.servlet.ServletRequest;
 +import javax.servlet.ServletResponse;
 +import javax.servlet.http.HttpServletRequest;
 +import javax.servlet.http.HttpServletResponse;
 +import java.io.IOException;
 +import java.security.PrivilegedActionException;
 +import java.security.PrivilegedExceptionAction;
 +import java.util.Optional;
 +
 +/**
 + * <p>This filter retrieves the authenticated user saved by the pac4j provider and injects it into the J2E HTTP request.</p>
 + *
 + * @since 0.8.0
 + */
 +public class Pac4jIdentityAdapter implements Filter {
 +
 +  private static final Logger logger = LoggerFactory.getLogger(Pac4jIdentityAdapter.class);
 +
++  public static final String PAC4J_ID_ATTRIBUTE = "pac4j.id_attribute";
++  private static final String PAC4J_CONFIG = "pac4j.config";
++
 +  private static AuditService auditService = AuditServiceFactory.getAuditService();
 +  private static Auditor auditor = auditService.getAuditor(
 +      AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
 +      AuditConstants.KNOX_COMPONENT_NAME );
 +
 +  private String testIdentifier;
 +
++  private String idAttribute;
++
 +  @Override
 +  public void init( FilterConfig filterConfig ) throws ServletException {
++    idAttribute = filterConfig.getInitParameter(PAC4J_ID_ATTRIBUTE);
 +  }
 +
 +  public void destroy() {
 +  }
 +
 +  public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain chain)
 +      throws IOException, ServletException {
 +
 +    final HttpServletRequest request = (HttpServletRequest) servletRequest;
 +    final HttpServletResponse response = (HttpServletResponse) servletResponse;
-     final J2EContext context = new J2EContext(request, response, ConfigSingleton.getConfig().getSessionStore());
++    final J2EContext context = new J2EContext(request, response,
++        ((Config)request.getAttribute(PAC4J_CONFIG)).getSessionStore());
 +    final ProfileManager<CommonProfile> manager = new ProfileManager<CommonProfile>(context);
 +    final Optional<CommonProfile> optional = manager.get(true);
 +    if (optional.isPresent()) {
 +      CommonProfile profile = optional.get();
 +      logger.debug("User authenticated as: {}", profile);
 +      manager.remove(true);
-       final String id = profile.getId();
++      String id = null;
++      if (idAttribute != null) {
++        Object attribute = profile.getAttribute(idAttribute);
++        if (attribute != null) {
++          id = attribute.toString();
++        }
++        if (id == null) {
++          logger.error("Invalid attribute_id: {} configured to be used as principal"
++              + " falling back to default id", idAttribute);
++        }
++      }
++      if (id == null) {
++        id = profile.getId();
++      }
 +      testIdentifier = id;
 +      PrimaryPrincipal pp = new PrimaryPrincipal(id);
 +      Subject subject = new Subject();
 +      subject.getPrincipals().add(pp);
 +      auditService.getContext().setUsername(id);
 +      String sourceUri = (String)request.getAttribute( AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME );
 +      auditor.audit(Action.AUTHENTICATION, sourceUri, ResourceType.URI, ActionOutcome.SUCCESS);
 +
 +      doAs(request, response, chain, subject);
 +    }
 +  }
 +
 +  private void doAs(final ServletRequest request,
 +      final ServletResponse response, final FilterChain chain, Subject subject)
 +      throws IOException, ServletException {
 +    try {
 +      Subject.doAs(
 +          subject,
 +          new PrivilegedExceptionAction<Object>() {
 +            public Object run() throws Exception {
 +              chain.doFilter(request, response);
 +              return null;
 +            }
 +          }
 +          );
 +    }
 +    catch (PrivilegedActionException e) {
 +      Throwable t = e.getCause();
 +      if (t instanceof IOException) {
 +        throw (IOException) t;
 +      }
 +      else if (t instanceof ServletException) {
 +        throw (ServletException) t;
 +      }
 +      else {
 +        throw new ServletException(t);
 +      }
 +    }
 +  }
 +
 +  /**
 +   * For tests only.
 +   */
 +  public static void setAuditService(AuditService auditService) {
 +    Pac4jIdentityAdapter.auditService = auditService;
 +  }
 +
 +  /**
 +   * For tests only.
 +   */
 +  public static void setAuditor(Auditor auditor) {
 +    Pac4jIdentityAdapter.auditor = auditor;
 +  }
 +
 +  /**
 +   * For tests only.
 +   */
 +  public String getTestIdentifier() {
 +    return testIdentifier;
 +  }
 +}
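
Pac4jIdentityAdapter above now honors an optional pac4j.id_attribute init parameter: when the named profile attribute is present, it becomes the Knox principal; otherwise the adapter falls back to the pac4j profile id. A condensed sketch of just that selection rule, where the map stands in for the pac4j profile and all names are illustrative:

    import java.util.Collections;
    import java.util.Map;

    public class PrincipalSelectionSketch {
        // attributes stands in for CommonProfile.getAttributes(); defaultId for profile.getId()
        static String selectPrincipal(String idAttribute, Map<String, Object> attributes, String defaultId) {
            if (idAttribute != null) {
                Object value = attributes.get(idAttribute);
                if (value != null) {
                    return value.toString();
                }
                // configured attribute missing on the profile: fall back to the default id
            }
            return defaultId;
        }

        public static void main(String[] args) {
            Map<String, Object> profile = Collections.singletonMap("email", "user@example.com");
            System.out.println(selectPrincipal("email", profile, "guest"));   // user@example.com
            System.out.println(selectPrincipal("missing", profile, "guest")); // guest
            System.out.println(selectPrincipal(null, profile, "guest"));      // guest
        }
    }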

http://git-wip-us.apache.org/repos/asf/knox/blob/22a7304a/gateway-provider-security-pac4j/src/test/java/org/apache/knox/gateway/pac4j/MockHttpServletRequest.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-pac4j/src/test/java/org/apache/knox/gateway/pac4j/MockHttpServletRequest.java
index 7a3a833,0000000..18f4913
mode 100644,000000..100644
--- a/gateway-provider-security-pac4j/src/test/java/org/apache/knox/gateway/pac4j/MockHttpServletRequest.java
+++ b/gateway-provider-security-pac4j/src/test/java/org/apache/knox/gateway/pac4j/MockHttpServletRequest.java
@@@ -1,88 -1,0 +1,94 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.pac4j;
 +
 +import javax.servlet.http.*;
 +
 +import java.util.HashMap;
 +import java.util.Map;
 +
 +import static org.mockito.Mockito.*;
 +
 +public class MockHttpServletRequest extends HttpServletRequestWrapper {
 +
 +    private String requestUrl;
 +    private Cookie[] cookies;
 +    private String serverName;
 +    private Map<String, String> parameters = new HashMap<>();
 +    private Map<String, String> headers = new HashMap<>();
++    private Map<String, Object> attributes = new HashMap<>();
 +
 +    public MockHttpServletRequest() {
 +        super(mock(HttpServletRequest.class));
 +    }
 +
 +    @Override
 +    public Cookie[] getCookies() {
 +        return cookies;
 +    }
 +
 +    public void setCookies(final Cookie[] cookies) {
 +        this.cookies = cookies;
 +    }
 +
 +    @Override
 +    public StringBuffer getRequestURL() {
 +        return new StringBuffer(requestUrl);
 +    }
 +
 +    public void setRequestURL(final String requestUrl) {
 +        this.requestUrl = requestUrl;
 +    }
 +
 +    @Override
 +    public String getServerName() {
 +        return serverName;
 +    }
 +
 +    public void setServerName(final String serverName) {
 +        this.serverName = serverName;
 +    }
 +
 +    @Override
 +    public String getParameter(String name) {
 +        return parameters.get(name);
 +    }
 +
 +    public void addParameter(String key, String value) {
 +        parameters.put(key, value);
 +    }
 +
 +    @Override
 +    public String getHeader(String name) {
 +        return headers.get(name);
 +    }
 +
 +    public void addHeader(String key, String value) {
 +        headers.put(key, value);
 +    }
 +
 +    @Override
++    public void setAttribute(String name, Object value) {
++        attributes.put(name, value);
++    }
++
++    @Override
 +    public Object getAttribute(String name) {
-         return null;
++        return attributes.get(name);
 +    }
 +}
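
Because the mock above now records attributes instead of always returning null, a test can exercise the request-attribute hand-off described earlier. A hypothetical JUnit 4 check, assuming the mock and JUnit are on the test classpath (the test class and attribute value are invented for illustration):

    import static org.junit.Assert.assertEquals;
    import org.junit.Test;

    public class MockRequestAttributeSketchTest {
        @Test
        public void attributesRoundTrip() {
            MockHttpServletRequest request = new MockHttpServletRequest();
            request.setAttribute("pac4j.config", "shared-config");
            assertEquals("shared-config", request.getAttribute("pac4j.config"));
        }
    }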