You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@knox.apache.org by km...@apache.org on 2013/09/30 20:49:18 UTC
git commit: KNOX-172: Partial support for ~. Still need principal
mapping and sample changes.
Updated Branches:
refs/heads/master 8542db9c7 -> 942842a70
KNOX-172: Partial support for ~. Still need principal mapping and sample changes.
Project: http://git-wip-us.apache.org/repos/asf/incubator-knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-knox/commit/942842a7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-knox/tree/942842a7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-knox/diff/942842a7
Branch: refs/heads/master
Commit: 942842a7001dfda35d0197f18d7d44199ff5b572
Parents: 8542db9
Author: Kevin Minder <ke...@hortonworks.com>
Authored: Mon Sep 30 14:49:12 2013 -0400
Committer: Kevin Minder <ke...@hortonworks.com>
Committed: Mon Sep 30 14:49:12 2013 -0400
----------------------------------------------------------------------
.../home/templates/sandbox-1-3-topology.xml | 113 -----------------
.../home/templates/sandbox-1-3-users.ldif | 36 ------
gateway-release/home/templates/sandbox-1-3.xml | 121 +++++++++++++++++++
.../hdfs/WebHdfsDeploymentContributor.java | 26 ++++
.../WebHdfsDeploymentContributor/rewrite.xml | 8 ++
.../hadoop/gateway/GatewayBasicFuncTest.java | 53 ++++++++
6 files changed, 208 insertions(+), 149 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/942842a7/gateway-release/home/templates/sandbox-1-3-topology.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/templates/sandbox-1-3-topology.xml b/gateway-release/home/templates/sandbox-1-3-topology.xml
deleted file mode 100644
index 7579a46..0000000
--- a/gateway-release/home/templates/sandbox-1-3-topology.xml
+++ /dev/null
@@ -1,113 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<topology>
-
- <gateway>
-
- <provider>
- <role>authentication</role>
- <enabled>true</enabled>
- <name>ShiroProvider</name>
- <param>
- <name>main.ldapRealm</name>
- <value>org.apache.shiro.realm.ldap.JndiLdapRealm</value>
- </param>
- <param>
- <name>main.ldapRealm.userDnTemplate</name>
- <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
- </param>
- <param>
- <name>main.ldapRealm.contextFactory.url</name>
- <value>ldap://localhost:33389</value>
- </param>
- <param>
- <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
- <value>simple</value>
- </param>
- <param>
- <name>urls./**</name>
- <value>authcBasic</value>
- </param>
- </provider>
- <provider>
- <role>identity-assertion</role>
- <enabled>true</enabled>
- <name>Pseudo</name>
- </provider>
-
- <!--
- Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
- For example, a hadoop service running in AWS may return a response that includes URLs containing the
- some AWS internal host name. If the client needs to make a subsequent request to the host identified
- in those URLs they need to be mapped to external host names that the client Knox can use to connect.
-
- If the external hostname and internal host names are same turn of this provider by setting the value of
- enabled parameter as false.
-
- The name parameter specifies the external host names in a comma separated list.
- The value parameter specifies corresponding internal host names in a comma separated list.
-
- Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out
- of box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the
- Hadoop services using localhost. In real clusters, external host names would almost never be localhost.
- -->
- <provider>
- <role>hostmap</role>
- <name>static</name>
- <enabled>true</enabled>
- <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
- </provider>
-
- </gateway>
-
- <service>
- <role>NAMENODE</role>
- <url>hdfs://localhost:8020</url>
- </service>
-
- <service>
- <role>JOBTRACKER</role>
- <url>rpc://localhost:50030</url>
- </service>
-
- <service>
- <role>WEBHDFS</role>
- <url>http://localhost:50070/webhdfs</url>
- </service>
-
- <service>
- <role>WEBHCAT</role>
- <url>http://localhost:50111/templeton</url>
- </service>
-
- <service>
- <role>OOZIE</role>
- <url>http://localhost:11000/oozie</url>
- </service>
-
- <service>
- <role>WEBHBASE</role>
- <url>http://localhost:60080</url>
- </service>
-
- <service>
- <role>HIVE</role>
- <url>http://localhost:10000</url>
- </service>
-
-</topology>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/942842a7/gateway-release/home/templates/sandbox-1-3-users.ldif
----------------------------------------------------------------------
diff --git a/gateway-release/home/templates/sandbox-1-3-users.ldif b/gateway-release/home/templates/sandbox-1-3-users.ldif
deleted file mode 100644
index f792fee..0000000
--- a/gateway-release/home/templates/sandbox-1-3-users.ldif
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: 1
-
-# entry for a sample people container
-# please replace with site specific values
-dn: ou=people,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:organizationalUnit
-ou: people
-
-# entry for a sample end user
-# please replace with site specific values
-dn: uid=hue,ou=people,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:person
-objectclass:organizationalPerson
-objectclass:inetOrgPerson
-cn: HUE
-sn: User
-uid: hue
-userPassword:hue-password
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/942842a7/gateway-release/home/templates/sandbox-1-3.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/templates/sandbox-1-3.xml b/gateway-release/home/templates/sandbox-1-3.xml
new file mode 100644
index 0000000..a96c81e
--- /dev/null
+++ b/gateway-release/home/templates/sandbox-1-3.xml
@@ -0,0 +1,121 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+
+ <gateway>
+
+ <provider>
+ <role>authentication</role>
+ <enabled>true</enabled>
+ <name>ShiroProvider</name>
+ <param>
+ <name>main.ldapRealm</name>
+ <value>org.apache.shiro.realm.ldap.JndiLdapRealm</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.userDnTemplate</name>
+ <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.url</name>
+ <value>ldap://localhost:33389</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+ <value>simple</value>
+ </param>
+ <param>
+ <name>urls./**</name>
+ <value>authcBasic</value>
+ </param>
+ </provider>
+
+ <!--
+ Maps the username from the samples that is valid within our LDAP server to the
+ username expected in sandbox 1.3. This username changes in sandbox 2.0. In order for
+ the samples to just work with either sandbox version without modification, we map the
+ principal to the expected value. This also illustrates the use of principal mapping.
+ -->
+ <provider>
+ <role>identity-assertion</role>
+ <enabled>true</enabled>
+ <name>Pseudo</name>
+ <param><name>principal.mapping</name><value>guest=hue</value></param>
+ </provider>
+
+ <!--
+ Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
+ For example, a hadoop service running in AWS may return a response that includes URLs containing
+ some AWS internal host name. If the client needs to make a subsequent request to the host identified
+ in those URLs, they need to be mapped to external host names that the Knox client can use to connect.
+
+ If the external host name and the internal host names are the same, turn off this provider by
+ setting the value of the enabled parameter to false.
+
+ The name parameter specifies the external host names in a comma separated list.
+ The value parameter specifies corresponding internal host names in a comma separated list.
+
+ Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the
+ out-of-the-box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the
+ Hadoop services using localhost. In real clusters, external host names would almost never be localhost.
+ -->
+ <provider>
+ <role>hostmap</role>
+ <name>static</name>
+ <enabled>true</enabled>
+ <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+ </provider>
+
+ </gateway>
+
+ <service>
+ <role>NAMENODE</role>
+ <url>hdfs://localhost:8020</url>
+ </service>
+
+ <service>
+ <role>JOBTRACKER</role>
+ <url>rpc://localhost:50030</url>
+ </service>
+
+ <service>
+ <role>WEBHDFS</role>
+ <url>http://localhost:50070/webhdfs</url>
+ </service>
+
+ <service>
+ <role>WEBHCAT</role>
+ <url>http://localhost:50111/templeton</url>
+ </service>
+
+ <service>
+ <role>OOZIE</role>
+ <url>http://localhost:11000/oozie</url>
+ </service>
+
+ <service>
+ <role>WEBHBASE</role>
+ <url>http://localhost:60080</url>
+ </service>
+
+ <service>
+ <role>HIVE</role>
+ <url>http://localhost:10000</url>
+ </service>
+
+</topology>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/942842a7/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
index eff7f47..7095e80 100644
--- a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
+++ b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
@@ -92,6 +92,32 @@ public class WebHdfsDeploymentContributor extends ServiceDeploymentContributorBa
addIdentityAssertionFilter( context, service, fileResource );
addAuthorizationFilter( context, service, fileResource );
addDispatchFilter( context, service, fileResource, "dispatch", null );
+
+ ResourceDescriptor homeResource = context.getGatewayDescriptor().addResource();
+ homeResource.role( service.getRole() );
+ homeResource.pattern( WEBHDFS_EXTERNAL_PATH + "/~?**" );
+ addAuthenticationFilter( context, service, homeResource );
+ params = new ArrayList<FilterParamDescriptor>();
+ params.add( homeResource.createFilterParam().
+ name( UrlRewriteServletFilter.REQUEST_URL_RULE_PARAM ).value( getQualifiedName() + "/inbound/namenode/home" ) );
+ addRewriteFilter( context, service, homeResource, params );
+ addIdentityAssertionFilter( context, service, homeResource );
+ addAuthorizationFilter( context, service, homeResource );
+ addDispatchFilter( context, service, homeResource, "dispatch", null );
+
+ ResourceDescriptor homeFileResource = context.getGatewayDescriptor().addResource();
+ homeFileResource.role( service.getRole() );
+ homeFileResource.pattern( WEBHDFS_EXTERNAL_PATH + "/~/**?**" );
+ addAuthenticationFilter( context, service, homeFileResource );
+ params = new ArrayList<FilterParamDescriptor>();
+ params.add( homeFileResource.createFilterParam().
+ name( UrlRewriteServletFilter.REQUEST_URL_RULE_PARAM ).value( getQualifiedName() + "/inbound/namenode/home/file" ) );
+ params.add( homeFileResource.createFilterParam().
+ name( UrlRewriteServletFilter.RESPONSE_HEADERS_FILTER_PARAM ).value( getQualifiedName() + "/outbound/namenode/headers" ) );
+ addRewriteFilter( context, service, homeFileResource, params );
+ addIdentityAssertionFilter( context, service, homeFileResource );
+ addAuthorizationFilter( context, service, homeFileResource );
+ addDispatchFilter( context, service, homeFileResource, "dispatch", null );
}
public void contributeDataNodeResource( DeploymentContext context, Service service ) throws URISyntaxException {
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/942842a7/gateway-service-webhdfs/src/main/resources/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-service-webhdfs/src/main/resources/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor/rewrite.xml b/gateway-service-webhdfs/src/main/resources/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor/rewrite.xml
index 66081ee..5fcebf5 100644
--- a/gateway-service-webhdfs/src/main/resources/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor/rewrite.xml
+++ b/gateway-service-webhdfs/src/main/resources/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor/rewrite.xml
@@ -47,6 +47,14 @@
<rewrite template="{$serviceUrl[WEBHDFS]}/{version}/{path=**}?{**}"/>
</rule>
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/namenode/home" pattern="*://*:*/**/webhdfs/{version}/~?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{version}/user/{$username}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/namenode/home/file" pattern="*://*:*/**/webhdfs/{version}/~/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{version}/user/{$username}/{path=**}?{**}"/>
+ </rule>
+
<rule dir="IN" name="WEBHDFS/webhdfs/inbound/datanode">
<decrypt-query/>
<match pattern="*://*:*/**/webhdfs/data/*/{path=**}?{scheme}?{host}?{port}?{**}"/>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/942842a7/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
index 8b8cc58..61a38ee 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
@@ -286,6 +286,59 @@ public class GatewayBasicFuncTest {
}
@Test
+ public void testHdfsTildeUseCase() throws IOException {
+ String root = "/tmp/GatewayWebHdfsFuncTest/testBasicHdfsUseCase";
+ String username = "hdfs";
+ String password = "hdfs-password";
+ InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
+
+ // Attempt to delete the test directory in case a previous run failed.
+ // Ignore any result.
+ // Cleanup anything that might have been leftover because the test failed previously.
+ driver.getMock( "WEBHDFS" )
+ .expect()
+ .method( "DELETE" )
+ .pathInfo( "/v1/user/hdfs" + root )
+ .queryParam( "op", "DELETE" )
+ .queryParam( "user.name", username )
+ .queryParam( "recursive", "true" )
+ .respond()
+ .status( HttpStatus.SC_OK );
+ given()
+ .auth().preemptive().basic( username, password )
+ .queryParam( "op", "DELETE" )
+ .queryParam( "recursive", "true" )
+ .expect()
+ //.log().all();
+ .statusCode( HttpStatus.SC_OK )
+ .when().delete( driver.getUrl( "WEBHDFS" ) + "/v1/~" + root + ( driver.isUseGateway() ? "" : "?user.name=" + username ) );
+ driver.assertComplete();
+
+ driver.getMock( "WEBHDFS" )
+ .expect()
+ .method( "PUT" )
+ .pathInfo( "/v1/user/hdfs/dir" )
+ .queryParam( "op", "MKDIRS" )
+ .queryParam( "user.name", username )
+ .respond()
+ .status( HttpStatus.SC_OK )
+ .content( driver.getResourceBytes( "webhdfs-success.json" ) )
+ .contentType( "application/json" );
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .queryParam( "op", "MKDIRS" )
+ .expect()
+ //.log().all();
+ .statusCode( HttpStatus.SC_OK )
+ .contentType( "application/json" )
+ .content( "boolean", is( true ) )
+ .when().put( driver.getUrl( "WEBHDFS" ) + "/v1/~/dir" );
+ driver.assertComplete();
+
+ }
+
+ @Test
public void testBasicHdfsUseCase() throws IOException {
String root = "/tmp/GatewayWebHdfsFuncTest/testBasicHdfsUseCase";
String username = "hdfs";