You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@knox.apache.org by km...@apache.org on 2013/09/14 04:53:52 UTC
[1/2] KNOX-50: Ensure cluster topology details rewritten for Oozie
REST APIs
Updated Branches:
refs/heads/master 70711f73d -> 3f1ede539
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-templeton/src/main/java/org/apache/hadoop/gateway/templeton/TempletonDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-templeton/src/main/java/org/apache/hadoop/gateway/templeton/TempletonDeploymentContributor.java b/gateway-service-templeton/src/main/java/org/apache/hadoop/gateway/templeton/TempletonDeploymentContributor.java
index 88ed2cd..406867e 100644
--- a/gateway-service-templeton/src/main/java/org/apache/hadoop/gateway/templeton/TempletonDeploymentContributor.java
+++ b/gateway-service-templeton/src/main/java/org/apache/hadoop/gateway/templeton/TempletonDeploymentContributor.java
@@ -51,7 +51,7 @@ public class TempletonDeploymentContributor extends ServiceDeploymentContributor
.directions( "request" )
.pattern( "*://*:*/**" + TEMPLETON_EXTERNAL_PATH + "/{path=**}?{**}" );
rewrite = rule.addStep( "rewrite" );
- rewrite.template( service.getUrl().toExternalForm() + "/{path=**}?{**}" );
+ rewrite.template( service.getUrl() + "/{path=**}?{**}" );
ResourceDescriptor resource = context.getGatewayDescriptor().addResource();
resource.role( service.getRole() );
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributor.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributor.java
index 8e8ab9e..ba21928 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributor.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributor.java
@@ -21,7 +21,7 @@ import org.apache.hadoop.gateway.topology.Service;
public interface ServiceDeploymentContributor {
- // The role of this service deployment contributor. e.g. NAMENODE
+ // The role of this service deployment contributor. e.g. WEBHDFS
String getRole();
// The name of this service deployment contributor. Not used yet.
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
index 8e70b11..24bba52 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.gateway.topology;
-import java.net.URL;
-
public class Service {
private String role;
@@ -40,13 +38,13 @@ public class Service {
this.name = name;
}
- private URL url;
+ private String url;
- public URL getUrl() {
+ public String getUrl() {
return url;
}
- public void setUrl( URL url ) {
+ public void setUrl( String url ) {
this.url = url;
}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServer.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServer.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServer.java
index 72aea0f..508c71a 100644
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServer.java
+++ b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServer.java
@@ -34,8 +34,8 @@ import java.util.Queue;
* Each interaction is an expected request and a resulting response.
* These interactions are added to a queue in a fluent API style.
* So in most of the tests like GatewayBasicFuncTest.testBasicJsonUseCase you will see calls like
- * driver.getMock( "NAMENODE" ).expect()....respond()...;
- * This adds a single interaction to the mock server which is returned via the driver.getMock( "NAMENODE" ) above.
+ * driver.getMock( "WEBHDFS" ).expect()....respond()...;
+ * This adds a single interaction to the mock server which is returned via the driver.getMock( "WEBHDFS" ) above.
* Any number of interactions may be added.
* When the request comes in it will check the request against the expected request.
* If it matches return the response otherwise it will return a 500 error.
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
index c1466d7..8ede1f4 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
@@ -17,13 +17,10 @@
*/
package org.apache.hadoop.gateway;
-import com.jayway.restassured.RestAssured;
import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.parsing.Parser;
import com.jayway.restassured.response.Response;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
-import org.apache.hadoop.io.UTF8;
import org.apache.hadoop.test.TestUtils;
import org.apache.hadoop.test.category.FunctionalTests;
import org.apache.hadoop.test.category.MediumTests;
@@ -52,19 +49,15 @@ import java.net.ServerSocket;
import java.nio.charset.Charset;
import static com.jayway.restassured.RestAssured.given;
-import static com.jayway.restassured.RestAssured.responseContentType;
import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.fail;
import static org.xmlmatchers.XmlMatchers.isEquivalentTo;
import static org.xmlmatchers.transform.XmlConverters.the;
import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
-
@Category( { FunctionalTests.class, MediumTests.class } )
public class GatewayBasicFuncTest {
@@ -97,7 +90,7 @@ public class GatewayBasicFuncTest {
// Specifies if the GATEWAY_HOME created for the test should be deleted when the test suite is complete.
// This is frequently used during debugging to keep the GATEWAY_HOME around for inspection.
- private static final boolean CLEANUP_TEST = false;
+ private static final boolean CLEANUP_TEST = true;
// private static final boolean USE_GATEWAY = false;
// private static final boolean USE_MOCK_SERVICES = false;
@@ -124,12 +117,10 @@ public class GatewayBasicFuncTest {
config.setGatewayPath( "gateway" );
driver.setResourceBase( GatewayBasicFuncTest.class );
driver.setupLdap( findFreePort() );
- driver.setupService( "NAMENODE", "http://" + TEST_HOST + ":50070/webhdfs/v1", "/cluster/namenode/api/v1", USE_MOCK_SERVICES ); // IPC:8020
- driver.setupService( "NAMENODE-RPC", "thrift://" + TEST_HOST + ":777", null, USE_MOCK_SERVICES );
- driver.setupService( "DATANODE", "http://" + TEST_HOST + ":50075/webhdfs/v1", "/cluster/datanode/api/v1", USE_MOCK_SERVICES ); // CLIENT:50010, IPC:50020
- // JobTracker: UI:50030,
- // TaskTracker: UI:50060, 127.0.0.1:0
- driver.setupService( "JOBTRACKER", "thrift://" + TEST_HOST + ":777", null, USE_MOCK_SERVICES );
+ driver.setupService( "WEBHDFS", "http://" + TEST_HOST + ":50070/webhdfs/v1", "/cluster/namenode/api/v1", USE_MOCK_SERVICES );
+ driver.setupService( "NAMENODE", "hdfs://" + TEST_HOST + ":8020", null, USE_MOCK_SERVICES );
+ driver.setupService( "DATANODE", "http://" + TEST_HOST + ":50075/webhdfs/v1", "/cluster/datanode/api/v1", USE_MOCK_SERVICES );
+ driver.setupService( "JOBTRACKER", "thrift://" + TEST_HOST + ":8021", null, USE_MOCK_SERVICES );
driver.setupService( "TEMPLETON", "http://" + TEST_HOST + ":50111/templeton/v1", "/cluster/templeton/api/v1", USE_MOCK_SERVICES );
driver.setupService( "OOZIE", "http://" + TEST_HOST + ":11000/oozie", "/cluster/oozie/api", USE_MOCK_SERVICES );
driver.setupService( "HIVE", "http://" + TEST_HOST + ":10000", "/cluster/hive/api/v1", USE_MOCK_SERVICES );
@@ -192,12 +183,12 @@ public class GatewayBasicFuncTest {
.addTag( "value" ).addText( "AND" ).gotoParent().gotoParent()
.gotoRoot()
.addTag( "service" )
+ .addTag( "role" ).addText( "WEBHDFS" )
+ .addTag( "url" ).addText( driver.getRealUrl( "WEBHDFS" ) ).gotoParent()
+ .addTag( "service" )
.addTag( "role" ).addText( "NAMENODE" )
.addTag( "url" ).addText( driver.getRealUrl( "NAMENODE" ) ).gotoParent()
.addTag( "service" )
- .addTag( "role" ).addText( "NAMENODE-RPC" )
- .addTag( "url" ).addText( driver.getRealUrl( "NAMENODE-RPC" ) ).gotoParent()
- .addTag( "service" )
.addTag( "role" ).addText( "DATANODE" )
.addTag( "url" ).addText( driver.getRealUrl( "DATANODE" ) ).gotoParent()
.addTag( "service" )
@@ -235,7 +226,7 @@ public class GatewayBasicFuncTest {
{"boolean": true}
*/
- driver.getMock( "NAMENODE" )
+ driver.getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( root + "/dir" )
@@ -254,7 +245,7 @@ public class GatewayBasicFuncTest {
.statusCode( HttpStatus.SC_OK )
.contentType( "application/json" )
.content( "boolean", is( true ) )
- .when().put( driver.getUrl( "NAMENODE" ) + root + "/dir" );
+ .when().put( driver.getUrl( "WEBHDFS" ) + root + "/dir" );
driver.assertComplete();
}
@@ -265,7 +256,7 @@ public class GatewayBasicFuncTest {
String password = "hdfs-password";
InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
- driver.getMock( "NAMENODE" )
+ driver.getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( root + "/dir/file" )
@@ -281,7 +272,7 @@ public class GatewayBasicFuncTest {
.expect()
//.log().ifError()
.statusCode( HttpStatus.SC_TEMPORARY_REDIRECT )
- .when().put( driver.getUrl("NAMENODE") + root + "/dir/file" );
+ .when().put( driver.getUrl("WEBHDFS") + root + "/dir/file" );
String location = response.getHeader( "Location" );
//System.out.println( location );
log.debug( "Redirect location: " + response.getHeader( "Location" ) );
@@ -303,7 +294,7 @@ public class GatewayBasicFuncTest {
// Attempt to delete the test directory in case a previous run failed.
// Ignore any result.
// Cleanup anything that might have been leftover because the test failed previously.
- driver.getMock( "NAMENODE" )
+ driver.getMock( "WEBHDFS" )
.expect()
.method( "DELETE" )
.pathInfo( root )
@@ -319,7 +310,7 @@ public class GatewayBasicFuncTest {
.expect()
//.log().all();
.statusCode( HttpStatus.SC_OK )
- .when().delete( driver.getUrl( "NAMENODE" ) + root + ( driver.isUseGateway() ? "" : "?user.name=" + username ) );
+ .when().delete( driver.getUrl( "WEBHDFS" ) + root + ( driver.isUseGateway() ? "" : "?user.name=" + username ) );
driver.assertComplete();
/* Create a directory.
@@ -332,7 +323,7 @@ public class GatewayBasicFuncTest {
{"boolean": true}
*/
- driver.getMock( "NAMENODE" )
+ driver.getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( root + "/dir" )
@@ -351,10 +342,10 @@ public class GatewayBasicFuncTest {
.statusCode( HttpStatus.SC_OK )
.contentType( "application/json" )
.content( "boolean", is( true ) )
- .when().put( driver.getUrl( "NAMENODE" ) + root + "/dir" );
+ .when().put( driver.getUrl( "WEBHDFS" ) + root + "/dir" );
driver.assertComplete();
- driver.getMock( "NAMENODE" )
+ driver.getMock( "WEBHDFS" )
.expect()
.method( "GET" )
.pathInfo( root )
@@ -372,7 +363,7 @@ public class GatewayBasicFuncTest {
//.log().ifError()
.statusCode( HttpStatus.SC_OK )
.content( "FileStatuses.FileStatus[0].pathSuffix", is( "dir" ) )
- .when().get( driver.getUrl( "NAMENODE" ) + root );
+ .when().get( driver.getUrl( "WEBHDFS" ) + root );
driver.assertComplete();
//NEGATIVE: Test a bad password.
@@ -383,7 +374,7 @@ public class GatewayBasicFuncTest {
.expect()
//.log().ifError()
.statusCode( HttpStatus.SC_UNAUTHORIZED )
- .when().get( driver.getUrl( "NAMENODE" ) + root );
+ .when().get( driver.getUrl( "WEBHDFS" ) + root );
driver.assertComplete();
//NEGATIVE: Test a bad user.
@@ -394,7 +385,7 @@ public class GatewayBasicFuncTest {
.expect()
//.log().ifError()
.statusCode( HttpStatus.SC_UNAUTHORIZED )
- .when().get( driver.getUrl( "NAMENODE" ) + root );
+ .when().get( driver.getUrl( "WEBHDFS" ) + root );
driver.assertComplete();
//NEGATIVE: Test a valid but unauthorized user.
@@ -405,7 +396,7 @@ public class GatewayBasicFuncTest {
.expect()
//.log().ifError()
.statusCode( HttpStatus.SC_UNAUTHORIZED )
- .when().get( driver.getUrl( "NAMENODE" ) + root );
+ .when().get( driver.getUrl( "WEBHDFS" ) + root );
/* Add a file.
curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CREATE
@@ -425,7 +416,7 @@ public class GatewayBasicFuncTest {
Location: webhdfs://<HOST>:<PORT>/<PATH>
Content-Length: 0
*/
- driver.getMock( "NAMENODE" )
+ driver.getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( root + "/dir/file" )
@@ -453,7 +444,7 @@ public class GatewayBasicFuncTest {
.expect()
//.log().ifError()
.statusCode( HttpStatus.SC_TEMPORARY_REDIRECT )
- .when().put( driver.getUrl("NAMENODE") + root + "/dir/file" );
+ .when().put( driver.getUrl("WEBHDFS") + root + "/dir/file" );
String location = response.getHeader( "Location" );
log.debug( "Redirect location: " + response.getHeader( "Location" ) );
if( driver.isUseGateway() ) {
@@ -495,7 +486,7 @@ public class GatewayBasicFuncTest {
Hello, webhdfs user!
*/
- driver.getMock( "NAMENODE" )
+ driver.getMock( "WEBHDFS" )
.expect()
.method( "GET" )
.pathInfo( root + "/dir/file" )
@@ -522,7 +513,7 @@ public class GatewayBasicFuncTest {
//.log().ifError()
.statusCode( HttpStatus.SC_OK )
.content( is( "TEST" ) )
- .when().get( driver.getUrl("NAMENODE") + root + "/dir/file" );
+ .when().get( driver.getUrl("WEBHDFS") + root + "/dir/file" );
driver.assertComplete();
/* Delete the directory.
@@ -537,7 +528,7 @@ public class GatewayBasicFuncTest {
{"boolean": true}
*/
// Mock the interaction with the namenode.
- driver.getMock( "NAMENODE" )
+ driver.getMock( "WEBHDFS" )
.expect()
.method( "DELETE" )
.pathInfo( root )
@@ -553,7 +544,7 @@ public class GatewayBasicFuncTest {
.expect()
//.log().ifError()
.statusCode( HttpStatus.SC_OK )
- .when().delete( driver.getUrl( "NAMENODE" ) + root );
+ .when().delete( driver.getUrl( "WEBHDFS" ) + root );
driver.assertComplete();
}
@@ -647,7 +638,7 @@ public class GatewayBasicFuncTest {
.expect()
//.log().all()
.statusCode( HttpStatus.SC_UNAUTHORIZED )
- .when().get( driver.getUrl("NAMENODE") + root + "/dirA700/fileA700" );
+ .when().get( driver.getUrl("WEBHDFS") + root + "/dirA700/fileA700" );
}
driver.assertComplete();
@@ -1244,7 +1235,7 @@ public class GatewayBasicFuncTest {
}
@Test
- public void testHbaseGetTableList() throws IOException {
+ public void testHBaseGetTableList() throws IOException {
String username = "hbase";
String password = "hbase-password";
String resourceName = "hbase/table-list";
@@ -1317,7 +1308,7 @@ public class GatewayBasicFuncTest {
}
@Test
- public void testHbaseCreateTableAndVerifySchema() throws IOException {
+ public void testHBaseCreateTableAndVerifySchema() throws IOException {
String username = "hbase";
String password = "hbase-password";
String resourceName = "hbase/table-schema";
@@ -1383,7 +1374,7 @@ public class GatewayBasicFuncTest {
}
@Test
- public void testHbaseGetTableSchema() throws IOException {
+ public void testHBaseGetTableSchema() throws IOException {
String username = "hbase";
String password = "hbase-password";
String resourceName = "hbase/table-metadata";
@@ -1459,7 +1450,7 @@ public class GatewayBasicFuncTest {
}
@Test
- public void testHbaseInsertDataIntoTable() throws IOException {
+ public void testHBaseInsertDataIntoTable() throws IOException {
String username = "hbase";
String password = "hbase-password";
@@ -1591,7 +1582,7 @@ public class GatewayBasicFuncTest {
}
@Test
- public void testHbaseDeleteDataFromTable() {
+ public void testHBaseDeleteDataFromTable() {
String username = "hbase";
String password = "hbase-password";
@@ -1645,7 +1636,7 @@ public class GatewayBasicFuncTest {
}
@Test
- public void testHbaseQueryTableData() throws IOException {
+ public void testHBaseQueryTableData() throws IOException {
String username = "hbase";
String password = "hbase-password";
@@ -1750,7 +1741,7 @@ public class GatewayBasicFuncTest {
}
@Test
- public void testHbaseUseScanner() throws IOException {
+ public void testHBaseUseScanner() throws IOException {
String username = "hbase";
String password = "hbase-password";
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
index 050a6b7..cc3d792 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.security.EmbeddedApacheDirectoryServer;
import org.apache.hadoop.gateway.services.DefaultGatewayServices;
import org.apache.hadoop.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.io.UTF8;
import org.apache.hadoop.test.mock.MockServer;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
@@ -278,7 +277,7 @@ public class GatewayFuncTestDriver {
public String createFileNN( String user, String password, String file, String permsOctal, int status ) throws IOException {
if( status == HttpStatus.SC_TEMPORARY_REDIRECT ) {
- getMock( "NAMENODE" )
+ getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( file )
@@ -288,7 +287,7 @@ public class GatewayFuncTestDriver {
.status( status )
.header( "Location", getRealUrl("DATANODE") + file + "?op=CREATE&user.name="+user );
} else {
- getMock( "NAMENODE" )
+ getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( file )
@@ -306,7 +305,7 @@ public class GatewayFuncTestDriver {
.expect()
//.log().all()
.statusCode( status )
- .when().put( getUrl( "NAMENODE" ) + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
+ .when().put( getUrl( "WEBHDFS" ) + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
String location = response.getHeader( "Location" );
log.trace( "Redirect location: " + response.getHeader( "Location" ) );
return location;
@@ -367,7 +366,7 @@ public class GatewayFuncTestDriver {
}
public void readFile( String user, String password, String file, String contentType, String resource, int status ) throws IOException {
- getMock( "NAMENODE" )
+ getMock( "WEBHDFS" )
.expect()
.method( "GET" )
.pathInfo( file )
@@ -404,7 +403,7 @@ public class GatewayFuncTestDriver {
.expect()
//.log().all()
.statusCode( status )
- .when().get( getUrl("NAMENODE") + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
+ .when().get( getUrl("WEBHDFS") + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
if( response.getStatusCode() == HttpStatus.SC_OK ) {
String actualContent = response.asString();
String expectedContent = getResourceString( resource, Charset.forName("UTF-8") );
@@ -414,7 +413,7 @@ public class GatewayFuncTestDriver {
}
public void chownFile( String user, String password, String file, String owner, String group, int status ) {
- getMock( "NAMENODE" )
+ getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( file )
@@ -433,12 +432,12 @@ public class GatewayFuncTestDriver {
.expect()
//.log().all()
.statusCode( status )
- .when().put( getUrl("NAMENODE") + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
+ .when().put( getUrl("WEBHDFS") + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
assertComplete();
}
public void chmodFile( String user, String password, String file, String permsOctal, int status ) {
- getMock( "NAMENODE" )
+ getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( file )
@@ -455,7 +454,7 @@ public class GatewayFuncTestDriver {
.expect()
//.log().all()
.statusCode( status )
- .when().put( getUrl("NAMENODE") + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
+ .when().put( getUrl("WEBHDFS") + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
assertComplete();
}
@@ -471,7 +470,7 @@ public class GatewayFuncTestDriver {
public String updateFileNN( String user, String password, String file, String resource, int status ) throws IOException {
if( status == HttpStatus.SC_TEMPORARY_REDIRECT ) {
- getMock( "NAMENODE" )
+ getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( file )
@@ -482,7 +481,7 @@ public class GatewayFuncTestDriver {
.status( status )
.header( "Location", getRealUrl("DATANODE") + file + "?op=CREATE&user.name="+user );
} else {
- getMock( "NAMENODE" )
+ getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( file )
@@ -500,7 +499,7 @@ public class GatewayFuncTestDriver {
.expect()
//.log().all()
.statusCode( status )
- .when().put( getUrl("NAMENODE") + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
+ .when().put( getUrl("WEBHDFS") + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
String location = response.getHeader( "Location" );
log.trace( "Redirect location: " + response.getHeader( "Location" ) );
return location;
@@ -545,7 +544,7 @@ public class GatewayFuncTestDriver {
}
public void deleteFile( String user, String password, String file, String recursive, int... status ) {
- getMock( "NAMENODE" )
+ getMock( "WEBHDFS" )
.expect()
.method( "DELETE" )
.pathInfo( file )
@@ -562,12 +561,12 @@ public class GatewayFuncTestDriver {
//.log().all()
.statusCode( isIn( ArrayUtils.toObject( status ) ) )
.when()
- .delete( getUrl( "NAMENODE" ) + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
+ .delete( getUrl( "WEBHDFS" ) + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
assertComplete();
}
public String createDir( String user, String password, String dir, String permsOctal, int status ) {
- getMock( "NAMENODE" )
+ getMock( "WEBHDFS" )
.expect()
.method( "PUT" )
.pathInfo( dir )
@@ -589,7 +588,7 @@ public class GatewayFuncTestDriver {
.contentType( "application/json" )
.content( "boolean", equalTo( true ) )
.when()
- .put( getUrl("NAMENODE") + dir + ( isUseGateway() ? "" : "?user.name=" + user ) );
+ .put( getUrl("WEBHDFS") + dir + ( isUseGateway() ? "" : "?user.name=" + user ) );
String location = response.getHeader( "Location" );
return location;
}
@@ -612,7 +611,7 @@ public class GatewayFuncTestDriver {
.statusCode( status )
.content( equalTo( "TODO" ) )
.when()
- .get( getUrl( "NAMENODE" ) + dir );
+ .get( getUrl( "WEBHDFS" ) + dir );
}
public String submitJava( String user, String password, String jar, String main, String input, String output, int status ) {
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
index 56f2603..c50090b 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.gateway.topology.ProviderParam;
import org.apache.hadoop.gateway.topology.Service;
import org.apache.hadoop.gateway.topology.Topology;
import org.jboss.shrinkwrap.api.spec.WebArchive;
-import org.junit.Ignore;
import org.junit.Test;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
@@ -35,7 +34,7 @@ import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.io.InputStream;
-import java.net.URL;
+import java.net.URISyntaxException;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
@@ -44,13 +43,13 @@ import static org.hamcrest.xml.HasXPath.hasXPath;
public class DeploymentFactoryFuncTest {
@Test
- public void testSimpleTopology() throws IOException, SAXException, ParserConfigurationException {
+ public void testSimpleTopology() throws IOException, SAXException, ParserConfigurationException, URISyntaxException {
GatewayConfig config = new GatewayConfigImpl();
Topology topology = new Topology();
topology.setName( "test-cluster" );
Service service = new Service();
- service.setRole( "NAMENODE" );
- service.setUrl( new URL( "http://localhost:50070/webhdfs/v1" ) );
+ service.setRole( "WEBHDFS" );
+ service.setUrl( "http://localhost:50070/webhdfs/v1" );
topology.addService( service );
Provider provider = new Provider();
provider.setRole( "authentication" );
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-test/src/test/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributorTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributorTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributorTest.java
new file mode 100644
index 0000000..3c8c1b1
--- /dev/null
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributorTest.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hbase;
+
+import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor;
+import org.junit.Test;
+
+import java.util.Iterator;
+import java.util.ServiceLoader;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.fail;
+
+public class HBaseDeploymentContributorTest {
+
+ @Test
+ public void testServiceLoader() throws Exception {
+ ServiceLoader loader = ServiceLoader.load( ServiceDeploymentContributor.class );
+ Iterator iterator = loader.iterator();
+ assertThat( "Service iterator empty.", iterator.hasNext() );
+ while( iterator.hasNext() ) {
+ Object object = iterator.next();
+ if( object instanceof HBaseDeploymentContributor ) {
+ return;
+ }
+ }
+ fail( "Failed to find " + HBaseDeploymentContributor.class.getName() + " via service loader." );
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-test/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/log4j.properties b/gateway-test/src/test/resources/log4j.properties
index 70175cb..1a4b189 100644
--- a/gateway-test/src/test/resources/log4j.properties
+++ b/gateway-test/src/test/resources/log4j.properties
@@ -24,6 +24,7 @@ log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%5p [%c] %m%n
#log4j.logger.org.apache.hadoop.gateway=DEBUG
+
#log4j.logger.org.eclipse.jetty=DEBUG
#log4j.logger.org.apache.shiro=DEBUG
#log4j.logger.org.apache.http=DEBUG
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/hsso-release/home/INSTALL
----------------------------------------------------------------------
diff --git a/hsso-release/home/INSTALL b/hsso-release/home/INSTALL
index 9cab07b..f198704 100644
--- a/hsso-release/home/INSTALL
+++ b/hsso-release/home/INSTALL
@@ -71,7 +71,7 @@ Installation and Deployment Instructions
5. Configure the Gateway with the topology of your Hadoop cluster
a. Edit the file {GATEWAY_HOME}/deployments/sample.xml
b. Change the host and port in the urls of the <service> elements for
- NAMENODE, TEMPLETON and OOZIE services to match your Hadoop cluster
+ WEBHDFS, TEMPLETON and OOZIE services to match your Hadoop cluster
deployment.
c. The default configuration contains the LDAP URL for a LDAP server. By
default that file is configured to access the demo ApacheDS based LDAP
@@ -91,7 +91,7 @@ Installation and Deployment Instructions
6. Test the installation and configuration of your Gateway
Invoke the LISTSTATUS operation on HDFS represented by your configured
- NAMENODE by using your web browser or curl:
+ WEBHDFS by using your web browser or curl:
curl -i -k -u hdfs:hdfs-password -X GET \
'https://localhost:8443/gateway/sample/namenode/api/v1/?op=LISTSTATUS'
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/hsso-release/home/deployments/sample.xml
----------------------------------------------------------------------
diff --git a/hsso-release/home/deployments/sample.xml b/hsso-release/home/deployments/sample.xml
index 4f840d4..1aaebd2 100644
--- a/hsso-release/home/deployments/sample.xml
+++ b/hsso-release/home/deployments/sample.xml
@@ -21,17 +21,17 @@
<provider>
<role>federation</role>
<enabled>true</enabled>
- <name>AccessTokenProvider</name>
+ <name>AccessTokenProvider</name>
+ </provider>
+ <provider>
+ <role>identity-assertion</role>
+ <enabled>true</enabled>
+ <name>Pseudo</name>
</provider>
- <provider>
- <role>identity-assertion</role>
- <enabled>true</enabled>
- <name>Pseudo</name>
- </provider>
</gateway>
<service>
- <role>NAMENODE</role>
+ <role>WEBHDFS</role>
<url>http://localhost:50070/webhdfs/v1</url>
</service>
<service>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/hsso-release/home/templates/topology.xml
----------------------------------------------------------------------
diff --git a/hsso-release/home/templates/topology.xml b/hsso-release/home/templates/topology.xml
index a7939d4..fb7b536 100644
--- a/hsso-release/home/templates/topology.xml
+++ b/hsso-release/home/templates/topology.xml
@@ -51,7 +51,7 @@
</gateway>
<service>
- <role>NAMENODE</role>
+ <role>WEBHDFS</role>
<url>http://localhost:50070/webhdfs/v1</url>
</service>
<service>
[2/2] git commit: KNOX-50: Ensure cluster topology details rewritten
for Oozie REST APIs
Posted by km...@apache.org.
KNOX-50: Ensure cluster topology details rewritten for Oozie REST APIs
Project: http://git-wip-us.apache.org/repos/asf/incubator-knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-knox/commit/3f1ede53
Tree: http://git-wip-us.apache.org/repos/asf/incubator-knox/tree/3f1ede53
Diff: http://git-wip-us.apache.org/repos/asf/incubator-knox/diff/3f1ede53
Branch: refs/heads/master
Commit: 3f1ede539d1c8cfa7c0c140b10c001fd0232f64b
Parents: 70711f7
Author: Kevin Minder <ke...@hortonworks.com>
Authored: Fri Sep 13 22:53:44 2013 -0400
Committer: Kevin Minder <ke...@hortonworks.com>
Committed: Fri Sep 13 22:53:44 2013 -0400
----------------------------------------------------------------------
.../src/main/resources/clusters/demo.xml | 2 +-
.../impl/ServiceRegistryFunctionsTest.java | 6 +-
.../ServiceRegistryFunctionsTest/rewrite.xml | 18 +--
.../filter/rewrite/i18n/UrlRewriteMessages.java | 7 +-
.../rewrite/impl/form/FormFilterReader.java | 16 +-
.../rewrite/impl/html/HtmlFilterReaderBase.java | 2 +-
.../rewrite/impl/json/JsonFilterReader.java | 4 +-
.../impl/xml/XmlUrlRewriteFilterReader.java | 12 +-
.../gateway/filter/rewrite/impl/gateway.xml | 2 +-
.../gateway/filter/rewrite/impl/rewrite.xml | 10 +-
.../filter-sample.xml | 2 +-
gateway-release/home/INSTALL | 4 +-
gateway-release/home/conf/users.ldif | 12 +-
gateway-release/home/deployments/sample.xml | 8 +
.../home/samples/ExampleOozieWorkflow.groovy | 70 ++++++---
gateway-release/home/templates/sample.conf | 2 +-
gateway-release/home/templates/sample.xml | 2 +-
gateway-release/home/templates/topology.xml | 2 +-
.../apache/hadoop/gateway/GatewayServer.java | 7 -
.../gateway/config/impl/GatewayConfigImpl.java | 12 --
.../gateway/deploy/DeploymentFactory.java | 2 +-
.../interpreter/ServicePropertyInterpreter.java | 9 +-
.../builder/PropertyTopologyBuilderTest.java | 8 +-
.../topology/xml/TopologyRulesModuleTest.java | 23 +--
.../gateway/topology/file/topology-one.xml | 2 +-
.../gateway/topology/file/topology-three.xml | 2 +-
.../gateway/topology/file/topology-two.xml | 2 +-
.../xml/simple-topology-ambari-format.conf | 2 +-
.../xml/simple-topology-knox-format.xml | 2 +-
.../hbase/HbaseDeploymentContributor.java | 6 +-
....gateway.deploy.ServiceDeploymentContributor | 2 +-
.../gateway/hdfs/HdfsDeploymentContributor.java | 146 -------------------
.../hdfs/JobTrackerDeploymentContributor.java | 41 ++++++
.../hdfs/NameNodeDeploymentContributor.java | 41 ++++++
.../hdfs/WebHdfsDeploymentContributor.java | 145 ++++++++++++++++++
....gateway.deploy.ServiceDeploymentContributor | 4 +-
.../gateway/hive/HiveDeploymentContributor.java | 2 +-
.../oozie/OozieDeploymentContributor.java | 81 ++++------
.../OozieDeploymentContributor/rewrite.xml | 40 ++++-
.../oozie/OozieDeploymentContributorTest.java | 9 +-
.../TempletonDeploymentContributor.java | 2 +-
.../deploy/ServiceDeploymentContributor.java | 2 +-
.../apache/hadoop/gateway/topology/Service.java | 8 +-
.../org/apache/hadoop/test/mock/MockServer.java | 4 +-
.../hadoop/gateway/GatewayBasicFuncTest.java | 79 +++++-----
.../hadoop/gateway/GatewayFuncTestDriver.java | 35 +++--
.../deploy/DeploymentFactoryFuncTest.java | 9 +-
.../hbase/HBaseDeploymentContributorTest.java | 45 ++++++
.../src/test/resources/log4j.properties | 1 +
hsso-release/home/INSTALL | 4 +-
hsso-release/home/deployments/sample.xml | 14 +-
hsso-release/home/templates/topology.xml | 2 +-
52 files changed, 567 insertions(+), 407 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-demo/src/main/resources/clusters/demo.xml
----------------------------------------------------------------------
diff --git a/gateway-demo/src/main/resources/clusters/demo.xml b/gateway-demo/src/main/resources/clusters/demo.xml
index 07b08d9..ea66063 100644
--- a/gateway-demo/src/main/resources/clusters/demo.xml
+++ b/gateway-demo/src/main/resources/clusters/demo.xml
@@ -19,7 +19,7 @@
<topology>
<service>
- <role>NAMENODE</role>
+ <role>WEBHDFS</role>
<url>http://vm.home:50070/webhdfs/v1</url>
</service>
</topology>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
index c13d302..ae21d67 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
@@ -18,10 +18,8 @@
package org.apache.hadoop.gateway.svcregfunc.impl;
import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletContextListener;
import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletFilter;
-import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
import org.apache.hadoop.gateway.services.GatewayServices;
import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
import org.apache.hadoop.gateway.util.urltemplate.Parser;
@@ -78,8 +76,8 @@ public class ServiceRegistryFunctionsTest {
public void setUp( String username, Map<String,String> initParams ) throws Exception {
ServiceRegistry mockServiceRegistry = EasyMock.createNiceMock( ServiceRegistry.class );
- EasyMock.expect( mockServiceRegistry.lookupServiceURL( "test-cluster", "NAMENODE.rpc" ) ).andReturn( "test-nn-scheme://test-nn-host:411" ).anyTimes();
- EasyMock.expect( mockServiceRegistry.lookupServiceURL( "test-cluster", "JOBTRACKER.rpc" ) ).andReturn( "test-jt-scheme://test-jt-host:511" ).anyTimes();
+ EasyMock.expect( mockServiceRegistry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-nn-scheme://test-nn-host:411" ).anyTimes();
+ EasyMock.expect( mockServiceRegistry.lookupServiceURL( "test-cluster", "JOBTRACKER" ) ).andReturn( "test-jt-scheme://test-jt-host:511" ).anyTimes();
GatewayServices mockGatewayServices = EasyMock.createNiceMock( GatewayServices.class );
EasyMock.expect( mockGatewayServices.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( mockServiceRegistry ).anyTimes();
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-provider-rewrite-func-service-registry/src/test/resources/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/resources/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest/rewrite.xml b/gateway-provider-rewrite-func-service-registry/src/test/resources/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest/rewrite.xml
index 9bab980..47ba6e7 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/resources/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest/rewrite.xml
+++ b/gateway-provider-rewrite-func-service-registry/src/test/resources/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest/rewrite.xml
@@ -17,40 +17,40 @@
<rules>
<rule name="nn-url">
- <rewrite template="{$serviceUrl[NAMENODE.rpc]}"/>
+ <rewrite template="{$serviceUrl[NAMENODE]}"/>
</rule>
<rule name="nn-addr">
- <rewrite template="{$serviceAddr[NAMENODE.rpc]}"/>
+ <rewrite template="{$serviceAddr[NAMENODE]}"/>
</rule>
<rule name="nn-scheme">
- <rewrite template="{$serviceScheme[NAMENODE.rpc]}"/>
+ <rewrite template="{$serviceScheme[NAMENODE]}"/>
</rule>
<rule name="nn-host">
- <rewrite template="{$serviceHost[NAMENODE.rpc]}"/>
+ <rewrite template="{$serviceHost[NAMENODE]}"/>
</rule>
<rule name="nn-port">
- <rewrite template="{$servicePort[NAMENODE.rpc]}"/>
+ <rewrite template="{$servicePort[NAMENODE]}"/>
</rule>
<rule name="nn-path">
- <rewrite template="{$servicePath[NAMENODE.rpc]}"/>
+ <rewrite template="{$servicePath[NAMENODE]}"/>
</rule>
<rule name="hdfs-addr">
- <rewrite template="hdfs://{$serviceAddr[NAMENODE.rpc]}"/>
+ <rewrite template="hdfs://{$serviceAddr[NAMENODE]}"/>
</rule>
<rule name="hdfs-path">
<match pattern="{path=**}"/>
- <rewrite template="hdfs://{$serviceHost[NAMENODE.rpc]}:{$servicePort[NAMENODE.rpc]}/{path=**}"/>
+ <rewrite template="hdfs://{$serviceHost[NAMENODE]}:{$servicePort[NAMENODE]}/{path=**}"/>
</rule>
<rule name="jt-addr">
- <rewrite template="{$serviceAddr[JOBTRACKER.rpc]}"/>
+ <rewrite template="{$serviceAddr[JOBTRACKER]}"/>
</rule>
<filter name="oozie-conf">
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/i18n/UrlRewriteMessages.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/i18n/UrlRewriteMessages.java b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/i18n/UrlRewriteMessages.java
index c8fee57..5111cd8 100644
--- a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/i18n/UrlRewriteMessages.java
+++ b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/i18n/UrlRewriteMessages.java
@@ -52,8 +52,11 @@ public interface UrlRewriteMessages {
@Message( level = MessageLevel.ERROR, text = "Failed to rewrite URL: {0}" )
void failedToRewriteUrl( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
- @Message( level = MessageLevel.ERROR, text = "Failed to filter value {0}: {1}" )
- void failedToFilterValue( String value, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+ @Message( level = MessageLevel.ERROR, text = "Failed to filter value {0}, rule {1}" )
+ void failedToFilterValue( String value, String rule );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to filter value {0}, rule {1}: {2}" )
+ void failedToFilterValue( String value, String rule, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
@Message( level = MessageLevel.ERROR, text = "Failed to filter field name {0}: {1}" )
void failedToFilterFieldName( String fieldName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/form/FormFilterReader.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/form/FormFilterReader.java b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/form/FormFilterReader.java
index 0125b32..57d11ce 100644
--- a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/form/FormFilterReader.java
+++ b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/form/FormFilterReader.java
@@ -17,15 +17,15 @@
*/
package org.apache.hadoop.gateway.filter.rewrite.impl.form;
-import java.io.IOException;
-import java.io.Reader;
-import java.io.StringWriter;
-
import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFilterContentDescriptor;
import org.apache.hadoop.gateway.filter.rewrite.i18n.UrlRewriteMessages;
import org.apache.hadoop.gateway.filter.rewrite.impl.UrlRewriteUtil;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringWriter;
+
public class FormFilterReader extends Reader {
private static final UrlRewriteMessages LOG = MessagesFactory.get( UrlRewriteMessages.class );
@@ -78,14 +78,14 @@ public class FormFilterReader extends Reader {
private void processPair() throws IOException {
FormPair pair = parser.getCurrentPair();
+ String name = pair.getName();
+ String value = pair.getValue();
+ String rule = UrlRewriteUtil.pickFirstRuleWithEqualsIgnoreCasePathMatch( config, name );
try {
- String name = pair.getName();
- String value = pair.getValue();
- String rule = UrlRewriteUtil.pickFirstRuleWithEqualsIgnoreCasePathMatch( config, name );
value = filterValue( name, pair.getValue(), rule );
pair.setValue( value );
} catch( Exception e ) {
- LOG.failedToFilterValue( pair.getValue(), e );
+ LOG.failedToFilterValue( pair.getValue(), rule, e );
// Write original value.
}
generator.writePair( pair );
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/html/HtmlFilterReaderBase.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/html/HtmlFilterReaderBase.java b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/html/HtmlFilterReaderBase.java
index a6ddd77..b33d2e1 100644
--- a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/html/HtmlFilterReaderBase.java
+++ b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/html/HtmlFilterReaderBase.java
@@ -185,7 +185,7 @@ public abstract class HtmlFilterReaderBase extends Reader {
outputValue = inputValue;
}
} catch ( Exception e ) {
- LOG.failedToFilterValue( inputValue, e );
+ LOG.failedToFilterValue( inputValue, null, e );
}
writer.write( outputValue );
}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReader.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReader.java b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReader.java
index 36682a6..4e00d19 100644
--- a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReader.java
+++ b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReader.java
@@ -464,7 +464,7 @@ class JsonFilterReader extends Reader {
value = filterValueString( field, value, rule );
((ObjectNode)node.node ).put( field, value );
} catch( Exception e ) {
- LOG.failedToFilterValue( value, e );
+ LOG.failedToFilterValue( value, rule, e );
}
return value;
}
@@ -517,7 +517,7 @@ class JsonFilterReader extends Reader {
value = filterValueString( field, value, apply.rule() );
((ObjectNode)match.getParent().getNode()).put( field, value );
} catch( Exception e ) {
- LOG.failedToFilterValue( value, e );
+ LOG.failedToFilterValue( value, apply.rule(), e );
}
}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/xml/XmlUrlRewriteFilterReader.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/xml/XmlUrlRewriteFilterReader.java b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/xml/XmlUrlRewriteFilterReader.java
index 4cf4c2f..0208449 100644
--- a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/xml/XmlUrlRewriteFilterReader.java
+++ b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/xml/XmlUrlRewriteFilterReader.java
@@ -54,8 +54,16 @@ public class XmlUrlRewriteFilterReader extends XmlFilterReader {
private String filterValueString( String name, String value, String rule ) {
try {
Template input = Parser.parse( value );
- Template output = rewriter.rewrite( resolver, input, direction, rule );
- value = output.toString();
+ if( input != null ) {
+ Template output = rewriter.rewrite( resolver, input, direction, rule );
+ if( output != null ) {
+ value = output.toString();
+ } else {
+ LOG.failedToFilterValue( value, rule );
+ }
+ } else {
+ LOG.failedToParseValueForUrlRewrite( value );
+ }
} catch( URISyntaxException e ) {
LOG.failedToParseValueForUrlRewrite( value );
}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/gateway.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/gateway.xml b/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/gateway.xml
index 97fec06..0ed824c 100644
--- a/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/gateway.xml
+++ b/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/gateway.xml
@@ -17,7 +17,7 @@
-->
<gateway>
<resource>
- <role>NAMENODE</role>
+ <role>WEBHDFS</role>
<pattern>/namenode/api/v1/?**</pattern>
...
<filter>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/rewrite.xml b/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/rewrite.xml
index a6ca78d..162251f 100644
--- a/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/rewrite.xml
+++ b/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/rewrite.xml
@@ -30,7 +30,7 @@
If no filter/rule specified it should attempt to filter everything like it did before.
-->
- <filter name="NAMENODE/request">
+ <filter name="WEBHDFS/request">
<content type="http/header">
<apply target="Location" rule="{rewrite-rule-name}"/>
<apply>...</apply>
@@ -77,18 +77,18 @@
ContentPathParser
-->
- <rule dir="IN" name="NAMENODE/hdfs/namenode/root/inbound" pattern="*://*:*/**/namenode/api/v1/?{**}">
+ <rule dir="IN" name="WEBHDFS/webhdfs/namenode/root/inbound" pattern="*://*:*/**/namenode/api/v1/?{**}">
<rewrite template="http://vm:50070/webhdfs/v1/?{**}"/>
</rule>
- <rule dir="IN" name="NAMENODE/hdfs/namenode/file/inbound" pattern="*://*:*/**/namenode/api/v1/{path=**}?{**}">
+ <rule dir="IN" name="WEBHDFS/webhdfs/namenode/file/inbound" pattern="*://*:*/**/namenode/api/v1/{path=**}?{**}">
<rewrite template="http://vm:50070/webhdfs/v1/{path=**}?{**}"/>
</rule>
- <rule dir="IN" name="NAMENODE/hdfs/datanode/inbound" pattern="*://*:*/**/datanode/api/v1/{path=**}?**">
+ <rule dir="IN" name="WEBHDFS/webhdfs/datanode/inbound" pattern="*://*:*/**/datanode/api/v1/{path=**}?**">
<decode-query/>
<match pattern="*://*:*/**/datanode/api/v1/{path=**}?{host}&{port}&{**}"/>
<rewrite template="http://{host}:{port}/{path=**}?{**}"/>
</rule>
- <rule dir="OUT" name="NAMENODE/hdfs/datanode/outbound" pattern="*://*:*/**?**">
+ <rule dir="OUT" name="WEBHDFS/webhdfs/datanode/outbound" pattern="*://*:*/**?**">
<match pattern="*://{host}:{port}/{path=**}?{**}"/>
<rewrite template="{gateway.url}/datanode/api/v1/{path=**}?{host}&{port}&{**}"/>
<encode-query/>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-provider-rewrite/src/test/resources/org/apache/hadoop/gateway/filter/rewrite/api/UrlRewriteRulesDescriptorFactoryTest/filter-sample.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/test/resources/org/apache/hadoop/gateway/filter/rewrite/api/UrlRewriteRulesDescriptorFactoryTest/filter-sample.xml b/gateway-provider-rewrite/src/test/resources/org/apache/hadoop/gateway/filter/rewrite/api/UrlRewriteRulesDescriptorFactoryTest/filter-sample.xml
index 043ce1c..60da1d3 100644
--- a/gateway-provider-rewrite/src/test/resources/org/apache/hadoop/gateway/filter/rewrite/api/UrlRewriteRulesDescriptorFactoryTest/filter-sample.xml
+++ b/gateway-provider-rewrite/src/test/resources/org/apache/hadoop/gateway/filter/rewrite/api/UrlRewriteRulesDescriptorFactoryTest/filter-sample.xml
@@ -16,7 +16,7 @@
limitations under the License.
-->
<rules>
- <filter name="NAMENODE/request">
+ <filter name="WEBHDFS/request">
<content type="http/header">
<apply target="Location" rule="{rewrite-rule-name}"/>
<apply>...</apply>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-release/home/INSTALL
----------------------------------------------------------------------
diff --git a/gateway-release/home/INSTALL b/gateway-release/home/INSTALL
index 9cab07b..f198704 100644
--- a/gateway-release/home/INSTALL
+++ b/gateway-release/home/INSTALL
@@ -71,7 +71,7 @@ Installation and Deployment Instructions
5. Configure the Gateway with the topology of your Hadoop cluster
a. Edit the file {GATEWAY_HOME}/deployments/sample.xml
b. Change the host and port in the urls of the <service> elements for
- NAMENODE, TEMPLETON and OOZIE services to match your Hadoop cluster
+ WEBHDFS, TEMPLETON and OOZIE services to match your Hadoop cluster
deployment.
c. The default configuration contains the LDAP URL for an LDAP server. By
default that file is configured to access the demo ApacheDS based LDAP
@@ -91,7 +91,7 @@ Installation and Deployment Instructions
6. Test the installation and configuration of your Gateway
Invoke the LISTSTATUS operation on HDFS represented by your configured
- NAMENODE by using your web browser or curl:
+ WEBHDFS by using your web browser or curl:
curl -i -k -u hdfs:hdfs-password -X GET \
'https://localhost:8443/gateway/sample/namenode/api/v1/?op=LISTSTATUS'
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-release/home/conf/users.ldif
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/users.ldif b/gateway-release/home/conf/users.ldif
index 8c27746..a635656 100644
--- a/gateway-release/home/conf/users.ldif
+++ b/gateway-release/home/conf/users.ldif
@@ -43,4 +43,14 @@ objectclass:inetOrgPerson
cn: Hue
sn: Hue
uid: hue
-userPassword:hue-password
\ No newline at end of file
+userPassword:hue-password
+
+dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: Guest
+sn: Guest
+uid: guest
+userPassword:guest-password
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-release/home/deployments/sample.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/deployments/sample.xml b/gateway-release/home/deployments/sample.xml
index 2bd89b0..727c09c 100644
--- a/gateway-release/home/deployments/sample.xml
+++ b/gateway-release/home/deployments/sample.xml
@@ -58,6 +58,14 @@
<service>
<role>NAMENODE</role>
+ <url>hdfs://sandbox.hortonworks.com:8020</url>
+ </service>
+ <service>
+ <role>JOBTRACKER</role>
+ <url>rpc://sandbox.hortonworks.com:8050</url>
+ </service>
+ <service>
+ <role>WEBHDFS</role>
<url>http://localhost:50070/webhdfs/v1</url>
</service>
<service>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-release/home/samples/ExampleOozieWorkflow.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/ExampleOozieWorkflow.groovy b/gateway-release/home/samples/ExampleOozieWorkflow.groovy
index ac83c13..5055b35 100644
--- a/gateway-release/home/samples/ExampleOozieWorkflow.groovy
+++ b/gateway-release/home/samples/ExampleOozieWorkflow.groovy
@@ -16,6 +16,7 @@
* limitations under the License.
*/
import com.jayway.jsonpath.JsonPath
+import groovy.json.JsonSlurper
import org.apache.hadoop.gateway.shell.Hadoop
import org.apache.hadoop.gateway.shell.hdfs.Hdfs
import org.apache.hadoop.gateway.shell.workflow.Workflow
@@ -23,11 +24,10 @@ import org.apache.hadoop.gateway.shell.workflow.Workflow
import static java.util.concurrent.TimeUnit.SECONDS
gateway = "https://localhost:8443/gateway/sample"
-jobTracker = "sandbox.hortonworks.com:8050"
-nameNode = "sandbox.hortonworks.com:8020"
-username = "hue"
-password = "hue-password"
+username = "guest"
+password = username + "-password"
inputFile = "LICENSE"
+jobDir = "/user/" + username + "/test"
jarFile = "samples/hadoop-examples.jar"
definition = """\
@@ -35,11 +35,11 @@ definition = """\
<start to="root-node"/>
<action name="root-node">
<java>
- <job-tracker>set-via-configuration-property</job-tracker>
- <name-node>set-via-configuration-property</name-node>
+ <job-tracker>\${jobTracker}</job-tracker>
+ <name-node>\${nameNode}</name-node>
<main-class>org.apache.hadoop.examples.WordCount</main-class>
- <arg>/tmp/test/input</arg>
- <arg>/tmp/test/output</arg>
+ <arg>$jobDir/input</arg>
+ <arg>$jobDir/output</arg>
</java>
<ok to="end"/>
<error to="fail"/>
@@ -54,44 +54,64 @@ definition = """\
configuration = """\
<configuration>
<property>
- <name>user.name</name>
- <value>$username</value>
+ <name>fs.default.name</name>
+ <value>default</value>
</property>
<property>
<name>nameNode</name>
- <value>hdfs://$nameNode</value>
+ <value>default</value>
+ </property>
+ <property>
+ <name>mapred.job.tracker</name>
+ <value>default</value>
</property>
<property>
<name>jobTracker</name>
- <value>$jobTracker</value>
+ <value>default</value>
+ </property>
+ <property>
+ <name>user.name</name>
+ <value>default</value>
+ </property>
+ <property>
+ <name>mapreduce.job.user.name</name>
+ <value>default</value>
</property>
<property>
<name>oozie.wf.application.path</name>
- <value>hdfs://$nameNode/tmp/test</value>
+ <value>$jobDir</value>
+ </property>
+ <property>
+ <name>oozie.libpath</name>
+ <value>\$jobDir/lib</value>
+ </property>
+ <property>
+ <name>oozie.proxysubmission</name>
+ <value>true</value>
</property>
</configuration>
"""
session = Hadoop.login( gateway, username, password )
-println "Delete /tmp/test " + Hdfs.rm( session ).file( "/tmp/test" ).recursive().now().statusCode
-println "Mkdir /tmp/test " + Hdfs.mkdir( session ).dir( "/tmp/test" ).now().statusCode
+println "Delete " + jobDir + " " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
+println "Mkdir " + jobDir + " " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
-putWorkflow = Hdfs.put(session).text( definition ).to( "/tmp/test/workflow.xml" ).later() {
- println "Put /tmp/test/workflow.xml " + it.statusCode }
+putData = Hdfs.put(session).file( inputFile ).to( jobDir + "/input/FILE" ).later() {
+ println "Put " + jobDir + "/input/FILE " + it.statusCode }
-putData = Hdfs.put(session).file( inputFile ).to( "/tmp/test/input/FILE" ).later() {
- println "Put /tmp/test/input/FILE " + it.statusCode }
+putJar = Hdfs.put(session).file( jarFile ).to( jobDir + "/lib/hadoop-examples.jar" ).later() {
+ println "Put " + jobDir + "/lib/hadoop-examples.jar " + it.statusCode }
-putJar = Hdfs.put(session).file( jarFile ).to( "/tmp/test/lib/hadoop-examples.jar" ).later() {
- println "Put /tmp/test/lib/hadoop-examples.jar " + it.statusCode }
+putWorkflow = Hdfs.put(session).text( definition ).to( jobDir + "/workflow.xml" ).later() {
+ println "Put " + jobDir + "/workflow.xml " + it.statusCode }
session.waitFor( putWorkflow, putData, putJar )
jobId = Workflow.submit(session).text( configuration ).now().jobId
println "Submitted job " + jobId
-println "Polling for completion..."
+println "Polling up to 60s for completion..."
status = "UNKNOWN";
count = 0;
while( status != "SUCCEEDED" && count++ < 60 ) {
@@ -99,6 +119,10 @@ while( status != "SUCCEEDED" && count++ < 60 ) {
json = Workflow.status(session).jobId( jobId ).now().string
status = JsonPath.read( json, "\$.status" )
}
-println "Job status " + status;
+println "Job status " + status
+
+text = Hdfs.ls( session ).dir( jobDir ).now().string
+json = (new JsonSlurper()).parseText( text )
+println json.FileStatuses.FileStatus.pathSuffix
println "Shutdown " + session.shutdown( 10, SECONDS )
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-release/home/templates/sample.conf
----------------------------------------------------------------------
diff --git a/gateway-release/home/templates/sample.conf b/gateway-release/home/templates/sample.conf
index 7797f17..5d4085e 100644
--- a/gateway-release/home/templates/sample.conf
+++ b/gateway-release/home/templates/sample.conf
@@ -57,7 +57,7 @@
<value>hdfs=lmccay;kminder=admin</value>
</property>
<property>
- <name>topology.service.NAMENODE..url</name>
+ <name>topology.service.WEBHDFS..url</name>
<value>http://localhost:50070/webhdfs/v1</value>
</property>
<property>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-release/home/templates/sample.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/templates/sample.xml b/gateway-release/home/templates/sample.xml
index 5c6891d..a84c9de 100644
--- a/gateway-release/home/templates/sample.xml
+++ b/gateway-release/home/templates/sample.xml
@@ -51,7 +51,7 @@
</gateway>
<service>
- <role>NAMENODE</role>
+ <role>WEBHDFS</role>
<url>http://localhost:50070/webhdfs/v1</url>
</service>
<service>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-release/home/templates/topology.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/templates/topology.xml b/gateway-release/home/templates/topology.xml
index 41de7f7..8c6da85 100644
--- a/gateway-release/home/templates/topology.xml
+++ b/gateway-release/home/templates/topology.xml
@@ -51,7 +51,7 @@
</gateway>
<service>
- <role>NAMENODE</role>
+ <role>WEBHDFS</role>
<url>http://localhost:50070/webhdfs/v1</url>
</service>
<service>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServer.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServer.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServer.java
index 062b9ff..827ab88 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServer.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServer.java
@@ -24,10 +24,8 @@ import org.apache.commons.io.IOUtils;
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.config.impl.GatewayConfigImpl;
import org.apache.hadoop.gateway.deploy.DeploymentFactory;
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
-import org.apache.hadoop.gateway.services.DefaultGatewayServices;
import org.apache.hadoop.gateway.services.GatewayServices;
import org.apache.hadoop.gateway.services.ServiceLifecycleException;
import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
@@ -227,11 +225,6 @@ public class GatewayServer {
private synchronized void start() throws Exception {
-// Map<String,String> params = new HashMap<String,String>();
-// params.put( GatewayConfigImpl.NAMENODE_ADDRESS, config.getNameNodeAddress() );
-// params.put( GatewayConfigImpl.TEMPLETON_ADDRESS, config.getTempletonAddress() );
-// params.put( GatewayConfigImpl.SHIRO_CONFIG_FILE, config.getShiroConfigFile() );
-
// Create the global context handler.
contexts = new ContextHandlerCollection();
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
index 8f55328..e56d2af 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
@@ -235,18 +235,6 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
return get( DEPLOYMENT_DIR, DEFAULT_DEPLOYMENT_DIR );
}
-// public String getShiroConfigFile() {
-// return get( SHIRO_CONFIG_FILE, DEFAULT_SHIRO_CONFIG_FILE );
-// }
-
-// public String getNameNodeAddress() {
-// return get( NAMENODE_ADDRESS, DEFAULT_NAMENODE_ADDRESS );
-// }
-
-// public String getTempletonAddress() {
-// return get( TEMPLETON_ADDRESS, DEFAULT_TEMPLETON_ADDRESS );
-// }
-
public InetSocketAddress getGatewayAddress() throws UnknownHostException {
String host = getGatewayHost();
int port = getGatewayPort();
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
index f9371ce..94a7ba3 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
@@ -253,7 +253,7 @@ public abstract class DeploymentFactory {
ServiceRegistry sr = (ServiceRegistry) gatewayServices.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
if (sr != null) {
String regCode = sr.getRegistrationCode(topology.getName());
- sr.registerService(regCode, topology.getName(), service.getRole(), service.getUrl().toExternalForm());
+ sr.registerService(regCode, topology.getName(), service.getRole(), service.getUrl() );
}
}
} catch( Exception e ) {
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
index b416d78..4175bf4 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
@@ -16,9 +16,6 @@
*/
package org.apache.hadoop.gateway.topology.builder.property.interpreter;
-import java.net.MalformedURLException;
-import java.net.URL;
-
import org.apache.hadoop.gateway.GatewayResources;
import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
import org.apache.hadoop.gateway.topology.Service;
@@ -69,11 +66,7 @@ public class ServicePropertyInterpreter extends AbstractInterpreter {
}
if (SERVICE_URL.equalsIgnoreCase(nextToken)) {
- try {
- service.setUrl(new URL(value));
- } catch (MalformedURLException mue) {
- throw new InterpretException(mue);
- }
+ service.setUrl( value );
} else {
throw new InterpretException(gatewayResources.unsupportedPropertyTokenError(token));
}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/builder/PropertyTopologyBuilderTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/builder/PropertyTopologyBuilderTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/builder/PropertyTopologyBuilderTest.java
index 35dbcbe..5d50ed2 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/builder/PropertyTopologyBuilderTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/builder/PropertyTopologyBuilderTest.java
@@ -126,21 +126,21 @@ public class PropertyTopologyBuilderTest {
@Test(expected = IllegalArgumentException.class)
public void testBuildFailedForWrongServiceToken2() {
PropertyTopologyBuilder propertyTopologyBuilder = new PropertyTopologyBuilder();
- propertyTopologyBuilder.addProperty(new Property("topology.gateway.service.NAMENODE", "value"));
+ propertyTopologyBuilder.addProperty(new Property("topology.gateway.service.WEBHDFS", "value"));
propertyTopologyBuilder.build();
}
@Test(expected = IllegalArgumentException.class)
public void testBuildFailedForWrongServiceToken3() {
PropertyTopologyBuilder propertyTopologyBuilder = new PropertyTopologyBuilder();
- propertyTopologyBuilder.addProperty(new Property("topology.gateway.service.NAMENODE.", "value"));
+ propertyTopologyBuilder.addProperty(new Property("topology.gateway.service.WEBHDFS.", "value"));
propertyTopologyBuilder.build();
}
@Test(expected = IllegalArgumentException.class)
public void testBuildSuccessfulForServiceProperty() {
PropertyTopologyBuilder propertyTopologyBuilder = new PropertyTopologyBuilder();
- propertyTopologyBuilder.addProperty(new Property("topology.gateway.service.NAMENODE..url", "http://host:50070/webhdfs/v1"));
+ propertyTopologyBuilder.addProperty(new Property("topology.gateway.service.WEBHDFS..url", "http://host:50070/webhdfs/v1"));
Topology topology = propertyTopologyBuilder.build();
assertThat(topology, notNullValue());
@@ -149,7 +149,7 @@ public class PropertyTopologyBuilderTest {
@Test(expected = IllegalArgumentException.class)
public void testBuildFailedForWrongServiceProperty() {
PropertyTopologyBuilder propertyTopologyBuilder = new PropertyTopologyBuilder();
- propertyTopologyBuilder.addProperty(new Property("topology.gateway.service.NAMENODE..miss_prop", "value"));
+ propertyTopologyBuilder.addProperty(new Property("topology.gateway.service.WEBHDFS..miss_prop", "value"));
propertyTopologyBuilder.build();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
index 162166c..7b88aa9 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
@@ -30,12 +30,13 @@ import org.xml.sax.SAXException;
import java.io.File;
import java.io.IOException;
+import java.net.URISyntaxException;
import java.net.URL;
import static org.apache.commons.digester3.binder.DigesterLoader.newLoader;
import static org.hamcrest.core.Is.is;
-import static org.hamcrest.core.IsNull.nullValue;
import static org.hamcrest.core.IsNull.notNullValue;
+import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;
public class TopologyRulesModuleTest {
@@ -52,7 +53,7 @@ public class TopologyRulesModuleTest {
}
@Test
- public void testParseSimpleTopologyXmlInKnoxFormat() throws IOException, SAXException {
+ public void testParseSimpleTopologyXmlInKnoxFormat() throws IOException, SAXException, URISyntaxException {
Digester digester = loader.newDigester();
String name = "org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml";
URL url = ClassLoader.getSystemResource( name );
@@ -69,8 +70,8 @@ public class TopologyRulesModuleTest {
Service comp = topology.getServices().iterator().next();
assertThat( comp, notNullValue() );
- assertThat( comp.getRole(), is( "NAMENODE" ) );
- assertThat( comp.getUrl(), is( new URL( "http://host:80/webhdfs/v1" ) ) );
+ assertThat( comp.getRole(), is( "WEBHDFS" ) );
+ assertThat( comp.getUrl(), is( "http://host:80/webhdfs/v1" ) );
Provider provider = topology.getProviders().iterator().next();
assertThat( provider, notNullValue() );
@@ -80,7 +81,7 @@ public class TopologyRulesModuleTest {
}
@Test
- public void testParseSimpleTopologyXmlInHadoopFormat() throws IOException, SAXException {
+ public void testParseSimpleTopologyXmlInHadoopFormat() throws IOException, SAXException, URISyntaxException {
Digester digester = loader.newDigester();
String name = "org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf";
URL url = ClassLoader.getSystemResource( name );
@@ -96,29 +97,29 @@ public class TopologyRulesModuleTest {
assertThat( topology.getServices().size(), is( 4 ) );
assertThat( topology.getProviders().size(), is( 2 ) );
- Service namenodeService = topology.getService( "NAMENODE", null );
+ Service namenodeService = topology.getService( "WEBHDFS", null );
assertThat( namenodeService, notNullValue() );
- assertThat( namenodeService.getRole(), is( "NAMENODE" ) );
+ assertThat( namenodeService.getRole(), is( "WEBHDFS" ) );
assertThat( namenodeService.getName(), nullValue() );
- assertThat( namenodeService.getUrl(), is( new URL( "http://host:50070/webhdfs/v1" ) ) );
+ assertThat( namenodeService.getUrl(), is( "http://host:50070/webhdfs/v1" ) );
Service templetonService = topology.getService( "TEMPLETON", null );
assertThat( templetonService, notNullValue() );
assertThat( templetonService.getRole(), is( "TEMPLETON" ) );
assertThat( templetonService.getName(), nullValue() );
- assertThat( templetonService.getUrl(), is( new URL( "http://host:50111/templeton/v1" ) ) );
+ assertThat( templetonService.getUrl(), is( "http://host:50111/templeton/v1" ) );
Service oozieService = topology.getService( "OOZIE", null );
assertThat( oozieService, notNullValue() );
assertThat( oozieService.getRole(), is( "OOZIE" ) );
assertThat( oozieService.getName(), nullValue() );
- assertThat( oozieService.getUrl(), is( new URL( "http://host:11000/oozie" ) ) );
+ assertThat( oozieService.getUrl(), is( "http://host:11000/oozie" ) );
Service hiveService = topology.getService( "HIVE", null );
assertThat( hiveService, notNullValue() );
assertThat( hiveService.getRole(), is( "HIVE" ) );
assertThat( hiveService.getName(), nullValue() );
- assertThat( hiveService.getUrl(), is( new URL( "http://host:10000" ) ) );
+ assertThat( hiveService.getUrl(), is( "http://host:10000" ) );
Provider authenticationProvider = topology.getProvider( "authentication", "ShiroProvider" );
assertThat( authenticationProvider, notNullValue() );
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-one.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-one.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-one.xml
index 3054837..a231a1a 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-one.xml
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-one.xml
@@ -6,7 +6,7 @@
<security></security>
-->
<service>
- <role>NAMENODE</role>
+ <role>WEBHDFS</role>
<url>http://host-one:80/webhdfs/v1</url>
</service>
</topology>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-three.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-three.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-three.xml
index 5306c18..488c618 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-three.xml
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-three.xml
@@ -6,7 +6,7 @@
<security></security>
-->
<service>
- <role>NAMENODE</role>
+ <role>WEBHDFS</role>
<url>http://host-three:80/webhdfs/v1</url>
</service>
</topology>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-two.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-two.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-two.xml
index f12e9ac..0529599 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-two.xml
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/topology-two.xml
@@ -6,7 +6,7 @@
<security></security>
-->
<service>
- <role>NAMENODE</role>
+ <role>WEBHDFS</role>
<url>http://host-two:80/webhdfs/v1</url>
</service>
</topology>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf
index a112b80..51b1073 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf
@@ -57,7 +57,7 @@
<value>hdfs=lmccay;kminder=admin</value>
</property>
<property>
- <name>topology.service.NAMENODE..url</name>
+ <name>topology.service.WEBHDFS..url</name>
<value>http://host:50070/webhdfs/v1</value>
</property>
<property>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
index 09bb39c..8a6f0ad 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
@@ -62,7 +62,7 @@
</gateway>
<service>
- <role>NAMENODE</role>
+ <role>WEBHDFS</role>
<url>http://host:80/webhdfs/v1</url>
</service>
</topology>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HbaseDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HbaseDeploymentContributor.java b/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HbaseDeploymentContributor.java
index c467fdf..8243a9f 100644
--- a/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HbaseDeploymentContributor.java
+++ b/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HbaseDeploymentContributor.java
@@ -33,7 +33,7 @@ import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
-public class HbaseDeploymentContributor extends ServiceDeploymentContributorBase {
+public class HBaseDeploymentContributor extends ServiceDeploymentContributorBase {
private static final String EXTERNAL_PATH = "/hbase/api/v1";
private static final String CLUSTER_URL_FUNCTION = "{gateway.url}";
@@ -64,13 +64,13 @@ public class HbaseDeploymentContributor extends ServiceDeploymentContributorBase
.directions( "inbound" )
.pattern( "*://*:*/**" + EXTERNAL_PATH + "/?{**}" );
rewrite = rule.addStep( "rewrite" );
- rewrite.template( service.getUrl().toExternalForm() + "/?{**}" );
+ rewrite.template( service.getUrl() + "/?{**}" );
rule = rules.addRule( getQualifiedName() + "/root/inbound" )
.directions( "inbound" )
.pattern( "*://*:*/**" + EXTERNAL_PATH + "/{**}?{**}" );
rewrite = rule.addStep( "rewrite" );
- rewrite.template( service.getUrl().toExternalForm() + "/{**}?{**}" );
+ rewrite.template( service.getUrl() + "/{**}?{**}" );
rule = rules.addRule( getQualifiedName() + "/hbase/outbound" )
.directions( "outbound" );
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
----------------------------------------------------------------------
diff --git a/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor b/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
index 3b95831..5fc751f 100644
--- a/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
+++ b/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
@@ -16,4 +16,4 @@
# limitations under the License.
##########################################################################
-org.apache.hadoop.gateway.hbase.HbaseDeploymentContributor
+org.apache.hadoop.gateway.hbase.HBaseDeploymentContributor
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/HdfsDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/HdfsDeploymentContributor.java b/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/HdfsDeploymentContributor.java
deleted file mode 100644
index 76f5dfb..0000000
--- a/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/HdfsDeploymentContributor.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hdfs;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFilterContentDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFilterDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRuleDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.ext.UrlRewriteActionRewriteDescriptorExt;
-import org.apache.hadoop.gateway.filter.rewrite.ext.UrlRewriteMatchDescriptor;
-import org.apache.hadoop.gateway.topology.Service;
-
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-
-public class HdfsDeploymentContributor extends ServiceDeploymentContributorBase {
-
- private static final String NAMENODE_EXTERNAL_PATH = "/namenode/api/v1";
- private static final String DATANODE_INTERNAL_PATH = "/webhdfs/v1";
- private static final String DATANODE_EXTERNAL_PATH = "/datanode/api/v1";
- private static final String CLUSTER_URL_FUNCTION = "{gateway.url}";
-
- @Override
- public String getRole() {
- return "NAMENODE";
- }
-
- @Override
- public String getName() {
- return "hdfs";
- }
-
- @Override
- public void contributeService( DeploymentContext context, Service service ) throws Exception {
- contributeRewriteRules( context, service );
- contributeNameNodeResource( context, service );
- contributeDataNodeResource( context, service );
- }
-
- private void contributeRewriteRules( DeploymentContext context, Service service ) throws URISyntaxException {
- UrlRewriteRulesDescriptor rules = context.getDescriptor( "rewrite" );
- UrlRewriteRuleDescriptor rule;
- UrlRewriteActionRewriteDescriptorExt rewrite;
- UrlRewriteMatchDescriptor match;
-
- rule = rules.addRule( getQualifiedName() + "/namenode/root/inbound" )
- .directions( "inbound" )
- .pattern( "*://*:*/**" + NAMENODE_EXTERNAL_PATH + "/?{**}" );
- rewrite = rule.addStep( "rewrite" );
- //rewrite.template( service.getUrl().toExternalForm() + "/?user.name={$username}&{**}" );
- rewrite.template( service.getUrl().toExternalForm() + "/?{**}" );
-
- rule = rules.addRule( getQualifiedName() + "/namenode/file/inbound" )
- .directions( "inbound" )
- .pattern( "*://*:*/**" + NAMENODE_EXTERNAL_PATH + "/{path=**}?{**}" );
- rewrite = rule.addStep( "rewrite" );
- //rewrite.template( service.getUrl().toExternalForm() + "/{path=**}?user.name={$username}&{**}" );
- rewrite.template( service.getUrl().toExternalForm() + "/{path=**}?{**}" );
-
- rule = rules.addRule( getQualifiedName() + "/datanode/inbound" )
- .directions( "inbound" )
- .pattern( "*://*:*/**" + DATANODE_EXTERNAL_PATH + "/{path=**}?**" );
- //TODO: If the input type is wrong it throws a NPE.
- rule.addStep( "decode-query" );
- match = rule.addStep( "match" );
- match.pattern( "*://*:*/**" + DATANODE_EXTERNAL_PATH + "/{path=**}?{host}&{port}&{**}" );
- rewrite = rule.addStep( "rewrite" );
- rewrite.template( "http://{host}:{port}/{path=**}?{**}" );
-
- rule = rules.addRule( getQualifiedName() + "/datanode/outbound" )
- .directions( "outbound" );
- match = rule.addStep( "match" );
- match.pattern( "*://{host}:{port}/{path=**}?{**}" );
- rewrite = rule.addStep( "rewrite" );
- rewrite.template( CLUSTER_URL_FUNCTION + DATANODE_EXTERNAL_PATH + "/{path=**}?host={$hostmap(host)}&{port}&{**}" );
- rule.addStep( "encode-query" );
-
- UrlRewriteFilterDescriptor filter = rules.addFilter( getQualifiedName() + "/outbound" );
- UrlRewriteFilterContentDescriptor content = filter.addContent( "application/x-http-headers" );
- content.addApply( "Location", getQualifiedName() + "/datanode/outbound" );
- }
-
- public void contributeNameNodeResource( DeploymentContext context, Service service ) throws URISyntaxException {
- ResourceDescriptor rootResource = context.getGatewayDescriptor().addResource();
- rootResource.role( service.getRole() );
- rootResource.pattern( NAMENODE_EXTERNAL_PATH + "/?**" );
- addAuthenticationFilter( context, service, rootResource );
- addRewriteFilter( context, service, rootResource );
- addIdentityAssertionFilter( context, service, rootResource );
- addAuthorizationFilter( context, service, rootResource );
- addDispatchFilter( context, service, rootResource, "dispatch", null );
-
- ResourceDescriptor fileResource = context.getGatewayDescriptor().addResource();
- fileResource.role( service.getRole() );
- fileResource.pattern( NAMENODE_EXTERNAL_PATH + "/**?**" );
- addAuthenticationFilter( context, service, fileResource );
- addRewriteFilter( context, service, fileResource );
- addIdentityAssertionFilter( context, service, fileResource );
- addAuthorizationFilter( context, service, fileResource );
- addDispatchFilter( context, service, fileResource, "dispatch", null );
- }
-
- public void contributeDataNodeResource( DeploymentContext context, Service service ) throws URISyntaxException {
- ResourceDescriptor fileResource = context.getGatewayDescriptor().addResource();
- fileResource.role( service.getRole() );
- fileResource.pattern( DATANODE_EXTERNAL_PATH + "/**?**" );
- addAuthenticationFilter( context, service, fileResource );
- addIdentityAssertionFilter( context, service, fileResource );
- addAuthorizationFilter( context, service, fileResource );
- addRewriteFilter( context, service, fileResource );
- addDispatchFilter( context, service, fileResource, "dispatch", null );
- }
-
- private void addRewriteFilter(
- DeploymentContext context, Service service, ResourceDescriptor resource ) throws URISyntaxException {
- List<FilterParamDescriptor> params = new ArrayList<FilterParamDescriptor>();
- params.add( resource.createFilterParam().name( "response.headers" ).value( getQualifiedName() + "/outbound" ) );
- context.contributeFilter( service, resource, "rewrite", null, params );
-
- }
-
- private String getQualifiedName() {
- return getRole() + "/" + getName();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/JobTrackerDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/JobTrackerDeploymentContributor.java b/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/JobTrackerDeploymentContributor.java
new file mode 100644
index 0000000..d982592
--- /dev/null
+++ b/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/JobTrackerDeploymentContributor.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hdfs;
+
+import org.apache.hadoop.gateway.deploy.DeploymentContext;
+import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
+import org.apache.hadoop.gateway.topology.Service;
+
+public class JobTrackerDeploymentContributor extends ServiceDeploymentContributorBase {
+
+ @Override
+ public String getRole() {
+ return "JOBTRACKER";
+ }
+
+ @Override
+ public String getName() {
+ return "jobtracker";
+ }
+
+ @Override
+ public void contributeService( DeploymentContext context, Service service ) throws Exception {
+ // NoOp
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeDeploymentContributor.java b/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeDeploymentContributor.java
new file mode 100644
index 0000000..6203bb0
--- /dev/null
+++ b/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeDeploymentContributor.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hdfs;
+
+import org.apache.hadoop.gateway.deploy.DeploymentContext;
+import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
+import org.apache.hadoop.gateway.topology.Service;
+
+public class NameNodeDeploymentContributor extends ServiceDeploymentContributorBase {
+
+ @Override
+ public String getRole() {
+ return "NAMENODE";
+ }
+
+ @Override
+ public String getName() {
+ return "namenode";
+ }
+
+ @Override
+ public void contributeService( DeploymentContext context, Service service ) throws Exception {
+ // NoOp
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java b/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
new file mode 100644
index 0000000..ff85835
--- /dev/null
+++ b/gateway-service-hdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hdfs;
+
+import org.apache.hadoop.gateway.deploy.DeploymentContext;
+import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
+import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
+import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
+import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFilterContentDescriptor;
+import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFilterDescriptor;
+import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRuleDescriptor;
+import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
+import org.apache.hadoop.gateway.filter.rewrite.ext.UrlRewriteActionRewriteDescriptorExt;
+import org.apache.hadoop.gateway.filter.rewrite.ext.UrlRewriteMatchDescriptor;
+import org.apache.hadoop.gateway.topology.Service;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class WebHdfsDeploymentContributor extends ServiceDeploymentContributorBase {
+
+ private static final String WEBHDFS_EXTERNAL_PATH = "/namenode/api/v1";
+ private static final String DATANODE_INTERNAL_PATH = "/webhdfs/v1";
+ private static final String DATANODE_EXTERNAL_PATH = "/datanode/api/v1";
+ private static final String CLUSTER_URL_FUNCTION = "{gateway.url}";
+
+ @Override
+ public String getRole() {
+ return "WEBHDFS";
+ }
+
+ @Override
+ public String getName() {
+ return "webhdfs";
+ }
+
+ @Override
+ public void contributeService( DeploymentContext context, Service service ) throws Exception {
+ contributeRewriteRules( context, service );
+ contributeNameNodeResource( context, service );
+ contributeDataNodeResource( context, service );
+ }
+
+ private void contributeRewriteRules( DeploymentContext context, Service service ) throws URISyntaxException {
+ UrlRewriteRulesDescriptor rules = context.getDescriptor( "rewrite" );
+ UrlRewriteRuleDescriptor rule;
+ UrlRewriteActionRewriteDescriptorExt rewrite;
+ UrlRewriteMatchDescriptor match;
+
+ rule = rules.addRule( getQualifiedName() + "/namenode/root/inbound" )
+ .directions( "inbound" )
+ .pattern( "*://*:*/**" + WEBHDFS_EXTERNAL_PATH + "/?{**}" );
+ rewrite = rule.addStep( "rewrite" );
+ //rewrite.template( service.getUrl().toExternalForm() + "/?user.name={$username}&{**}" );
+ rewrite.template( service.getUrl() + "/?{**}" );
+
+ rule = rules.addRule( getQualifiedName() + "/namenode/file/inbound" )
+ .directions( "inbound" )
+ .pattern( "*://*:*/**" + WEBHDFS_EXTERNAL_PATH + "/{path=**}?{**}" );
+ rewrite = rule.addStep( "rewrite" );
+ //rewrite.template( service.getUrl().toExternalForm() + "/{path=**}?user.name={$username}&{**}" );
+ rewrite.template( service.getUrl() + "/{path=**}?{**}" );
+
+ rule = rules.addRule( getQualifiedName() + "/datanode/inbound" )
+ .directions( "inbound" )
+ .pattern( "*://*:*/**" + DATANODE_EXTERNAL_PATH + "/{path=**}?**" );
+ //TODO: If the input type is wrong it throws a NPE.
+ rule.addStep( "decode-query" );
+ match = rule.addStep( "match" );
+ match.pattern( "*://*:*/**" + DATANODE_EXTERNAL_PATH + "/{path=**}?{host}&{port}&{**}" );
+ rewrite = rule.addStep( "rewrite" );
+ rewrite.template( "http://{host}:{port}/{path=**}?{**}" );
+
+ rule = rules.addRule( getQualifiedName() + "/datanode/outbound" )
+ .directions( "outbound" );
+ match = rule.addStep( "match" );
+ match.pattern( "*://{host}:{port}/{path=**}?{**}" );
+ rewrite = rule.addStep( "rewrite" );
+ rewrite.template( CLUSTER_URL_FUNCTION + DATANODE_EXTERNAL_PATH + "/{path=**}?host={$hostmap(host)}&{port}&{**}" );
+ rule.addStep( "encode-query" );
+
+ UrlRewriteFilterDescriptor filter = rules.addFilter( getQualifiedName() + "/outbound" );
+ UrlRewriteFilterContentDescriptor content = filter.addContent( "application/x-http-headers" );
+ content.addApply( "Location", getQualifiedName() + "/datanode/outbound" );
+ }
+
+ public void contributeNameNodeResource( DeploymentContext context, Service service ) throws URISyntaxException {
+ ResourceDescriptor rootResource = context.getGatewayDescriptor().addResource();
+ rootResource.role( service.getRole() );
+ rootResource.pattern( WEBHDFS_EXTERNAL_PATH + "/?**" );
+ addAuthenticationFilter( context, service, rootResource );
+ addRewriteFilter( context, service, rootResource );
+ addIdentityAssertionFilter( context, service, rootResource );
+ addAuthorizationFilter( context, service, rootResource );
+ addDispatchFilter( context, service, rootResource, "dispatch", null );
+
+ ResourceDescriptor fileResource = context.getGatewayDescriptor().addResource();
+ fileResource.role( service.getRole() );
+ fileResource.pattern( WEBHDFS_EXTERNAL_PATH + "/**?**" );
+ addAuthenticationFilter( context, service, fileResource );
+ addRewriteFilter( context, service, fileResource );
+ addIdentityAssertionFilter( context, service, fileResource );
+ addAuthorizationFilter( context, service, fileResource );
+ addDispatchFilter( context, service, fileResource, "dispatch", null );
+ }
+
+ public void contributeDataNodeResource( DeploymentContext context, Service service ) throws URISyntaxException {
+ ResourceDescriptor fileResource = context.getGatewayDescriptor().addResource();
+ fileResource.role( service.getRole() );
+ fileResource.pattern( DATANODE_EXTERNAL_PATH + "/**?**" );
+ addAuthenticationFilter( context, service, fileResource );
+ addIdentityAssertionFilter( context, service, fileResource );
+ addAuthorizationFilter( context, service, fileResource );
+ addRewriteFilter( context, service, fileResource );
+ addDispatchFilter( context, service, fileResource, "dispatch", null );
+ }
+
+ private void addRewriteFilter(
+ DeploymentContext context, Service service, ResourceDescriptor resource ) throws URISyntaxException {
+ List<FilterParamDescriptor> params = new ArrayList<FilterParamDescriptor>();
+ params.add( resource.createFilterParam().name( "response.headers" ).value( getQualifiedName() + "/outbound" ) );
+ context.contributeFilter( service, resource, "rewrite", null, params );
+ }
+
+ private String getQualifiedName() {
+ return getRole() + "/" + getName();
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
----------------------------------------------------------------------
diff --git a/gateway-service-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor b/gateway-service-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
index 70cffb5..0a3d6ee 100644
--- a/gateway-service-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
+++ b/gateway-service-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
@@ -16,4 +16,6 @@
# limitations under the License.
##########################################################################
-org.apache.hadoop.gateway.hdfs.HdfsDeploymentContributor
\ No newline at end of file
+org.apache.hadoop.gateway.hdfs.WebHdfsDeploymentContributor
+org.apache.hadoop.gateway.hdfs.NameNodeDeploymentContributor
+org.apache.hadoop.gateway.hdfs.JobTrackerDeploymentContributor
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java
index d735481..898392d 100644
--- a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java
+++ b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java
@@ -58,7 +58,7 @@ public class HiveDeploymentContributor extends ServiceDeploymentContributorBase
.directions( "inbound" )
.pattern( "*://*:*/**" + EXTERNAL_PATH );
rewrite = rule.addStep( "rewrite" );
- rewrite.template( service.getUrl().toExternalForm() );
+ rewrite.template( service.getUrl() );
}
public void contributeResources( DeploymentContext context, Service service ) throws URISyntaxException {
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-oozie/src/main/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-oozie/src/main/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor.java b/gateway-service-oozie/src/main/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor.java
index d685757..3f5f212 100644
--- a/gateway-service-oozie/src/main/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor.java
+++ b/gateway-service-oozie/src/main/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.gateway.oozie;
import org.apache.hadoop.gateway.deploy.DeploymentContext;
import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
+import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptorFactory;
@@ -29,6 +30,8 @@ import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
public class OozieDeploymentContributor extends ServiceDeploymentContributorBase {
@@ -51,50 +54,10 @@ public class OozieDeploymentContributor extends ServiceDeploymentContributorBase
contributeResources( context, service );
}
- UrlRewriteRulesDescriptor loadRulesFromTemplate() throws IOException {
-// VelocityEngine engine = new VelocityEngine();
-// engine.setProperty( RuntimeConstants.RESOURCE_LOADER, "classpath" );
-// engine.setProperty( "classpath.resource.loader.class", ClasspathResourceLoader.class.getName() );
-// engine.init();
-// VelocityContext context = new VelocityContext();
-// //context.put( "name", new String( "Velocity" ) );
-// Template template = engine.getTemplate( OozieDeploymentContributor.class.getName().replace( '.', '/' ) + "/rewrite.xml.vm");
-// StringWriter writer = new StringWriter();
-// template.merge( context, writer );
-// String string = writer.toString();
- InputStream stream = this.getClass().getClassLoader().getResourceAsStream( RULES_RESOURCE );
- Reader reader = new InputStreamReader( stream );
- UrlRewriteRulesDescriptor rules = UrlRewriteRulesDescriptorFactory.load( "xml", reader );
- reader.close();
- stream.close();
- return rules;
- }
-
private void contributeRewriteRules( DeploymentContext context, Service service ) throws URISyntaxException, IOException {
UrlRewriteRulesDescriptor oozieRules = loadRulesFromTemplate();
UrlRewriteRulesDescriptor clusterRules = context.getDescriptor( "rewrite" );
clusterRules.addRules( oozieRules );
-// UrlRewriteRuleDescriptor rule;
-// UrlRewriteActionRewriteDescriptorExt rewrite;
-//
-// String prefix = getRole() + "/" + getName();
-//
-// rule = clusterRules.addRule( prefix + "/root/inbound" )
-// .directions( "inbound" )
-// .pattern( "*://*:*/**" + EXTERNAL_PATH + "/{**}?{**}" );
-// rewrite = rule.addStep( "rewrite" );
-// rewrite.template( service.getUrl().toExternalForm() + "/{**}?{**}" );
-//
-// rule = clusterRules.addRule( prefix + "/api/inbound" )
-// .directions( "inbound" )
-// .pattern( "*://*:*/**" + EXTERNAL_PATH + "/v1/{**}?{**}" );
-// rewrite = rule.addStep( "rewrite" );
-// rewrite.template( service.getUrl().toExternalForm() + "/v1/{**}?{**}" );
-//
-// UrlRewriteFilterDescriptor filter;
-// UrlRewriteFilterContentDescriptor content;
-// UrlRewriteFilterBufferDescriptor buffer;
-// UrlRewriteFilterDetectDescriptor detect;
}
public void contributeResources( DeploymentContext context, Service service ) throws URISyntaxException {
@@ -107,23 +70,43 @@ public class OozieDeploymentContributor extends ServiceDeploymentContributorBase
addAuthorizationFilter(context, service, rootResource);
addDispatchFilter( context, service, rootResource );
- ResourceDescriptor apiResource = context.getGatewayDescriptor().addResource();
- apiResource.role( service.getRole() );
- apiResource.pattern( EXTERNAL_PATH + "/v1/**?**" );
- addAuthenticationFilter( context, service, apiResource );
- addRewriteFilter( context, service, apiResource );
- addIdentityAssertionFilter( context, service, apiResource );
- addAuthorizationFilter(context, service, apiResource);
- addDispatchFilter( context, service, apiResource );
+ ResourceDescriptor v1Resource = context.getGatewayDescriptor().addResource();
+ v1Resource.role( service.getRole() );
+ v1Resource.pattern( EXTERNAL_PATH + "/v1/**?**" );
+ addAuthenticationFilter( context, service, v1Resource );
+ addRewriteFilter( context, service, v1Resource );
+ addIdentityAssertionFilter( context, service, v1Resource );
+ addAuthorizationFilter(context, service, v1Resource);
+ addDispatchFilter( context, service, v1Resource );
+
+ ResourceDescriptor v2Resource = context.getGatewayDescriptor().addResource();
+ v2Resource.role( service.getRole() );
+ v2Resource.pattern( EXTERNAL_PATH + "/v2/**?**" );
+ addAuthenticationFilter( context, service, v2Resource );
+ addRewriteFilter( context, service, v2Resource );
+ addIdentityAssertionFilter( context, service, v2Resource );
+ addAuthorizationFilter(context, service, v2Resource);
+ addDispatchFilter( context, service, v2Resource );
}
private void addRewriteFilter(
DeploymentContext context, Service service, ResourceDescriptor resource ) throws URISyntaxException {
- context.contributeFilter( service, resource, "rewrite", null, null );
+ List<FilterParamDescriptor> params = new ArrayList<FilterParamDescriptor>();
+ params.add( resource.createFilterParam().name( "request.body" ).value( "OOZIE/oozie/configuration" ) );
+ context.contributeFilter( service, resource, "rewrite", null, params );
}
private void addDispatchFilter(DeploymentContext context, Service service, ResourceDescriptor resource ) {
context.contributeFilter( service, resource, "dispatch", null, null );
}
+ UrlRewriteRulesDescriptor loadRulesFromTemplate() throws IOException {
+ InputStream stream = this.getClass().getClassLoader().getResourceAsStream( RULES_RESOURCE );
+ Reader reader = new InputStreamReader( stream );
+ UrlRewriteRulesDescriptor rules = UrlRewriteRulesDescriptorFactory.load( "xml", reader );
+ reader.close();
+ stream.close();
+ return rules;
+ }
+
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-oozie/src/main/resources/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-service-oozie/src/main/resources/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor/rewrite.xml b/gateway-service-oozie/src/main/resources/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor/rewrite.xml
index a4cb3b9..5f8bbb2 100644
--- a/gateway-service-oozie/src/main/resources/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor/rewrite.xml
+++ b/gateway-service-oozie/src/main/resources/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor/rewrite.xml
@@ -29,7 +29,7 @@
</rule>
<rule name="OOZIE/oozie/name-node-url">
- <rewrite template="hdfs://{$serviceAddr[NAMENODE-RPC]}"/>
+ <rewrite template="hdfs://{$serviceAddr[NAMENODE]}"/>
</rule>
<rule name="OOZIE/oozie/job-tracker-address">
@@ -38,7 +38,7 @@
<rule name="OOZIE/oozie/hdfs-path">
<match pattern="{path=**}"/>
- <rewrite template="hdfs://{$serviceHost[NAMENODE-RPC]}:{$servicePort[NAMENODE-RPC]}/{path=**}"/>
+ <rewrite template="hdfs://{$serviceHost[NAMENODE]}:{$servicePort[NAMENODE]}/{path=**}"/>
</rule>
<filter name="OOZIE/oozie/configuration">
@@ -53,16 +53,52 @@
<detect path="name" value="jobTracker">
<apply path="value" rule="OOZIE/oozie/job-tracker-address"/>
</detect>
+ <detect path="name" value="fs.default.name">
+ <apply path="value" rule="OOZIE/oozie/name-node-url"/>
+ </detect>
+ <detect path="name" value="fs.defaultFS">
+ <apply path="value" rule="OOZIE/oozie/name-node-url"/>
+ </detect>
<detect path="name" value="oozie.wf.application.path">
<apply path="value" rule="OOZIE/oozie/hdfs-path"/>
</detect>
+ <detect path="name" value="oozie.coord.application.path">
+ <apply path="value" rule="OOZIE/oozie/hdfs-path"/>
+ </detect>
+ <detect path="name" value="oozie.bundle.application.path">
+ <apply path="value" rule="OOZIE/oozie/hdfs-path"/>
+ </detect>
+ <detect path="name" value="oozie.libpath">
+ <apply path="value" rule="OOZIE/oozie/hdfs-path"/>
+ </detect>
+ <detect path="name" value="mapreduce.job.user.name">
+ <apply path="value" rule="OOZIE/oozie/user-name"/>
+ </detect>
+ <detect path="name" value="mapred.job.tracker">
+ <apply path="value" rule="OOZIE/oozie/job-tracker-address"/>
+ </detect>
+ <detect path="name" value="mapred.input.dir">
+ <apply path="value" rule="OOZIE/oozie/hdfs-path"/>
+ </detect>
+ <detect path="name" value="mapred.output.dir">
+ <apply path="value" rule="OOZIE/oozie/hdfs-path"/>
+ </detect>
</buffer>
</content>
<content type="*/json">
<apply path="$[user.name]" rule="OOZIE/oozie/user-name"/>
<apply path="$[nameNode]" rule="OOZIE/oozie/name-node-url"/>
<apply path="$[jobTracker]" rule="OOZIE/oozie/job-tracker-addr"/>
+ <apply path="$[fs.default.name]" rule="OOZIE/oozie/name-node-url"/>
+ <apply path="$[fs.defaultFS]" rule="OOZIE/oozie/name-node-url"/>
<apply path="$[oozie.wf.application.path]" rule="OOZIE/oozie/hdfs-path"/>
+ <apply path="$[oozie.coord.application.path]" rule="OOZIE/oozie/hdfs-path"/>
+ <apply path="$[oozie.bundle.application.path]" rule="OOZIE/oozie/hdfs-path"/>
+ <apply path="$[oozie.libpath]" rule="OOZIE/oozie/hdfs-path"/>
+ <apply path="$[mapreduce.job.user.name]" rule="OOZIE/oozie/user-name"/>
+ <apply path="$[mapred.job.tracker]" rule="OOZIE/oozie/job-tracker-address"/>
+ <apply path="$[mapred.input.dir]" rule="OOZIE/oozie/hdfs-path"/>
+ <apply path="$[mapred.output.dir]" rule="OOZIE/oozie/hdfs-path"/>
</content>
</filter>
http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/3f1ede53/gateway-service-oozie/src/test/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributorTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-oozie/src/test/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributorTest.java b/gateway-service-oozie/src/test/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributorTest.java
index 2741a06..8b64e9a 100644
--- a/gateway-service-oozie/src/test/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributorTest.java
+++ b/gateway-service-oozie/src/test/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributorTest.java
@@ -30,7 +30,7 @@ import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
-import java.net.URL;
+import java.net.URISyntaxException;
import java.util.Iterator;
import java.util.ServiceLoader;
@@ -55,9 +55,8 @@ public class OozieDeploymentContributorTest {
fail( "Failed to find " + OozieDeploymentContributor.class.getName() + " via service loader." );
}
-
@Test
- public void testLoadRulesFromTemplate() throws IOException {
+ public void testLoadRulesFromTemplate() throws IOException, URISyntaxException {
WebArchive webArchive = ShrinkWrap.create( WebArchive.class, "test-archive" );
UrlRewriteRulesDescriptorImpl clusterRules = new UrlRewriteRulesDescriptorImpl();
@@ -69,7 +68,7 @@ public class OozieDeploymentContributorTest {
Service service = EasyMock.createNiceMock( Service.class );
EasyMock.expect( service.getRole() ).andReturn( "OOZIE" ).anyTimes();
EasyMock.expect( service.getName() ).andReturn( null ).anyTimes();
- EasyMock.expect( service.getUrl() ).andReturn( new URL( "http://test-host:777" ) ).anyTimes();
+ EasyMock.expect( service.getUrl() ).andReturn( "http://test-host:777" ).anyTimes();
EasyMock.replay( context, service );
@@ -95,7 +94,7 @@ public class OozieDeploymentContributorTest {
Service service = EasyMock.createNiceMock( Service.class );
EasyMock.expect( service.getRole() ).andReturn( "OOZIE" ).anyTimes();
EasyMock.expect( service.getName() ).andReturn( null ).anyTimes();
- EasyMock.expect( service.getUrl() ).andReturn( new URL( "http://test-host:777" ) ).anyTimes();
+ EasyMock.expect( service.getUrl() ).andReturn( "http://test-host:777" ).anyTimes();
EasyMock.replay( context, service );