Posted to commits@knox.apache.org by lm...@apache.org on 2014/03/19 05:16:42 UTC

[2/2] git commit: KNOX-314 refactor dispatch and add hive specific one

KNOX-314 refactor dispatch and add hive specific one

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/367e849f
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/367e849f
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/367e849f

Branch: refs/heads/master
Commit: 367e849f31e3194212c1131bc70dd1649c5da542
Parents: 4d45d05
Author: Larry McCay <lm...@hortonworks.com>
Authored: Wed Mar 19 00:14:35 2014 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Wed Mar 19 00:14:35 2014 -0400

----------------------------------------------------------------------
 .../gateway/dispatch/AppCookieManager.java      | 199 -----
 .../dispatch/CappedBufferHttpEntity.java        | 152 ----
 .../gateway/dispatch/HttpClientDispatch.java    | 287 -------
 .../gateway/dispatch/AppCookieManagerTest.java  |  53 --
 .../dispatch/CappedBufferHttpEntityTest.java    | 854 -------------------
 .../dispatch/HttpClientDispatchTest.java        | 122 ---
 .../hbase/HBaseDeploymentContributor.java       |   2 +-
 gateway-service-hive/pom.xml                    |  10 +-
 .../gateway/hive/HiveDeploymentContributor.java |   2 +-
 .../hive/HiveDispatchDeploymentContributor.java |  70 ++
 .../gateway/hive/HiveHttpClientDispatch.java    |  79 ++
 ...gateway.deploy.ProviderDeploymentContributor |  19 +
 .../HiveDispatchDeploymentContributorTest.java  |  44 +
 .../webhcat/WebHCatDeploymentContributor.java   |   4 +-
 gateway-spi/pom.xml                             |  11 +
 .../apache/hadoop/gateway/GatewayMessages.java  |  51 ++
 .../apache/hadoop/gateway/GatewayResources.java |  35 +
 .../ServiceDeploymentContributorBase.java       |   3 +
 .../gateway/dispatch/AppCookieManager.java      | 199 +++++
 .../dispatch/CappedBufferHttpEntity.java        | 152 ++++
 .../gateway/dispatch/HttpClientDispatch.java    | 297 +++++++
 .../gateway/dispatch/AppCookieManagerTest.java  |  53 ++
 .../dispatch/CappedBufferHttpEntityTest.java    | 854 +++++++++++++++++++
 .../dispatch/HttpClientDispatchTest.java        | 121 +++
 pom.xml                                         |   2 +-
 25 files changed, 2002 insertions(+), 1673 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/AppCookieManager.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/AppCookieManager.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/AppCookieManager.java
deleted file mode 100644
index a316f3c..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/AppCookieManager.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.dispatch;
-
-import java.io.IOException;
-import java.net.URI;
-import java.security.Principal;
-
-import org.apache.hadoop.gateway.GatewayMessages;
-import org.apache.hadoop.gateway.audit.api.Action;
-import org.apache.hadoop.gateway.audit.api.ActionOutcome;
-import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
-import org.apache.hadoop.gateway.audit.api.Auditor;
-import org.apache.hadoop.gateway.audit.api.ResourceType;
-import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.http.Header;
-import org.apache.http.HeaderElement;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpHost;
-import org.apache.http.HttpRequest;
-import org.apache.http.HttpResponse;
-import org.apache.http.auth.AuthScope;
-import org.apache.http.auth.Credentials;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpOptions;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.client.params.AuthPolicy;
-import org.apache.http.impl.auth.SPNegoSchemeFactory;
-import org.apache.http.impl.client.DefaultHttpClient;
-
-/**
- * Handles SPNego authentication as a client of a Hadoop service and caches
- * the hadoop.auth cookie returned by the service on successful SPNego
- * authentication. Refreshes the hadoop.auth cookie on demand if the cookie
- * has expired.
- * 
- */
-public class AppCookieManager {
-
-  static final String HADOOP_AUTH = "hadoop.auth";
-  private static final String HADOOP_AUTH_EQ = "hadoop.auth=";
-  private static final String SET_COOKIE = "Set-Cookie";
-
-  private static GatewayMessages LOG = MessagesFactory.get(GatewayMessages.class);
-  private static Auditor auditor = AuditServiceFactory.getAuditService().getAuditor( AuditConstants.DEFAULT_AUDITOR_NAME,
-          AuditConstants.KNOX_SERVICE_NAME, AuditConstants.KNOX_COMPONENT_NAME );
-  private static final EmptyJaasCredentials EMPTY_JAAS_CREDENTIALS = new EmptyJaasCredentials();
-
-  String appCookie;
-
-  /**
-   * Utility method to exercise AppCookieManager directly
-   * @param args element 0 of args should be a URL to a Hadoop service protected by SPNego
-   * @throws IOException in case of errors
-   */
-  public static void main(String[] args) throws IOException {
-    HttpUriRequest outboundRequest = new HttpGet(args[0]);
-    new AppCookieManager().getAppCookie(outboundRequest, false);
-  }
-
-  public AppCookieManager() {
-  }
-
-  /**
-   * Fetches the hadoop.auth cookie from the Hadoop service, authenticating via SPNego
-   * 
-   * @param outboundRequest
-   *          outgoing request
-   * @param refresh
-   *          flag indicating whether to refresh the cached cookie
-   * @return the hadoop.auth cookie obtained from the Hadoop service via SPNego
-   * @throws IOException
-   *           in case of errors
-   */
-  public String getAppCookie(HttpUriRequest outboundRequest, boolean refresh)
-      throws IOException {
-
-    URI uri = outboundRequest.getURI();
-    String scheme = uri.getScheme();
-    String host = uri.getHost();
-    int port = uri.getPort();
-    String path = uri.getPath();
-    if (!refresh) {
-      if (appCookie != null) {
-        return appCookie;
-      }
-    }
-
-    DefaultHttpClient client = new DefaultHttpClient();
-    SPNegoSchemeFactory spNegoSF = new SPNegoSchemeFactory(
-    /* stripPort */true);
-    // spNegoSF.setSpengoGenerator(new BouncySpnegoTokenGenerator());
-    client.getAuthSchemes().register(AuthPolicy.SPNEGO, spNegoSF);
-    client.getCredentialsProvider().setCredentials(
-        new AuthScope(/* host */null, /* port */-1, /* realm */null),
-        EMPTY_JAAS_CREDENTIALS);
-
-    clearAppCookie();
-    String hadoopAuthCookie = null;
-    HttpResponse httpResponse = null;
-    try {
-      HttpHost httpHost = new HttpHost(host, port, scheme);
-      HttpRequest httpRequest = new HttpOptions(path);
-      httpResponse = client.execute(httpHost, httpRequest);
-      Header[] headers = httpResponse.getHeaders(SET_COOKIE);
-      hadoopAuthCookie = getHadoopAuthCookieValue(headers);
-      if (hadoopAuthCookie == null) {
-        LOG.failedSPNegoAuthn(uri.toString());
-        auditor.audit( Action.AUTHENTICATION, uri.toString(), ResourceType.URI, ActionOutcome.FAILURE );
-        throw new IOException(
-            "SPNego authn failed, can not get hadoop.auth cookie");
-      }
-    } finally {
-      if (httpResponse != null) {
-        HttpEntity entity = httpResponse.getEntity();
-        if (entity != null) {
-          entity.getContent().close();
-        }
-      }
-
-    }
-    LOG.successfulSPNegoAuthn(uri.toString());
-    auditor.audit( Action.AUTHENTICATION, uri.toString(), ResourceType.URI, ActionOutcome.SUCCESS);
-    hadoopAuthCookie = HADOOP_AUTH_EQ + quote(hadoopAuthCookie);
-    setAppCookie(hadoopAuthCookie);
-    return appCookie;
-  }
-
-  /**
-   * Returns the cached app cookie
-   * 
-   * @return the cached app cookie, can be null
-   */
-  public String getCachedAppCookie() {
-    return appCookie;
-  }
-  
-  private void setAppCookie(String appCookie) {
-    this.appCookie = appCookie;
-  }
-
-  private void clearAppCookie() {
-    appCookie = null;
-  }
-  
-  static String quote(String s) {
-    return s == null ? s : "\"" + s + "\"";
-  }
-
-  static String getHadoopAuthCookieValue(Header[] headers) {
-    if (headers == null) {
-      return null;
-    }
-    for (Header header : headers) {
-      HeaderElement[] elements = header.getElements();
-      for (HeaderElement element : elements) {
-        String cookieName = element.getName();
-        if (cookieName.equals(HADOOP_AUTH)) {
-          if (element.getValue() != null) {
-            String trimmedVal = element.getValue().trim();
-            if (!trimmedVal.isEmpty()) {
-              return trimmedVal;
-            }
-          }
-        }
-      }
-    }
-    return null;
-  }
-
-  private static class EmptyJaasCredentials implements Credentials {
-
-    public String getPassword() {
-      return null;
-    }
-
-    public Principal getUserPrincipal() {
-      return null;
-    }
-
-  }
-
-}

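For orientation, the SPNego retry flow that consumes this class reappears
below in HttpClientDispatch.executeKerberosDispatch. The following is a
minimal standalone sketch of that flow, built only from HttpClient 4.x APIs
already used in this commit; the KerberosRetrySketch class name is
illustrative and not part of the commit.

    import java.io.IOException;

    import org.apache.http.Header;
    import org.apache.http.HttpResponse;
    import org.apache.http.client.methods.HttpUriRequest;
    import org.apache.http.impl.client.DefaultHttpClient;
    import org.apache.http.message.BasicHeader;

    // Illustrative sketch: shows how a dispatch might drive AppCookieManager.
    public class KerberosRetrySketch {

      private final AppCookieManager cookies = new AppCookieManager();

      HttpResponse execute( HttpUriRequest request ) throws IOException {
        DefaultHttpClient client = new DefaultHttpClient();
        // Attach the cached hadoop.auth cookie if one is available.
        String appCookie = cookies.getCachedAppCookie();
        if( appCookie != null ) {
          request.addHeader( new BasicHeader( "Cookie", appCookie ) );
        }
        HttpResponse response = client.execute( request );
        // On 401 with WWW-Authenticate: Negotiate, refresh the cookie and retry once.
        if( response.getStatusLine().getStatusCode() == 401 ) {
          Header[] auth = response.getHeaders( "WWW-Authenticate" );
          if( auth.length > 0 && auth[0].getValue().trim().startsWith( "Negotiate" ) ) {
            appCookie = cookies.getAppCookie( request, true );
            request.removeHeaders( "Cookie" );
            request.addHeader( new BasicHeader( "Cookie", appCookie ) );
            response = new DefaultHttpClient().execute( request );
          }
        }
        return response;
      }
    }
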
http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/CappedBufferHttpEntity.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/CappedBufferHttpEntity.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/CappedBufferHttpEntity.java
deleted file mode 100644
index d5f7563..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/CappedBufferHttpEntity.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.dispatch;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.http.HttpEntity;
-import org.apache.http.annotation.NotThreadSafe;
-import org.apache.http.entity.HttpEntityWrapper;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-@NotThreadSafe
-public class CappedBufferHttpEntity extends HttpEntityWrapper {
-
-  public static final int DEFAULT_BUFFER_SIZE = 4096;
-
-  private int replayWriteIndex;
-  private int replayWriteLimit;
-  private byte[] replayBuffer;
-  private InputStream wrappedStream;
-
-  public CappedBufferHttpEntity( final HttpEntity entity, int bufferSize ) throws IOException {
-    super( entity );
-    this.wrappedStream = null;
-    this.replayWriteIndex = -1;
-    if( !entity.isRepeatable() ) {
-      this.replayBuffer = new byte[ bufferSize ];
-      this.replayWriteLimit = bufferSize-1;
-    } else {
-      this.replayBuffer = null;
-    }
-  }
-
-  public CappedBufferHttpEntity( final HttpEntity entity ) throws IOException {
-    this( entity, DEFAULT_BUFFER_SIZE );
-  }
-
-  @Override
-  public boolean isRepeatable() {
-    return true;
-  }
-
-  @Override
-  public boolean isStreaming() {
-    return wrappedEntity.isStreaming();
-  }
-
-  @Override
-  public boolean isChunked() {
-    return wrappedEntity.isChunked();
-  }
-
-  @Override
-  public long getContentLength() {
-    return wrappedEntity.getContentLength();
-  }
-
-  // This will throw an IOException if an attempt is made to getContent a second time after
-  // more bytes than the buffer can hold have been read on the first stream.
-  @Override
-  public InputStream getContent() throws IOException {
-    // If the wrapped entity is repeatable, return its content directly.
-    if( replayBuffer == null ) {
-      return wrappedEntity.getContent();
-    // Else serve a replay stream backed by the capped buffer.
-    } else {
-      if( wrappedStream == null ) {
-         wrappedStream = wrappedEntity.getContent();
-      }
-      return new ReplayStream();
-    }
-  }
-
-  @Override
-  public void writeTo( final OutputStream stream ) throws IOException {
-    IOUtils.copy( getContent(), stream );
-  }
-
-  @Override
-  public void consumeContent() throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  private class ReplayStream extends InputStream {
-
-    private int replayReadIndex = -1;
-
-    @Override
-    public int read() throws IOException {
-      int b;
-      // If we can read from the buffer do so.
-      if( replayReadIndex < replayWriteIndex ) {
-        b = replayBuffer[ ++replayReadIndex ];
-      } else {
-        b = wrappedStream.read();
-        // If the underlying stream is not closed.
-        if( b > -1 ) {
-          if( replayWriteIndex < replayWriteLimit ) {
-            replayBuffer[ ++replayWriteIndex ] = (byte)b;
-            replayReadIndex++;
-          } else {
-            throw new IOException("Hit replay buffer max limit");
-          }
-        }
-      }
-      return b;
-    }
-
-    public int read( byte buffer[], int offset, int limit ) throws IOException {
-      int count = -1;
-      // If we can read from the buffer do so.
-      if( replayReadIndex < replayWriteIndex ) {
-        count = replayWriteIndex - replayReadIndex;
-        count = Math.min( limit, count );
-        System.arraycopy( replayBuffer, replayReadIndex+1, buffer, offset, count );
-        replayReadIndex += count;
-      } else {
-        count = wrappedStream.read( buffer, offset, limit );
-        // If the underlying stream is not closed.
-        if( count > -1 ) {
-          if( replayWriteIndex+count < replayWriteLimit ) {
-            System.arraycopy( buffer, offset, replayBuffer, replayWriteIndex+1, count );
-            replayReadIndex += count;
-            replayWriteIndex += count;
-          } else {
-            throw new IOException("Hit replay buffer max limit");
-          }
-        }
-      }
-      return count;
-    }
-
-  }
-
-}

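The replay semantics of this entity can be summarized in a short sketch:
a non-repeatable entity may be consumed more than once as long as the total
bytes read stay under the cap, while overflow surfaces as an IOException.
The cap and payload values below are arbitrary, and the ReplaySketch class
name is illustrative, not part of the commit.

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.http.entity.BasicHttpEntity;

    public class ReplaySketch {

      public static void main( String[] args ) throws IOException {
        // A 20 byte cap comfortably holds the 10 byte payload, so it replays.
        CappedBufferHttpEntity replay = new CappedBufferHttpEntity( entity( "0123456789" ), 20 );
        drain( replay.getContent() ); // first consumer
        drain( replay.getContent() ); // second consumer replays from the buffer

        // A 5 byte cap overflows while draining the 10 byte payload.
        replay = new CappedBufferHttpEntity( entity( "0123456789" ), 5 );
        try {
          drain( replay.getContent() );
        } catch( IOException expected ) {
          // "Hit replay buffer max limit"
        }
      }

      private static BasicHttpEntity entity( String data ) throws IOException {
        BasicHttpEntity basic = new BasicHttpEntity();
        basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
        return basic;
      }

      private static void drain( InputStream stream ) throws IOException {
        while( stream.read() >= 0 ) { /* discard */ }
      }
    }
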
http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatch.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatch.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatch.java
deleted file mode 100644
index 0e0ffe8..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatch.java
+++ /dev/null
@@ -1,287 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.dispatch;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URISyntaxException;
-
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.hadoop.gateway.GatewayMessages;
-import org.apache.hadoop.gateway.GatewayResources;
-import org.apache.hadoop.gateway.audit.api.Action;
-import org.apache.hadoop.gateway.audit.api.ActionOutcome;
-import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
-import org.apache.hadoop.gateway.audit.api.Auditor;
-import org.apache.hadoop.gateway.audit.api.ResourceType;
-import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
-import org.apache.http.Header;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.HttpStatus;
-import org.apache.http.client.ClientProtocolException;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpOptions;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.InputStreamEntity;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.message.BasicHeader;
-
-/**
- *
- */
-public class HttpClientDispatch extends AbstractGatewayDispatch {
-  
-  private static final String REPLAY_BUFFER_SIZE = "replayBufferSize";
-  
-  // private static final String CT_APP_WWW_FORM_URL_ENCODED = "application/x-www-form-urlencoded";
-  // private static final String CT_APP_XML = "application/xml";
-  private static final String Q_DELEGATION_EQ = "?delegation=";
-  private static final String AMP_DELEGATION_EQ = "&delegation=";
-  private static final String COOKIE = "Cookie";
-  private static final String SET_COOKIE = "Set-Cookie";
-  private static final String WWW_AUTHENTICATE = "WWW-Authenticate";
-  private static final String NEGOTIATE = "Negotiate";
-
-  private static GatewayMessages LOG = MessagesFactory.get( GatewayMessages.class );
-  private static GatewayResources RES = ResourcesFactory.get( GatewayResources.class );
-  private static Auditor auditor = AuditServiceFactory.getAuditService().getAuditor( AuditConstants.DEFAULT_AUDITOR_NAME,
-          AuditConstants.KNOX_SERVICE_NAME, AuditConstants.KNOX_COMPONENT_NAME );
-  private static final int DEFAULT_REPLAY_BUFFER_SIZE =  4 * 1024; // 4K
-
-  private AppCookieManager appCookieManager = new AppCookieManager();
-  
-  private static final String REPLAY_BUFFER_SIZE_PARAM = "replayBufferSize";
-  
-  private int replayBufferSize = 0;
-  
-  @Override
-  public void init( FilterConfig filterConfig ) throws ServletException {
-    super.init(filterConfig);
-    String replayBufferSizeString = filterConfig.getInitParameter( REPLAY_BUFFER_SIZE_PARAM );
-    if ( replayBufferSizeString != null ) {
-      setReplayBufferSize(Integer.valueOf(replayBufferSizeString));
-    }
-  }
-  
-  protected void executeRequest(
-      HttpUriRequest outboundRequest,
-      HttpServletRequest inboundRequest,
-      HttpServletResponse outboundResponse )
-          throws IOException {
-    LOG.dispatchRequest( outboundRequest.getMethod(), outboundRequest.getURI() );
-    DefaultHttpClient client = new DefaultHttpClient();
-
-    HttpResponse inboundResponse = null;
-    try {
-      String query = outboundRequest.getURI().getQuery();
-      if (!"true".equals(System.getProperty(GatewayConfig.HADOOP_KERBEROS_SECURED))) {
-        // Hadoop cluster not Kerberos enabled
-        inboundResponse = client.execute(outboundRequest);
-      } else if (query != null && (query.contains(Q_DELEGATION_EQ) ||
-          query.contains(AMP_DELEGATION_EQ))) {
-        // query string carries a delegation token
-        inboundResponse = client.execute(outboundRequest);
-      } else { 
-        // Kerberos secured, no delegation token in query string
-        inboundResponse = executeKerberosDispatch(outboundRequest, client);
-      }
-    } catch (IOException e) {
-      // we do not want to expose backend host/port endpoints to clients, see JIRA KNOX-58
-      LOG.dispatchServiceConnectionException( outboundRequest.getURI(), e );
-      auditor.audit( Action.DISPATCH, outboundRequest.getURI().toString(), ResourceType.URI, ActionOutcome.FAILURE );
-      throw new IOException( RES.dispatchConnectionError() );
-    } finally {
-      if (inboundResponse != null) {
-        int statusCode = inboundResponse.getStatusLine().getStatusCode();
-        if( statusCode != 201 ) {
-          LOG.dispatchResponseStatusCode( statusCode );
-        } else {
-          Header location = inboundResponse.getFirstHeader( "Location" );
-          if( location == null ) {
-            LOG.dispatchResponseStatusCode( statusCode );
-          } else {
-            LOG.dispatchResponseCreatedStatusCode( statusCode, location.getValue() );
-          }
-        }
-        auditor.audit( Action.DISPATCH, outboundRequest.getURI().toString(), ResourceType.URI, ActionOutcome.SUCCESS, RES.responseStatus( statusCode ) );
-      } else {
-        auditor.audit( Action.DISPATCH, outboundRequest.getURI().toString(), ResourceType.URI, ActionOutcome.UNAVAILABLE );
-      }
-      
-    }
-
-    // Copy the client response headers to the server response.
-    outboundResponse.setStatus( inboundResponse.getStatusLine().getStatusCode() );
-    Header[] headers = inboundResponse.getAllHeaders();
-    for( Header header : headers ) {
-      String name = header.getName();
-      if (name.equals(SET_COOKIE) || name.equals(WWW_AUTHENTICATE)) {
-        continue;
-      }
-      String value = header.getValue();
-      outboundResponse.addHeader( name, value );
-    }
-
-    HttpEntity entity = inboundResponse.getEntity();
-    if( entity != null ) {
-      Header contentType = entity.getContentType();
-      if( contentType != null ) {
-        outboundResponse.setContentType( contentType.getValue() );
-      }
-//KM[ If this is set here it ends up setting the content length to the content returned from the server.
-// This length might not match if the content is rewritten.
-//      long contentLength = entity.getContentLength();
-//      if( contentLength <= Integer.MAX_VALUE ) {
-//        outboundResponse.setContentLength( (int)contentLength );
-//      }
-//]
-      writeResponse( inboundRequest, outboundResponse, entity.getContent() );
-    }
-  }
-
-  private HttpResponse executeKerberosDispatch(HttpUriRequest outboundRequest,
-      DefaultHttpClient client) throws IOException, ClientProtocolException {
-    HttpResponse inboundResponse;
-    outboundRequest.removeHeaders(COOKIE);
-    String appCookie = appCookieManager.getCachedAppCookie();
-    if (appCookie != null) {
-      outboundRequest.addHeader(new BasicHeader(COOKIE, appCookie));
-    }
-    inboundResponse = client.execute(outboundRequest);
-    // if inboundResponse has status 401 and header WWW-Authenticate: Negotiate,
-    // refresh the hadoop.auth cookie and attempt one more time
-    int statusCode = inboundResponse.getStatusLine().getStatusCode();
-    if (statusCode == HttpStatus.SC_UNAUTHORIZED ) {
-      Header[] wwwAuthHeaders = inboundResponse.getHeaders(WWW_AUTHENTICATE) ;
-      if (wwwAuthHeaders != null && wwwAuthHeaders.length != 0 && 
-          wwwAuthHeaders[0].getValue().trim().startsWith(NEGOTIATE)) {
-        appCookie = appCookieManager.getAppCookie(outboundRequest, true);
-        outboundRequest.removeHeaders(COOKIE);
-        outboundRequest.addHeader(new BasicHeader(COOKIE, appCookie));
-        client = new DefaultHttpClient();
-        inboundResponse = client.execute(outboundRequest);
-      } else {
-        // no supported authentication type found
-        // we would let the original response propagate
-      }
-    } else {
-      // not a 401 Unauthorized status code
-      // we would let the original response propagate
-    }
-    return inboundResponse;
-  }
-
-  protected HttpEntity createRequestEntity(HttpServletRequest request)
-      throws IOException {
-
-    String contentType = request.getContentType();
-    int contentLength = request.getContentLength();
-    InputStream contentStream = request.getInputStream();
-
-    HttpEntity entity;
-    if( contentType == null ) {
-      entity = new InputStreamEntity( contentStream, contentLength );
-    } else {
-      entity = new InputStreamEntity(contentStream, contentLength, ContentType.parse( contentType ) );
-    }
-
- 
-    if ("true".equals(System.getProperty(GatewayConfig.HADOOP_KERBEROS_SECURED))) {  
-   
-      //Check if delegation token is supplied in the request
-      boolean delegationTokenPresent = false;
-      String queryString = request.getQueryString();
-      if (queryString != null) {
-        delegationTokenPresent = queryString.startsWith("delegation=") || 
-            queryString.contains("&delegation=");
-      }     
-      if (!delegationTokenPresent && getReplayBufferSize() > 0 ) {
-          entity = new CappedBufferHttpEntity( entity, getReplayBufferSize() * 1024 );
-      }
-    }
-
-    return entity;
-  }
-
-  @Override
-  public void doGet( URI url, HttpServletRequest request, HttpServletResponse response )
-      throws IOException, URISyntaxException {
-    HttpGet method = new HttpGet( url );
-    // https://issues.apache.org/jira/browse/KNOX-107 - Service URLs not rewritten for WebHDFS GET redirects
-    method.getParams().setBooleanParameter( "http.protocol.handle-redirects", false );
-    copyRequestHeaderFields( method, request );
-    executeRequest( method, request, response );
-  }
-
-  @Override
-  public void doOptions( URI url, HttpServletRequest request, HttpServletResponse response )
-      throws IOException, URISyntaxException {
-    HttpOptions method = new HttpOptions( url );
-    executeRequest( method, request, response );
-  }
-
-  @Override
-  public void doPut( URI url, HttpServletRequest request, HttpServletResponse response )
-      throws IOException, URISyntaxException {
-    HttpPut method = new HttpPut( url );
-    HttpEntity entity = createRequestEntity( request );
-    method.setEntity( entity );
-    copyRequestHeaderFields( method, request );
-    executeRequest( method, request, response );
-  }
-
-  @Override
-  public void doPost( URI url, HttpServletRequest request, HttpServletResponse response )
-      throws IOException, URISyntaxException {
-    HttpPost method = new HttpPost( url );
-    HttpEntity entity = createRequestEntity( request );
-    method.setEntity( entity );
-    copyRequestHeaderFields( method, request );
-    executeRequest( method, request, response );
-  }
-
-  @Override
-  public void doDelete( URI url, HttpServletRequest request, HttpServletResponse response )
-      throws IOException, URISyntaxException {
-    HttpDelete method = new HttpDelete( url );
-    copyRequestHeaderFields( method, request );
-    executeRequest( method, request, response );
-  }
-
-  int getReplayBufferSize() {
-    return replayBufferSize;
-  }
-  
-  void setReplayBufferSize(int size) {
-    replayBufferSize = size;
-  }
-  
-}

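Two configuration details in the dispatch above are easy to miss: the
replayBufferSize init parameter is interpreted in kilobytes
(createRequestEntity multiplies it by 1024), and request buffering is only
applied for Kerberos-secured clusters when no delegation token is present in
the query string. Below is a hedged sketch of wiring the parameter through a
FilterConfig, using an EasyMock stub as the tests later in this diff do; it
assumes AbstractGatewayDispatch.init tolerates the nice mock's null defaults,
and the ReplayBufferSizeSketch class name is illustrative.

    import javax.servlet.FilterConfig;
    import javax.servlet.ServletException;

    import org.easymock.EasyMock;

    public class ReplayBufferSizeSketch {

      static HttpClientDispatch newDispatch() throws ServletException {
        // A nice mock answers null for any parameter not stubbed explicitly.
        FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
        EasyMock.expect( config.getInitParameter( "replayBufferSize" ) ).andReturn( "16" );
        EasyMock.replay( config );

        HttpClientDispatch dispatch = new HttpClientDispatch();
        dispatch.init( config ); // getReplayBufferSize() == 16, i.e. a 16 KB cap
        return dispatch;
      }
    }
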
http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/AppCookieManagerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/AppCookieManagerTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/AppCookieManagerTest.java
deleted file mode 100644
index 94ef797..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/AppCookieManagerTest.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.dispatch;
-
-
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-
-import org.apache.http.Header;
-import org.apache.http.message.BasicHeader;
-import org.junit.Test;
-
-public class AppCookieManagerTest {
-
-  @Test
-  public void getCachedAppCookie() {
-    assertNull(new AppCookieManager().getCachedAppCookie());
-  }
-
-  @Test
-  public void getHadoopAuthCookieValueWithNullHeaders() {
-    assertNull(AppCookieManager.getHadoopAuthCookieValue(null));
-  }
-  
-  @Test
-  public void getHadoopAuthCookieValueWithEmptyHeaders() {
-    assertNull(AppCookieManager.getHadoopAuthCookieValue(new Header[0]));
-  }
-  
-  @Test
-  public void getHadoopAuthCookieValueWithValidHeaders() {
-    Header[] headers = new Header[1];
-    headers[0] = new BasicHeader("Set-Cookie", AppCookieManager.HADOOP_AUTH + "=dummyvalue");
-    assertNotNull(AppCookieManager.getHadoopAuthCookieValue(headers));
-  }
-  
-}
-

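To make the Set-Cookie parsing exercised above concrete, here is a small
sketch; the header value mirrors the test fixture, the printed result is
"dummyvalue", and the CookieParseSketch class name is illustrative.

    import org.apache.http.Header;
    import org.apache.http.message.BasicHeader;

    public class CookieParseSketch {

      public static void main( String[] args ) {
        Header[] headers = { new BasicHeader( "Set-Cookie", "hadoop.auth=dummyvalue" ) };
        // Returns the trimmed value of the hadoop.auth element, or null if absent.
        System.out.println( AppCookieManager.getHadoopAuthCookieValue( headers ) );
      }
    }
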
http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/CappedBufferHttpEntityTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/CappedBufferHttpEntityTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/CappedBufferHttpEntityTest.java
deleted file mode 100644
index e558ea8..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/CappedBufferHttpEntityTest.java
+++ /dev/null
@@ -1,854 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.dispatch;
-
-import org.apache.http.entity.BasicHttpEntity;
-import org.apache.http.entity.BufferedHttpEntity;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.InputStreamEntity;
-import org.junit.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.charset.Charset;
-
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.fail;
-
-public class CappedBufferHttpEntityTest {
-
-  private static Charset UTF8 = Charset.forName( "UTF-8" );
-
-  // Variables
-  // Consumers: C1, C2
-  // Reads: FC - Full Content, PC - Partial Content, AC - Any Content
-  // Reads: IB - In Buffer, OB - Overflow Buffer
-  // Close: XC
-  // Expect: EE
-
-  // Test Cases
-  // C1 FC
-  //   C1 FC/IB.
-  //   C1 FC/OB.
-  //   C1 FC/IB; C2 FC.
-  //   C1 FC/OB; C2 AC; EE
-  //   C1 FC/IB; C1 XC; C2 FC.
-  //   C1 FC/OB; C1 XC; C2 AC; EE
-  // C1 PC
-  //   C1 PC/IB.
-  //   C1 PC/OB.
-  //   C1 PC/IB; C2 FC.
-  //   C1 PC/OB; C2 AC; EE
-  //   C1 PC/IB; C1 XC; C2 FC.
-  //   C1 PC/OB; C1 XC; C2 AC; EE
-  // C1 C2 C1
-  //   C1 PC/IB; C2 PC/IB; C1 PC/IB; C2 PC/IB - Back and forth before buffer overflow is OK.
-  //   C1 PC/IB; C2 PC/OB; C1 AC; EE
-
-  @Test
-  public void testS__C1_FC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    String output;
-
-    output = byteRead( replay.getContent(), -1 );
-    assertThat( output, is( data ) );
-  }
-
-  @Test
-  public void testB__C1_FC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    String output;
-
-    output = blockRead( replay.getContent(), UTF8, -1, 3 );
-    assertThat( output, is( data ) );
-  }
-
-  @Test
-  public void testS__C1_FC_OB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    String output;
-
-    try {
-      output = byteRead( replay.getContent(), -1 );
-      fail("expected IOException");
-    } catch (IOException e) {
-      // expected
-    }
-  }
-
-  @Test
-  public void testB__C1_FC_OB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    String output;
-
-    try {
-      output = blockRead( replay.getContent(), UTF8, -1, 3 );
-      fail("expected IOException");
-    } catch (IOException e) {
-      // expected
-    }
-  }
-
-  @Test
-  public void testS_C1_FC_IB__C2_FC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    String output;
-
-    output = byteRead( replay.getContent(), -1 );
-    assertThat( output, is( data ) );
-
-    output = byteRead( replay.getContent(), -1 );
-    assertThat( output, is( data ) );
-  }
-
-  @Test
-  public void testB_C1_FC_IB__C2_FC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    String output;
-
-    output = blockRead( replay.getContent(), UTF8, -1, 3 );
-    assertThat( output, is( data ) );
-
-    output = blockRead( replay.getContent(), UTF8, -1, 3 );
-    assertThat( output, is( data ) );
-  }
-
-  @Test
-  public void testS_C1_FC_OB__C2_AC__EE() throws Exception {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    String output;
-
-    try {
-      output = byteRead( replay.getContent(), -1 );
-      fail( "Expected IOException" );
-    } catch( IOException e ) {
-      // Expected.
-    }
-
-  }
-
-  @Test
-  public void testB_C1_FC_OB__C2_AC__EE() throws Exception {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    String output;
-    try {
-      output = blockRead( replay.getContent(), UTF8, -1, 3 );
-      fail( "Expected IOException" );
-    } catch( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 FC/IB; C1 XC; C2 FC.
-  @Test
-  public void testS_C1_FC_IB__C1_XC__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-    stream = replay.getContent();
-    text = byteRead( stream, -1 );
-    assertThat( text, is( "0123456789" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = byteRead( stream, -1 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 FC/IB; C1 XC; C2 FC.
-  @Test
-  public void testB_C1_FC_IB__C1_XC__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, -1, 3 );
-    assertThat( text, is( "0123456789" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, -1, 3 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 FC/OB; C1 XC; C2 AC; EE
-  @Test
-  public void testS_C1_FC_OB__C1_XC__C2_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    try {
-      text = byteRead( stream, -1 );
-      fail( "Expected IOException" );
-    } catch( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 FC/OB; C1 XC; C2 AC; EE
-  @Test
-  public void testB_C1_FC_OB__C1_XC__C2_AC_EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    try {
-      text = blockRead( stream, UTF8, -1, 3 );
-      fail( "Expected IOException" );
-    } catch( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 PC/IB.
-  @Test
-  public void testS_C1_PC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = byteRead( stream, 3 );
-    assertThat( text, is( "012" ) );
-  }
-
-  //   C1 PC/IB.
-  @Test
-  public void testB_C1_PC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, 3, 3 );
-    assertThat( text, is( "012" ) );
-  }
-
-  //   C1 PC/OB.
-  @Test
-  public void testS_C1_PC_OB() throws IOException {
-
-    try {
-      String data = "0123456789";
-      BasicHttpEntity basic;
-      CappedBufferHttpEntity replay;
-      InputStream stream;
-      String text;
-
-      basic = new BasicHttpEntity();
-      basic.setContent(new ByteArrayInputStream(data.getBytes(UTF8)));
-      replay = new CappedBufferHttpEntity(basic, 5);
-      stream = replay.getContent();
-      text = byteRead(stream, -1);
-      fail("Expected IOException");
-    } catch (IOException e) {
-      // expected
-    }
-  }
-
-  //   C1 PC/OB.
-  @Test
-  public void testB_C1_PC_OB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    try {
-      text = blockRead( stream, UTF8, -1, 4 );
-      fail( "Expected IOException" );
-    } catch (IOException e) {
-      // expected
-    }
-  }
-
-  //   C1 PC/IB; C2 FC.
-  @Test
-  public void testS_C1_PC_IB__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = byteRead( stream, 4 );
-    assertThat( text, is( "0123" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = byteRead( stream, -1 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 PC/IB; C2 FC.
-  @Test
-  public void testB_C1_PC_IB__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, 4, 1 );
-    assertThat( text, is( "0123" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, -1, 7 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 PC/OB; C2 AC; EE
-  @Test
-  public void testS_C1_PC_OB__C2_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    try {
-      basic = new BasicHttpEntity();
-      basic.setContent(new ByteArrayInputStream(data.getBytes(UTF8)));
-      replay = new CappedBufferHttpEntity(basic, 5);
-
-      stream = replay.getContent();
-      text = byteRead(stream, 7);
-      assertThat(text, is("0123456"));
-      stream.close();
-      fail("Expected IOException");
-    } catch (IOException e) {
-      // Expected.
-    }
-  }
-
-  //   C1 PC/OB; C2 AC; EE
-  @Test
-  public void testB_C1_PC_OB__C2_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    try {
-      text = blockRead( stream, UTF8, 7, 2 );
-      fail("Expected IOExceptin");
-    } catch (IOException e) {
-      // expected
-    }
-  }
-
-  //   C1 PC/IB; C1 XC; C2 FC.
-  @Test
-  public void testS_C1_PC_IB__C1_XC__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = byteRead( stream, 7 );
-    assertThat( text, is( "0123456" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = byteRead( stream, -1 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 PC/IB; C1 XC; C2 FC.
-  @Test
-  public void testB_C1_PC_IB__C1_XC__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, 7, 2 );
-    assertThat( text, is( "0123456" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, -1, 7 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 PC/OB; C1 XC; C2 AC; EE
-  @Test
-  public void testS_C1_PC_OB__C1_XC__C2_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    try {
-      text = byteRead( stream, 7 );
-      fail( "Expected IOException" );
-    } catch ( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 PC/OB; C1 XC; C2 AC; EE
-  @Test
-  public void testB_C1_PC_OB__C1_XC__C2_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    try {    
-      text = blockRead( stream, UTF8, 7, 2 );
-      fail( "Expected IOException" );
-    } catch ( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 PC/IB; C2 PC/IB; C1 PC/IB; C2 PC/IB - Back and forth before buffer overflow is OK.
-  @Test
-  public void testS_C1_PC_IB__C2_PC_IB__C2_PC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream1, stream2;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-
-    stream1 = replay.getContent();
-    text = byteRead( stream1, 3 );
-    assertThat( text, is( "012" ) );
-
-    stream2 = replay.getContent();
-    text = byteRead( stream2, 4 );
-    assertThat( text, is( "0123" ) );
-
-    text = byteRead( stream1, 3 );
-    assertThat( text, is( "345" ) );
-  }
-
-  //   C1 PC/IB; C2 PC/IB; C1 PC/IB; C2 PC/IB - Back and forth before buffer overflow is OK.
-  @Test
-  public void testB_C1_PC_IB__C2_PC_IB__C2_PC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream1, stream2;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 20 );
-    stream1 = replay.getContent();
-    text = blockRead( stream1, UTF8, 3, 2 );
-    assertThat( text, is( "012" ) );
-
-    stream2 = replay.getContent();
-    text = blockRead( stream2, UTF8, 4, 3 );
-    assertThat( text, is( "0123" ) );
-
-    text = blockRead( stream1, UTF8, 3, 2 );
-    assertThat( text, is( "345" ) );
-  }
-
-  //   C1 PC/IB; C2 PC/OB; C1 AC; EE
-  @Test
-  public void testS_C1_PC_IB__C2_PC_OB__C1_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream1, stream2;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    stream1 = replay.getContent();
-    text = byteRead( stream1, 3 );
-    assertThat( text, is( "012" ) );
-
-    stream2 = replay.getContent();
-    text = byteRead( stream2, 5 );
-    assertThat( text, is( "01234" ) );
-  }
-
-  //   C1 PC/IB; C2 PC/OB; C1 AC; EE
-  @Test
-  public void testB_C1_PC_IB__C2_PC_OB__C1_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-    InputStream stream1, stream2;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    stream1 = replay.getContent();
-    text = blockRead( stream1, UTF8, 3, 2 );
-    assertThat( text, is( "012" ) );
-
-    stream2 = replay.getContent();
-    try {
-      text = blockRead( stream2, UTF8, 6, 4 );
-      fail("expected IOException");
-    } catch (IOException e) {
-      // expected
-    }
- 
-  }
-
-  @Test
-  public void testWriteTo() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-    try {
-      replay.writeTo( buffer );
-      fail("expected IOException");
-    } catch (IOException e) {
-      // expected
-    }
-  }
-
-  @Test
-  public void testIsRepeatable() throws Exception {
-    String text = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( text.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic );
-    assertThat( replay.isRepeatable(), is( true ) );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( text.getBytes( UTF8 ) ) );
-    BufferedHttpEntity buffered = new BufferedHttpEntity( basic );
-    replay = new CappedBufferHttpEntity( buffered );
-    assertThat( replay.isRepeatable(), is( true ) );
-  }
-
-  @Test
-  public void testIsChunked() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-    assertThat( replay.isChunked(), is( false ) );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    basic.setChunked( true );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-    assertThat( replay.isChunked(), is( true ) );
-  }
-
-  @Test
-  public void testGetContentLength() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-    assertThat( replay.getContentLength(), is( -1L ) );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    basic.setContentLength( input.length() );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-    assertThat( replay.getContentLength(), is( 10L ) );
-  }
-
-  @Test
-  public void testGetContentType() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-    assertThat( replay.getContentType(), nullValue() );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    basic.setContentType( ContentType.APPLICATION_JSON.getMimeType() );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-    assertThat( replay.getContentType().getValue(), is( "application/json" ) );
-  }
-
-  @Test
-  public void testGetContentEncoding() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-    assertThat( replay.getContentEncoding(), nullValue() );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    basic.setContentEncoding( "UTF-8" );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-    assertThat( replay.getContentEncoding().getValue(), is( "UTF-8" ) );
-  }
-
-  @Test
-  public void testIsStreaming() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    InputStreamEntity streaming;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-    assertThat( replay.isStreaming(), is( true ) );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( null );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-    assertThat( replay.isStreaming(), is( false ) );
-
-    streaming = new InputStreamEntity( new ByteArrayInputStream( input.getBytes( UTF8 ) ), 10, ContentType.TEXT_PLAIN );
-    replay = new CappedBufferHttpEntity( streaming, 5 );
-    assertThat( replay.isStreaming(), is( true ) );
-  }
-
-  @Test
-  public void testConsumeContent() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    CappedBufferHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new CappedBufferHttpEntity( basic, 5 );
-
-    try {
-      replay.consumeContent();
-      fail( "Expected UnsupportedOperationException" );
-    } catch ( UnsupportedOperationException e ) {
-      // Expected.
-    }
-  }
-
-  private static String byteRead( InputStream stream, int total ) throws IOException {
-    StringBuilder string = null;
-    int c = 0;
-    if( total < 0 ) {
-      total = Integer.MAX_VALUE;
-    }
-    while( total > 0 && c >= 0 ) {
-      c = stream.read();
-      if( c >= 0 ) {
-        total--;
-        if( string == null ) {
-          string = new StringBuilder();
-        }
-        string.append( (char)c );
-      }
-    }
-    return string == null ? null : string.toString();
-  }
-
-  private static String blockRead( InputStream stream, Charset charset, int total, int chunk ) throws IOException {
-    StringBuilder string = null;
-    byte buffer[] = new byte[ chunk ];
-    int count = 0;
-    if( total < 0 ) {
-      total = Integer.MAX_VALUE;
-    }
-    while( total > 0 && count >= 0 ) {
-      count = stream.read( buffer, 0, Math.min( buffer.length, total ) );
-      if( count >= 0 ) {
-        total -= count;
-        if( string == null ) {
-          string = new StringBuilder();
-        }
-        string.append( new String( buffer, 0, count, charset ) );
-      }
-    }
-    return string == null ? null : string.toString();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatchTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatchTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatchTest.java
deleted file mode 100644
index 660a627..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatchTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.dispatch;
-
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.instanceOf;
-import static org.hamcrest.Matchers.not;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.UnknownHostException;
-
-import javax.servlet.ServletInputStream;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.http.HttpEntity;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.params.BasicHttpParams;
-import org.easymock.EasyMock;
-import org.easymock.IAnswer;
-import org.junit.Test;
-
-public class HttpClientDispatchTest {
-
-  // Make sure Hadoop cluster topology isn't exposed to client when there is a connectivity issue.
-  @Test
-  public void testJiraKnox58() throws URISyntaxException, IOException {
-
-    URI uri = new URI( "http://unreachable-host" );
-    BasicHttpParams params = new BasicHttpParams();
-
-    HttpUriRequest outboundRequest = EasyMock.createNiceMock( HttpUriRequest.class );
-    EasyMock.expect( outboundRequest.getMethod() ).andReturn( "GET" ).anyTimes();
-    EasyMock.expect( outboundRequest.getURI() ).andReturn( uri  ).anyTimes();
-    EasyMock.expect( outboundRequest.getParams() ).andReturn( params ).anyTimes();
-
-    HttpServletRequest inboundRequest = EasyMock.createNiceMock( HttpServletRequest.class );
-
-    HttpServletResponse outboundResponse = EasyMock.createNiceMock( HttpServletResponse.class );
-    EasyMock.expect( outboundResponse.getOutputStream() ).andAnswer( new IAnswer<ServletOutputStream>() {
-      @Override
-      public ServletOutputStream answer() throws Throwable {
-        return new ServletOutputStream() {
-          @Override
-          public void write( int b ) throws IOException {
-            throw new IOException( "unreachable-host" );
-          }
-        };
-      }
-    });
-
-    EasyMock.replay( outboundRequest, inboundRequest, outboundResponse );
-
-    HttpClientDispatch dispatch = new HttpClientDispatch();
-    try {
-      dispatch.executeRequest( outboundRequest, inboundRequest, outboundResponse );
-      fail( "Should have thrown IOException" );
-    } catch( IOException e ) {
-      assertThat( e.getMessage(), not( containsString( "unreachable-host" ) ) );
-      assertThat( e, not( instanceOf( UnknownHostException.class ) ) ) ;
-      assertThat( "Message needs meaningful content.", e.getMessage().trim().length(), greaterThan( 12 ) );
-    }
-  }
-
-  @Test
-  public void testCallToSecureClusterWithDelegationToken() throws URISyntaxException, IOException {
-    System.setProperty(GatewayConfig.HADOOP_KERBEROS_SECURED, "true");
-    HttpClientDispatch httpClientDispatch = new HttpClientDispatch();
-    ServletInputStream inputStream = EasyMock.createNiceMock( ServletInputStream.class );
-    HttpServletRequest inboundRequest = EasyMock.createNiceMock( HttpServletRequest.class );
-    EasyMock.expect(inboundRequest.getQueryString()).andReturn( "delegation=123").anyTimes();
-    EasyMock.expect(inboundRequest.getInputStream()).andReturn( inputStream).anyTimes();
-    EasyMock.replay( inboundRequest );
-    HttpEntity httpEntity = httpClientDispatch.createRequestEntity(inboundRequest);
-    System.setProperty(GatewayConfig.HADOOP_KERBEROS_SECURED, "false");
-    assertFalse("buffering in the presence of delegation token", 
-        (httpEntity instanceof CappedBufferHttpEntity));
-  }
-  
-  @Test
-  public void testCallToSecureClusterWithoutDelegationToken() throws URISyntaxException, IOException {
-    System.setProperty(GatewayConfig.HADOOP_KERBEROS_SECURED, "true");
-    HttpClientDispatch httpClientDispatch = new HttpClientDispatch();
-    httpClientDispatch.setReplayBufferSize(10);
-    ServletInputStream inputStream = EasyMock.createNiceMock( ServletInputStream.class );
-    HttpServletRequest inboundRequest = EasyMock.createNiceMock( HttpServletRequest.class );
-    EasyMock.expect(inboundRequest.getQueryString()).andReturn( "a=123").anyTimes();
-    EasyMock.expect(inboundRequest.getInputStream()).andReturn( inputStream).anyTimes();
-    EasyMock.replay( inboundRequest );
-    HttpEntity httpEntity = httpClientDispatch.createRequestEntity(inboundRequest);
-    System.setProperty(GatewayConfig.HADOOP_KERBEROS_SECURED, "false");
-    assertTrue("not buffering in the absence of delegation token", 
-        (httpEntity instanceof CappedBufferHttpEntity));
-  }
-  
-  
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributor.java b/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributor.java
index d0923ca..0e6915f 100644
--- a/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributor.java
+++ b/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributor.java
@@ -115,7 +115,7 @@ public class HBaseDeploymentContributor extends ServiceDeploymentContributorBase
 
   private void addDispatchFilter(
       DeploymentContext context, Service service, ResourceDescriptor resource ) {
-    context.contributeFilter( service, resource, "dispatch", null, null );
+    context.contributeFilter( service, resource, "dispatch", "http-client", null );
   }
 
   private String getQualifiedName() {

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-service-hive/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-service-hive/pom.xml b/gateway-service-hive/pom.xml
index 5ce53d3..6998411 100644
--- a/gateway-service-hive/pom.xml
+++ b/gateway-service-hive/pom.xml
@@ -45,7 +45,15 @@
             <groupId>${gateway-group}</groupId>
             <artifactId>gateway-provider-rewrite</artifactId>
         </dependency>
-
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java
index d0af1be..eb3779f 100644
--- a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java
+++ b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java
@@ -79,6 +79,6 @@ public class HiveDeploymentContributor extends ServiceDeploymentContributorBase
   }
 
   private void addDispatchFilter( DeploymentContext context, Service service, ResourceDescriptor resource ) {
-    context.contributeFilter( service, resource, "dispatch", null, null );
+    context.contributeFilter( service, resource, "dispatch", "hive", null );
   }
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributor.java b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributor.java
new file mode 100644
index 0000000..3cb58ab
--- /dev/null
+++ b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributor.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hive;
+
+import java.util.List;
+
+import org.apache.hadoop.gateway.deploy.DeploymentContext;
+import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
+import org.apache.hadoop.gateway.descriptor.FilterDescriptor;
+import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
+import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
+import org.apache.hadoop.gateway.topology.Provider;
+import org.apache.hadoop.gateway.topology.Service;
+
+/**
+ * Contributes the Hive specific dispatch filter (role "dispatch",
+ * name "hive") and its replay buffer configuration to a deployment.
+ */
+public class HiveDispatchDeploymentContributor extends ProviderDeploymentContributorBase {
+  
+  private static final String REPLAY_BUFFER_SIZE_PARAM = "replayBufferSize";
+  
+  // Default replay buffer size in KB
+  public static final String DEFAULT_REPLAY_BUFFER_SIZE = "4";
+
+  @Override
+  public String getRole() {
+    return "dispatch";
+  }
+
+  @Override
+  public String getName() {
+    return "hive";
+  }
+
+  @Override
+  public void contributeFilter( DeploymentContext context, Provider provider, Service service, ResourceDescriptor resource, List<FilterParamDescriptor> params ) {
+    String replayBufferSize = DEFAULT_REPLAY_BUFFER_SIZE;
+    if (params != null) {
+      for (FilterParamDescriptor paramDescriptor : params) {
+        if (REPLAY_BUFFER_SIZE_PARAM.equals( paramDescriptor.name() )) {
+          replayBufferSize = paramDescriptor.value();
+          break;
+        }
+      }
+    }
+    FilterDescriptor filter = resource.addFilter().name( getName() ).role( getRole() ).impl( HiveHttpClientDispatch.class );
+    filter.param().name("replayBufferSize").value(replayBufferSize);
+    if( context.getGatewayConfig().isHadoopKerberosSecured() ) {
+      filter.param().name("kerberos").value("true");
+    }
+    else {
+      filter.param().name("basicAuthPreemptive").value("true");
+    }
+  }
+}
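
A quick way to see the new wiring end to end: the contributor above is registered through the META-INF/services entry added below and then matched by role and name during deployment. A minimal sketch of that lookup, using only the getRole()/getName() contract shown above; the real matching is done by the deployment machinery, not application code:

    import java.util.ServiceLoader;

    import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor;

    public class FindHiveDispatchSketch {
      public static void main( String[] args ) {
        // Contributors are discovered through META-INF/services and then
        // matched by role ("dispatch") and name ("hive") at deploy time.
        for( ProviderDeploymentContributor contributor :
            ServiceLoader.load( ProviderDeploymentContributor.class ) ) {
          if( "dispatch".equals( contributor.getRole() )
              && "hive".equals( contributor.getName() ) ) {
            System.out.println( "Found " + contributor.getClass().getName() );
          }
        }
      }
    }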

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveHttpClientDispatch.java
----------------------------------------------------------------------
diff --git a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveHttpClientDispatch.java b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveHttpClientDispatch.java
new file mode 100644
index 0000000..a52b61f
--- /dev/null
+++ b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveHttpClientDispatch.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hive;
+
+import org.apache.hadoop.gateway.dispatch.HttpClientDispatch;
+import org.apache.hadoop.gateway.security.PrimaryPrincipal;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.impl.auth.BasicScheme;
+import javax.security.auth.Subject;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import java.security.AccessController;
+import java.security.Principal;
+
+/**
+ * This specialized dispatch provides Hive specific features to the
+ * default HttpClientDispatch.
+ */
+public class HiveHttpClientDispatch extends HttpClientDispatch {
+  private static final String BASIC_AUTH_PREEMPTIVE_PARAM = "basicAuthPreemptive";
+  private static final String PASSWORD_PLACEHOLDER = "*";
+  private boolean basicAuthPreemptive = false;
+
+  @Override
+  public void init( FilterConfig filterConfig ) throws ServletException {
+    super.init( filterConfig );
+    String basicAuthPreemptiveString = filterConfig.getInitParameter( BASIC_AUTH_PREEMPTIVE_PARAM );
+    if( basicAuthPreemptiveString != null ) {
+      setBasicAuthPreemptive( Boolean.parseBoolean( basicAuthPreemptiveString ) );
+    }
+  }
+
+  protected Principal getPrimaryPrincipal() {
+    Principal principal = null;
+    Subject subject = Subject.getSubject( AccessController.getContext());
+    if( subject != null ) {
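+      // Assumes the authenticated Subject always carries a PrimaryPrincipal;
+      // an empty principal set here would fail the array access below.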
+      principal = (Principal)subject.getPrincipals(PrimaryPrincipal.class).toArray()[0];
+    }
+    return principal;
+  }
+
+  protected void addCredentialsToRequest(HttpUriRequest request) {
+    if( isBasicAuthPreemptive() ) {
+      Principal principal = getPrimaryPrincipal();
+      if( principal != null ) {
+
+        UsernamePasswordCredentials credentials =
+            new UsernamePasswordCredentials( principal.getName(), PASSWORD_PLACEHOLDER );
+        
+        request.addHeader( BasicScheme.authenticate( credentials, "US-ASCII", false ) );
+      }
+    }
+  }
+
+  public void setBasicAuthPreemptive( boolean basicAuthPreemptive ) {
+    this.basicAuthPreemptive = basicAuthPreemptive;
+  }
+
+  public boolean isBasicAuthPreemptive() {
+    return basicAuthPreemptive;
+  }
+}
+
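
The preemptive path above builds the Authorization header up front, from the primary principal's name and the "*" placeholder password, rather than waiting for a 401 challenge from HiveServer2. A standalone sketch of the header it produces; the "guest" user and the host URL are illustrative only:

    import org.apache.http.Header;
    import org.apache.http.auth.UsernamePasswordCredentials;
    import org.apache.http.client.methods.HttpGet;
    import org.apache.http.impl.auth.BasicScheme;

    public class PreemptiveBasicAuthSketch {
      public static void main( String[] args ) {
        // Same BasicScheme.authenticate() call the dispatch makes.
        UsernamePasswordCredentials credentials =
            new UsernamePasswordCredentials( "guest", "*" );
        Header header = BasicScheme.authenticate( credentials, "US-ASCII", false );
        HttpGet request = new HttpGet( "http://hive-host:10001/" );
        request.addHeader( header );
        // Prints: Authorization: Basic Z3Vlc3Q6Kg==
        System.out.println( header.getName() + ": " + header.getValue() );
      }
    }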

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
----------------------------------------------------------------------
diff --git a/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor b/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
new file mode 100644
index 0000000..3762427
--- /dev/null
+++ b/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.hive.HiveDispatchDeploymentContributor

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-service-hive/src/test/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributorTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-hive/src/test/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributorTest.java b/gateway-service-hive/src/test/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributorTest.java
new file mode 100644
index 0000000..d748ea4
--- /dev/null
+++ b/gateway-service-hive/src/test/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributorTest.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hive;
+
+import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor;
+import org.junit.Test;
+
+import java.util.Iterator;
+import java.util.ServiceLoader;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.fail;
+
+public class HiveDispatchDeploymentContributorTest {
+
+  @Test
+  public void testServiceLoader() throws Exception {
+    ServiceLoader<ProviderDeploymentContributor> loader = ServiceLoader.load( ProviderDeploymentContributor.class );
+    Iterator<ProviderDeploymentContributor> iterator = loader.iterator();
+    assertThat( "Service iterator empty.", iterator.hasNext() );
+    while( iterator.hasNext() ) {
+      Object object = iterator.next();
+      if( object instanceof HiveDispatchDeploymentContributor ) {
+        return;
+      }
+    }
+    fail( "Failed to find " + HiveDispatchDeploymentContributor.class.getName() + " via service loader." );
+  }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-service-webhcat/src/main/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-webhcat/src/main/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributor.java b/gateway-service-webhcat/src/main/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributor.java
index 946a4f6..9ee7a9a 100644
--- a/gateway-service-webhcat/src/main/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributor.java
+++ b/gateway-service-webhcat/src/main/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributor.java
@@ -90,6 +90,6 @@ public class WebHCatDeploymentContributor extends ServiceDeploymentContributorBa
 
   private void addDispatchFilter(
       DeploymentContext context, Service service, ResourceDescriptor resource ) {
-    context.contributeFilter( service, resource, "dispatch", null, null );
+    context.contributeFilter( service, resource, "dispatch", "http-client", null );
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-spi/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-spi/pom.xml b/gateway-spi/pom.xml
index ef8fa3c..632ad6f 100644
--- a/gateway-spi/pom.xml
+++ b/gateway-spi/pom.xml
@@ -109,6 +109,17 @@
             <artifactId>gateway-test-utils</artifactId>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-library</artifactId>
+            <scope>test</scope>
+        </dependency>
+
     </dependencies>
 
 </project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-spi/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
new file mode 100644
index 0000000..d416326
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+import java.net.URI;
+
+
+/**
+ * Logging messages emitted by the gateway dispatch layer.
+ */
+@Messages(logger="org.apache.hadoop.gateway")
+public interface GatewayMessages {
+
+  @Message( level = MessageLevel.DEBUG, text = "Dispatch request: {0} {1}" )
+  void dispatchRequest( String method, URI uri );
+  
+  @Message( level = MessageLevel.WARN, text = "Connection exception dispatching request: {0} {1}" )
+  void dispatchServiceConnectionException( URI uri, @StackTrace(level=MessageLevel.WARN) Exception e );
+
+  @Message( level = MessageLevel.DEBUG, text = "Dispatch response status: {0}" )
+  void dispatchResponseStatusCode(int statusCode);
+
+  @Message( level = MessageLevel.DEBUG, text = "Dispatch response status: {0}, Location: {1}" )
+  void dispatchResponseCreatedStatusCode( int statusCode, String location );
+
+  @Message( level = MessageLevel.DEBUG, text = "Successful Knox->Hadoop SPNegotiation authentication for URL: {0}" )
+  void successfulSPNegoAuthn(String uri);
+  
+  @Message( level = MessageLevel.ERROR, text = "Failed Knox->Hadoop SPNegotiation authentication for URL: {0}" )
+  void failedSPNegoAuthn(String uri);
+}
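
Nothing implements this interface by hand; Knox's i18n layer generates an implementation at runtime from the annotations. A minimal usage sketch, assuming the MessagesFactory lookup pattern used elsewhere in the codebase:

    import java.net.URI;

    import org.apache.hadoop.gateway.GatewayMessages;
    import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;

    public class DispatchLoggingSketch {
      // The generated implementation is backed by the logger named in
      // the @Messages annotation ("org.apache.hadoop.gateway").
      private static final GatewayMessages LOG =
          MessagesFactory.get( GatewayMessages.class );

      void traceDispatch( URI uri ) {
        // Logs at DEBUG: "Dispatch request: GET <uri>"
        LOG.dispatchRequest( "GET", uri );
      }
    }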

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-spi/src/main/java/org/apache/hadoop/gateway/GatewayResources.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/GatewayResources.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/GatewayResources.java
new file mode 100644
index 0000000..3d9c514
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/GatewayResources.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import org.apache.hadoop.gateway.i18n.resources.Resource;
+import org.apache.hadoop.gateway.i18n.resources.Resources;
+
+/**
+ * Externalized message text used by the gateway dispatch layer.
+ */
+@Resources
+public interface GatewayResources {
+
+  @Resource( text="Service connectivity error." )
+  String dispatchConnectionError();
+
+  @Resource( text="Response status: {0}" )
+  String responseStatus( int status );
+
+}
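
GatewayResources works the same way for reusable message text, via the resources counterpart of the factory (again a sketch; it assumes ResourcesFactory mirrors the messages lookup):

    import org.apache.hadoop.gateway.GatewayResources;
    import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;

    public class DispatchErrorSketch {
      private static final GatewayResources RES =
          ResourcesFactory.get( GatewayResources.class );

      String connectionError() {
        // Resolves to the template text "Service connectivity error."
        return RES.dispatchConnectionError();
      }
    }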

http://git-wip-us.apache.org/repos/asf/knox/blob/367e849f/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
index 83a67cd..4d33fa8 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
@@ -87,6 +87,9 @@ public abstract class ServiceDeploymentContributorBase extends DeploymentContrib
   }
 
   protected void addDispatchFilter(DeploymentContext context, Service service, ResourceDescriptor resource, String role, String name ) {
+    if (name == null) {
+      name = "http-client";
+    }
     context.contributeFilter( service, resource, role, name, null );
   }