Posted to commits@ambari.apache.org by ni...@apache.org on 2016/12/28 09:33:22 UTC

[22/23] ambari git commit: AMBARI-19302 : removed contrib/views/hive folder and made necessary changes in pom.xml files (nitirajrathore)

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
deleted file mode 100644
index d69a1a5..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
+++ /dev/null
@@ -1,740 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.commons.codec.binary.Hex;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hive.service.auth.HiveAuthFactory;
-import org.apache.hive.service.auth.KerberosSaslHelper;
-import org.apache.hive.service.auth.PlainSaslHelper;
-import org.apache.hive.service.auth.SaslQOP;
-import org.apache.hive.service.cli.thrift.*;
-import org.apache.http.HttpRequestInterceptor;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.CookieStore;
-import org.apache.http.client.ServiceUnavailableRetryStrategy;
-import org.apache.http.config.Registry;
-import org.apache.http.config.RegistryBuilder;
-import org.apache.http.conn.socket.ConnectionSocketFactory;
-import org.apache.http.conn.ssl.SSLSocketFactory;
-import org.apache.http.impl.client.BasicCookieStore;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
-import org.apache.http.protocol.HttpContext;
-import org.apache.thrift.TException;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.transport.THttpClient;
-import org.apache.thrift.transport.TTransport;
-import org.apache.thrift.transport.TTransportException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.net.ssl.KeyManagerFactory;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.TrustManagerFactory;
-import javax.security.sasl.Sasl;
-import javax.security.sasl.SaslException;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.security.KeyStore;
-import java.security.SecureRandom;
-import java.sql.SQLException;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Holds a Thrift connection to HiveServer2 and the sessions opened over it.
- */
-public class Connection {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(Connection.class);
-  private String host;
-  private int port;
-  private Map<String, String> authParams;
-
-  private TCLIService.Client client = null;
-  private Map<String, TSessionHandle> sessHandles = null;
-  private TProtocolVersion protocol = null;
-  private TTransport transport;
-
-  private DDLDelegator ddl;
-  private String username;
-  private String password;
-
-  public Connection(String host, int port, Map<String, String> authParams, String username, String password)
-      throws HiveClientException, HiveAuthRequiredException {
-    this.host = host;
-    this.port = port;
-    this.authParams = authParams;
-    this.username = username;
-    this.password = password;
-
-    this.sessHandles = new HashMap<String, TSessionHandle>();
-
-    openConnection();
-    ddl = new DDLDelegator(this);
-  }
-
-  public DDLDelegator ddl() {
-    return ddl;
-  }
-
-  public synchronized void openConnection() throws HiveClientException, HiveAuthRequiredException {
-    try {
-      transport = isHttpTransportMode() ? createHttpTransport() : createBinaryTransport();
-      transport.open();
-      client = new TCLIService.Client(new TBinaryProtocol(transport));
-    } catch (TTransportException e) {
-      throw new HiveClientException("H020 Could not establish connection to "
-          + host + ":" + port + ": " + e.toString(), e);
-    } catch (SQLException e) {
-      throw new HiveClientException(e.getMessage(), e);
-    }
-    LOG.info("Hive connection opened");
-  }
-
-  /**
-   * Based on JDBC implementation of HiveConnection.createBinaryTransport
-   *
-   * @return transport
-   * @throws HiveClientException
-   */
-  protected TTransport createBinaryTransport() throws HiveClientException, TTransportException, HiveAuthRequiredException {
-    TTransport transport;
-    boolean assumeSubject =
-        Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(authParams
-            .get(Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE));
-    try {
-      if (!Utils.HiveAuthenticationParams.AUTH_SIMPLE.equalsIgnoreCase(authParams.get(Utils.HiveAuthenticationParams.AUTH_TYPE))) {
-        // Not SIMPLE auth: Kerberos principal, delegation token, or SASL PLAIN with user/password
-        Map<String, String> saslProps = new HashMap<String, String>();
-        SaslQOP saslQOP = SaslQOP.AUTH;
-        if (authParams.containsKey(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL)) {
-          if (authParams.containsKey(Utils.HiveAuthenticationParams.AUTH_QOP)) {
-            try {
-              saslQOP = SaslQOP.fromString(authParams.get(Utils.HiveAuthenticationParams.AUTH_QOP));
-            } catch (IllegalArgumentException e) {
-              throw new HiveClientException("H040 Invalid " + Utils.HiveAuthenticationParams.AUTH_QOP +
-                  " parameter. " + e.getMessage(), e);
-            }
-          }
-          saslProps.put(Sasl.QOP, saslQOP.toString());
-          saslProps.put(Sasl.SERVER_AUTH, "true");
-
-          Configuration conf = new Configuration();
-          conf.set("hadoop.security.authentication", "kerberos");
-          UserGroupInformation.setConfiguration(conf);
-
-          transport = KerberosSaslHelper.getKerberosTransport(
-              authParams.get(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL), host,
-              HiveAuthFactory.getSocketTransport(host, port, 10000), saslProps,
-              assumeSubject);
-        } else {
-          // If there's a delegation token available then use token based connection
-          String tokenStr = getClientDelegationToken(authParams);
-          if (tokenStr != null) {
-            transport = KerberosSaslHelper.getTokenTransport(tokenStr,
-                host, HiveAuthFactory.getSocketTransport(host, port, 10000), saslProps);
-          } else {
-            // we are using PLAIN Sasl connection with user/password
-            String userName = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_USER, getUsername());
-            String passwd = getPassword();
-            // Note: Thrift returns an SSL socket that is already connected to the specified
-            // host:port, so a later open() on it is a no-op.
-            // Hence, any TTransportException related to connecting with the peer is thrown here.
-            // We bubble it up the call hierarchy so that a retry can happen in openTransport
-            // if dynamic service discovery is configured.
-            if (isSslConnection()) {
-              // get SSL socket
-              String sslTrustStore = authParams.get(Utils.HiveAuthenticationParams.SSL_TRUST_STORE);
-              String sslTrustStorePassword = authParams.get(Utils.HiveAuthenticationParams.SSL_TRUST_STORE_PASSWORD);
-              if (sslTrustStore == null || sslTrustStore.isEmpty()) {
-                transport = HiveAuthFactory.getSSLSocket(host, port, 10000);
-              } else {
-                transport = HiveAuthFactory.getSSLSocket(host, port, 10000,
-                    sslTrustStore, sslTrustStorePassword);
-              }
-            } else {
-              // get non-SSL socket transport
-              transport = HiveAuthFactory.getSocketTransport(host, port, 10000);
-            }
-            // Overlay the SASL transport on top of the base socket transport (SSL or non-SSL)
-            transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
-          }
-        }
-      } else {
-        //NOSASL
-        return HiveAuthFactory.getSocketTransport(host, port, 10000);
-      }
-    } catch (SaslException e) {
-      throw new HiveClientException("H040 Could not create secure connection to "
-          + host + ": " + e.getMessage(), e);
-    }
-    return transport;
-  }
-
-  private String getServerHttpUrl(boolean useSsl) {
-    // Create the http/https url
-    // JDBC driver will set up an https url if ssl is enabled, otherwise http
-    String schemeName = useSsl ? "https" : "http";
-    // http path should begin with "/"
-    String httpPath;
-    httpPath = authParams.get(Utils.HiveAuthenticationParams.HTTP_PATH);
-    if (httpPath == null) {
-      httpPath = "/";
-    } else if (!httpPath.startsWith("/")) {
-      httpPath = "/" + httpPath;
-    }
-    return schemeName + "://" + host + ":" + port + httpPath;
-  }
-
-  private TTransport createHttpTransport() throws SQLException, TTransportException {
-    CloseableHttpClient httpClient;
-    boolean useSsl = isSslConnection();
-    // Create an http client from the configs
-    httpClient = getHttpClient(useSsl);
-    try {
-      transport = new THttpClient(getServerHttpUrl(useSsl), httpClient);
-      // We call an open/close here to send a test HTTP message to the server. Any
-      // TTransportException caused by trying to connect to an unavailable peer is thrown here.
-      // We bubble it up the call hierarchy so that a retry can happen in openTransport
-      // if dynamic service discovery is configured.
-      TCLIService.Iface client = new TCLIService.Client(new TBinaryProtocol(transport));
-      TOpenSessionResp openResp = client.OpenSession(new TOpenSessionReq());
-      if (openResp != null) {
-        client.CloseSession(new TCloseSessionReq(openResp.getSessionHandle()));
-      }
-    } catch (TException e) {
-      LOG.info("JDBC Connection Parameters used : useSSL = " + useSsl + " , httpPath  = " +
-          authParams.get(Utils.HiveAuthenticationParams.HTTP_PATH) + " Authentication type = " +
-          authParams.get(Utils.HiveAuthenticationParams.AUTH_TYPE));
-      String msg = "Could not create http connection to " +
-          getServerHttpUrl(useSsl) + ". " + e.getMessage();
-      throw new TTransportException(msg, e);
-    }
-    return transport;
-  }
-
-  private CloseableHttpClient getHttpClient(Boolean useSsl) throws SQLException {
-    boolean isCookieEnabled = authParams.get(Utils.HiveAuthenticationParams.COOKIE_AUTH) == null ||
-        (!Utils.HiveAuthenticationParams.COOKIE_AUTH_FALSE.equalsIgnoreCase(
-            authParams.get(Utils.HiveAuthenticationParams.COOKIE_AUTH)));
-    String cookieName = authParams.get(Utils.HiveAuthenticationParams.COOKIE_NAME) == null ?
-        Utils.HiveAuthenticationParams.DEFAULT_COOKIE_NAMES_HS2 :
-        authParams.get(Utils.HiveAuthenticationParams.COOKIE_NAME);
-    CookieStore cookieStore = isCookieEnabled ? new BasicCookieStore() : null;
-    HttpClientBuilder httpClientBuilder;
-    // Request interceptor for any request pre-processing logic
-    HttpRequestInterceptor requestInterceptor;
-    Map<String, String> additionalHttpHeaders = new HashMap<String, String>();
-
-    // Retrieve the additional HttpHeaders
-    for (Map.Entry<String, String> entry : authParams.entrySet()) {
-      String key = entry.getKey();
-
-      if (key.startsWith(Utils.HiveAuthenticationParams.HTTP_HEADER_PREFIX)) {
-        additionalHttpHeaders.put(key.substring(Utils.HiveAuthenticationParams.HTTP_HEADER_PREFIX.length()),
-            entry.getValue());
-      }
-    }
-    // Configure http client for kerberos/password based authentication
-    if (isKerberosAuthMode()) {
-      /**
-       * Add an interceptor which sets the appropriate header in the request.
-       * It performs the Kerberos authentication and gets the final service ticket
-       * to send to the server before every request.
-       * In https mode, the entire exchange is encrypted.
-       */
-
-      Boolean assumeSubject =
-          Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(authParams
-              .get(Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE));
-      requestInterceptor =
-          new HttpKerberosRequestInterceptor(authParams.get(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL),
-              host, getServerHttpUrl(useSsl), assumeSubject, cookieStore, cookieName, useSsl,
-              additionalHttpHeaders);
-    } else {
-      /**
-       * Add an interceptor to pass the username/password in the header.
-       * In https mode, the entire exchange is encrypted.
-       */
-      requestInterceptor = new HttpBasicAuthInterceptor(
-          getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_USER, getUsername())
-          , getPassword(),cookieStore, cookieName, useSsl,
-          additionalHttpHeaders);
-    }
-    // Configure http client for cookie based authentication
-    if (isCookieEnabled) {
-      // Create an http client with a retry mechanism for when the server returns a 401 status code.
-      httpClientBuilder =
-          HttpClients.custom().setServiceUnavailableRetryStrategy(
-              new ServiceUnavailableRetryStrategy() {
-
-                @Override
-                public boolean retryRequest(
-                    final HttpResponse response,
-                    final int executionCount,
-                    final HttpContext context) {
-                  int statusCode = response.getStatusLine().getStatusCode();
-                  boolean ret = statusCode == 401 && executionCount <= 1;
-
-                  // Set the context attribute to true which will be interpreted by the request interceptor
-                  if (ret) {
-                    context.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_TRUE);
-                  }
-                  return ret;
-                }
-
-                @Override
-                public long getRetryInterval() {
-                  // Immediate retry
-                  return 0;
-                }
-              });
-    } else {
-      httpClientBuilder = HttpClientBuilder.create();
-    }
-    // Add the request interceptor to the client builder
-    httpClientBuilder.addInterceptorFirst(requestInterceptor);
-    // Configure http client for SSL
-    if (useSsl) {
-      String useTwoWaySSL = authParams.get(Utils.HiveAuthenticationParams.USE_TWO_WAY_SSL);
-      String sslTrustStorePath = authParams.get(Utils.HiveAuthenticationParams.SSL_TRUST_STORE);
-      String sslTrustStorePassword = authParams.get(
-          Utils.HiveAuthenticationParams.SSL_TRUST_STORE_PASSWORD);
-      KeyStore sslTrustStore;
-      SSLSocketFactory socketFactory;
-
-      /**
-       * The code within the try block throws:
-       * 1. SSLInitializationException
-       * 2. KeyStoreException
-       * 3. IOException
-       * 4. NoSuchAlgorithmException
-       * 5. CertificateException
-       * 6. KeyManagementException
-       * 7. UnrecoverableKeyException
-       * We don't want the client to retry on any of these, hence we catch all
-       * and throw a SQLException.
-       */
-      try {
-        if (useTwoWaySSL != null &&
-            useTwoWaySSL.equalsIgnoreCase(Utils.HiveAuthenticationParams.TRUE)) {
-          socketFactory = getTwoWaySSLSocketFactory();
-        } else if (sslTrustStorePath == null || sslTrustStorePath.isEmpty()) {
-          // Create a default socket factory based on standard JSSE trust material
-          socketFactory = SSLSocketFactory.getSocketFactory();
-        } else {
-          // Pick trust store config from the given path
-          sslTrustStore = KeyStore.getInstance(Utils.HiveAuthenticationParams.SSL_TRUST_STORE_TYPE);
-          try (FileInputStream fis = new FileInputStream(sslTrustStorePath)) {
-            sslTrustStore.load(fis, sslTrustStorePassword.toCharArray());
-          }
-          socketFactory = new SSLSocketFactory(sslTrustStore);
-        }
-        socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
-
-        final Registry<ConnectionSocketFactory> registry =
-            RegistryBuilder.<ConnectionSocketFactory>create()
-                .register("https", socketFactory)
-                .build();
-
-        httpClientBuilder.setConnectionManager(new BasicHttpClientConnectionManager(registry));
-      } catch (Exception e) {
-        String msg = "Could not create an https connection to " +
-            getServerHttpUrl(useSsl) + ". " + e.getMessage();
-        throw new SQLException(msg, " 08S01", e);
-      }
-    }
-    return httpClientBuilder.build();
-  }
-
-  private boolean isKerberosAuthMode() {
-    return !Utils.HiveAuthenticationParams.AUTH_SIMPLE.equals(authParams.get(Utils.HiveAuthenticationParams.AUTH_TYPE))
-        && authParams.containsKey(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL);
-  }
-
-  private boolean isHttpTransportMode() {
-    String transportMode = authParams.get(Utils.HiveAuthenticationParams.TRANSPORT_MODE);
-    return "http".equalsIgnoreCase(transportMode);
-  }
-
-  private String getPassword() throws HiveAuthRequiredException {
-    String password = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_PASSWD, Utils.HiveAuthenticationParams.ANONYMOUS_USER);
-    if (password.equals("${ask_password}")) {
-      if (this.password == null) {
-        throw new HiveAuthRequiredException();
-      } else {
-        password = this.password;
-      }
-    }
-    return password;
-  }
-
-  SSLSocketFactory getTwoWaySSLSocketFactory() throws SQLException {
-    SSLSocketFactory socketFactory = null;
-
-    try {
-      KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(
-          Utils.HiveAuthenticationParams.SUNX509_ALGORITHM_STRING,
-          Utils.HiveAuthenticationParams.SUNJSSE_ALGORITHM_STRING);
-      String keyStorePath = authParams.get(Utils.HiveAuthenticationParams.SSL_KEY_STORE);
-      String keyStorePassword = authParams.get(Utils.HiveAuthenticationParams.SSL_KEY_STORE_PASSWORD);
-      KeyStore sslKeyStore = KeyStore.getInstance(Utils.HiveAuthenticationParams.SSL_KEY_STORE_TYPE);
-
-      if (keyStorePath == null || keyStorePath.isEmpty()) {
-        throw new IllegalArgumentException(Utils.HiveAuthenticationParams.SSL_KEY_STORE
-            + " Not configured for 2 way SSL connection, keyStorePath param is empty");
-      }
-      try (FileInputStream fis = new FileInputStream(keyStorePath)) {
-        sslKeyStore.load(fis, keyStorePassword.toCharArray());
-      }
-      keyManagerFactory.init(sslKeyStore, keyStorePassword.toCharArray());
-
-      TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(
-          Utils.HiveAuthenticationParams.SUNX509_ALGORITHM_STRING);
-      String trustStorePath = authParams.get(Utils.HiveAuthenticationParams.SSL_TRUST_STORE);
-      String trustStorePassword = authParams.get(
-          Utils.HiveAuthenticationParams.SSL_TRUST_STORE_PASSWORD);
-      KeyStore sslTrustStore = KeyStore.getInstance(Utils.HiveAuthenticationParams.SSL_TRUST_STORE_TYPE);
-
-      if (trustStorePath == null || trustStorePath.isEmpty()) {
-        throw new IllegalArgumentException(Utils.HiveAuthenticationParams.SSL_TRUST_STORE
-            + " Not configured for 2 way SSL connection");
-      }
-      try (FileInputStream fis = new FileInputStream(trustStorePath)) {
-        sslTrustStore.load(fis, trustStorePassword.toCharArray());
-      }
-      trustManagerFactory.init(sslTrustStore);
-      SSLContext context = SSLContext.getInstance("TLS");
-      context.init(keyManagerFactory.getKeyManagers(),
-          trustManagerFactory.getTrustManagers(), new SecureRandom());
-      socketFactory = new SSLSocketFactory(context);
-    } catch (Exception e) {
-      throw new SQLException("Error while initializing 2 way ssl socket factory ", e);
-    }
-    return socketFactory;
-  }
-
-  private boolean isSslConnection() {
-    return "true".equalsIgnoreCase(authParams.get(Utils.HiveAuthenticationParams.USE_SSL));
-  }
-
-  // Lookup the delegation token. First in the connection URL, then Configuration
-  private String getClientDelegationToken(Map<String, String> jdbcConnConf) throws HiveClientException {
-    String tokenStr = null;
-    if (Utils.HiveAuthenticationParams.AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(Utils.HiveAuthenticationParams.AUTH_TYPE))) {
-      // check delegation token in job conf if any
-      try {
-        tokenStr = ShimLoader.getHadoopShims().
-            getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
-      } catch (IOException e) {
-        throw new HiveClientException("H050 Error reading token", e);
-      }
-    }
-    return tokenStr;
-  }
-
-  private String getAuthParamDefault(String key, String defaultValue) {
-    if (authParams.containsKey(key)) {
-      return authParams.get(key);
-    }
-    return defaultValue;
-  }
-
-  public synchronized TSessionHandle openSession() throws HiveClientException {
-    return openSession(null);
-  }
-
-  public synchronized TSessionHandle openSession(String forcedTag) throws HiveClientException {
-    TOpenSessionResp openResp = new HiveCall<TOpenSessionResp>(this) {
-      @Override
-      public TOpenSessionResp body() throws HiveClientException {
-        TOpenSessionReq openReq = new TOpenSessionReq();
-        Map<String, String> openConf = new HashMap<String, String>();
-        if(authParams.containsKey(Utils.HiveAuthenticationParams.HS2_PROXY_USER)){
-          openConf.put(Utils.HiveAuthenticationParams.HS2_PROXY_USER,
-                       authParams.get(Utils.HiveAuthenticationParams.HS2_PROXY_USER));
-        }
-        openReq.setConfiguration(openConf);
-        try {
-          return client.OpenSession(openReq);
-        } catch (TException e) {
-          throw new HiveClientException("H060 Unable to open Hive session", e);
-        }
-
-      }
-    }.call();
-    Utils.verifySuccess(openResp.getStatus(), "H070 Unable to open Hive session");
-
-    if (protocol == null)
-      protocol = openResp.getServerProtocolVersion();
-    LOG.info("Hive session opened");
-
-    TSessionHandle sessionHandle = openResp.getSessionHandle();
-    String tag;
-    if (forcedTag == null)
-      tag = Hex.encodeHexString(sessionHandle.getSessionId().getGuid());
-    else
-      tag = forcedTag;
-
-    sessHandles.put(tag, sessionHandle);
-
-    return sessionHandle;
-  }
-
-  public TSessionHandle getSessionByTag(String tag) throws HiveClientException {
-    TSessionHandle sessionHandle = sessHandles.get(tag);
-    if (sessionHandle == null) {
-      throw new HiveClientException("E030 Session with provided tag not found", null);
-    }
-    return sessionHandle;
-  }
-
-  public TSessionHandle getOrCreateSessionByTag(String tag) throws HiveClientException {
-    try {
-      return getSessionByTag(tag);
-    } catch (HiveClientException e) {
-      return openSession(tag);
-    }
-  }
-
-  public void invalidateSessionByTag(String tag) throws HiveClientException {
-    TSessionHandle sessionHandle = getSessionByTag(tag);
-    closeSession(sessionHandle);
-    sessHandles.remove(tag);
-  }
-
-  public void invalidateSessionBySessionHandle(TSessionHandle sessionHandle) throws HiveClientException{
-    sessHandles.values().remove(sessionHandle);
-    closeSession(sessionHandle);
-  }
-
-  private synchronized void closeSession(TSessionHandle sessHandle) throws HiveClientException {
-    if (sessHandle == null) return;
-    TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle);
-    TCloseSessionResp closeResp = null;
-    try {
-      closeResp = client.CloseSession(closeReq);
-      Utils.verifySuccess(closeResp.getStatus(), "H080 Unable to close Hive session");
-    } catch (TException e) {
-      throw new HiveClientException("H090 Unable to close Hive session", e);
-    }
-    LOG.info("Hive session closed");
-  }
-
-  public synchronized void closeConnection() throws HiveClientException {
-    if (client == null) return;
-    try {
-
-      for(Iterator<Map.Entry<String, TSessionHandle>> it = sessHandles.entrySet().iterator(); it.hasNext(); ) {
-        Map.Entry<String, TSessionHandle> entry = it.next();
-        try {
-          closeSession(entry.getValue());
-        } catch (HiveClientException e) {
-          LOG.error("Unable to close Hive session: " + e.getMessage());
-        } finally {
-          it.remove();
-        }
-      }
-
-    } finally {
-      transport.close();
-      transport = null;
-      client = null;
-      protocol = null;
-    }
-    LOG.info("Connection to Hive closed");
-  }
-
-  /**
-   * Execute a query
-   * @param session session handle
-   * @param cmd query text; may contain multiple ';'-separated statements
-   * @param async whether to run the last statement asynchronously (false = wait for it to finish)
-   * @return operation handle of the last statement
-   * @throws HiveClientException
-   */
-  public TOperationHandle execute(final TSessionHandle session, final String cmd, final boolean async) throws HiveClientException {
-    TOperationHandle handle = null;
-
-    String[] commands = Utils.removeEmptyStrings(cmd.split(";"));
-    for(int i=0; i<commands.length; i++) {
-      final String oneCmd = commands[i];
-      final boolean lastCommand = i == commands.length-1;
-
-      TExecuteStatementResp execResp = new HiveCall<TExecuteStatementResp>(this,session) {
-        @Override
-        public TExecuteStatementResp body() throws HiveClientException {
-
-          TExecuteStatementReq execReq = null;
-          execReq = new TExecuteStatementReq(session, oneCmd);
-
-          // only the last command may run asynchronously and return results;
-          // all preceding commands are expected to be SET-property statements
-          if (lastCommand) {
-            execReq.setRunAsync(async);
-          } else {
-            execReq.setRunAsync(false);
-          }
-          execReq.setConfOverlay(new HashMap<String, String>());
-          try {
-            return client.ExecuteStatement(execReq);
-          } catch (TException e) {
-            throw new HiveClientException("H100 Unable to submit statement " + cmd, e);
-          }
-
-        }
-      }.call();
-
-      Utils.verifySuccess(execResp.getStatus(), "H110 Unable to submit statement");
-      //TODO: check if the status has results
-      handle = execResp.getOperationHandle();
-    }
-    if (handle == null) {
-      throw new HiveClientException("H120 Empty command given", null);
-    }
-    return handle;
-  }
-
-  public TOperationHandle executeAsync(TSessionHandle session, String cmd) throws HiveClientException {
-    return execute(session, cmd, true);
-  }
-
-  public TOperationHandle executeSync(TSessionHandle session, String cmd) throws HiveClientException {
-    return execute(session, cmd, false);
-  }
-
-  public String getLogs(TOperationHandle handle) {
-    LogsCursor results = new LogsCursor(this, handle);
-    results.reset(); // read from the FIRST line, so that every call to this
-    // method returns the logs from the beginning
-    List<String> logLineList = results.getValuesInColumn(0);
-    StringBuilder log = new StringBuilder();
-    for (String line : logLineList) {
-      log.append(line);
-      log.append('\n');
-    }
-    return log.toString();
-  }
-
-  public Cursor getResults(TOperationHandle handle) {
-    Cursor cursor = new Cursor(this, handle);
-    cursor.reset(); // read from the FIRST line, so that every call to this
-    // method returns the results from the beginning
-    return cursor;
-  }
-
-  /**
-   * Retrieve status of operation
-   * @param operationHandle handle
-   * @return thrift status response object
-   * @throws HiveClientException
-   */
-  public TGetOperationStatusResp getOperationStatus(final TOperationHandle operationHandle) throws HiveClientException {
-    return new HiveCall<TGetOperationStatusResp>(this) {
-      @Override
-      public TGetOperationStatusResp body() throws HiveClientException {
-
-        TGetOperationStatusReq statusReq = new TGetOperationStatusReq(operationHandle);
-        try {
-          return client.GetOperationStatus(statusReq);
-        } catch (TException e) {
-          throw new HiveClientException("H130 Unable to fetch operation status", e);
-        }
-
-      }
-    }.call();
-  }
-
-  /**
-   * Cancel operation
-   * @param operationHandle operation handle
-   */
-  public void cancelOperation(final TOperationHandle operationHandle) throws HiveClientException {
-    TCancelOperationResp cancelResp = new HiveCall<TCancelOperationResp>(this) {
-      @Override
-      public TCancelOperationResp body() throws HiveClientException {
-        TCancelOperationReq cancelReq = new TCancelOperationReq(operationHandle);
-        try {
-          return client.CancelOperation(cancelReq);
-        } catch (TException e) {
-          throw new HiveClientException("H140 Unable to cancel operation", null);
-        }
-      }
-    }.call();
-    Utils.verifySuccess(cancelResp.getStatus(), "H150 Unable to cancel operation");
-  }
-
-  public int getPort() {
-    return port;
-  }
-
-  public void setPort(int port) {
-    this.port = port;
-  }
-
-  public String getHost() {
-    return host;
-  }
-
-  public void setHost(String host) {
-    this.host = host;
-  }
-
-  public TCLIService.Client getClient() {
-    return client;
-  }
-
-  public void setClient(TCLIService.Client client) {
-    this.client = client;
-  }
-
-  public TProtocolVersion getProtocol() {
-    return protocol;
-  }
-
-  public void setProtocol(TProtocolVersion protocol) {
-    this.protocol = protocol;
-  }
-
-  public Map<String, String> getAuthParams() {
-    return authParams;
-  }
-
-  public void setAuthParams(Map<String, String> authParams) {
-    this.authParams = authParams;
-  }
-
-  public String getUsername() {
-    return username;
-  }
-
-  public void setUsername(String username) {
-    this.username = username;
-  }
-}
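
The Connection class removed above wrapped the raw HiveServer2 Thrift client with
session caching and tag-based lookup. For reference, a minimal usage sketch of that
API as declared in the diff; the host, port, and credential values are hypothetical
placeholders, not values taken from this commit:

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.view.hive.client.Connection;
import org.apache.hive.service.cli.thrift.TOperationHandle;
import org.apache.hive.service.cli.thrift.TSessionHandle;

public class ConnectionSketch {
  public static void main(String[] args) throws Exception {
    Map<String, String> authParams = new HashMap<String, String>();
    authParams.put("auth", "NONE"); // NOSASL, as ConnectionFactory sets for an empty hive.auth

    // hypothetical endpoint; ConnectionFactory normally resolves these from the view context
    Connection conn = new Connection("hs2.example.com", 10000, authParams, "admin", null);
    TSessionHandle session = conn.openSession();  // cached in sessHandles under a hex tag
    TOperationHandle op = conn.executeAsync(session, "SELECT 1");
    System.out.println(conn.getOperationStatus(op).getOperationState());
    conn.closeConnection();                       // closes every cached session first
  }
}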

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
deleted file mode 100644
index d3cbb08..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive.utils.HiveClientFormattedException;
-import org.apache.ambari.view.hive.utils.ServiceFormattedException;
-import org.apache.ambari.view.utils.UserLocalFactory;
-import org.apache.ambari.view.utils.ambari.AmbariApi;
-import org.apache.ambari.view.utils.ambari.AmbariApiException;
-import org.apache.ambari.view.utils.hdfs.HdfsApi;
-import org.apache.ambari.view.utils.hdfs.HdfsUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class ConnectionFactory implements UserLocalFactory<Connection> {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(ConnectionFactory.class);
-  private ViewContext context;
-  private HiveAuthCredentials credentials;
-  private AmbariApi ambariApi;
-  private HdfsApi hdfsApi = null;
-
-  public static final String HIVE_SERVER2_AUTHENTICATION = "hive.server2.authentication";
-  public static final String HIVE_SITE = "hive-site";
-  public static final String HIVE_SERVER2_KERBEROS_PRINCIPAL = "hive.server2.authentication.kerberos.principal";
-  public static final String HIVE_SASL_QOP = "hive.server2.thrift.sasl.qop";
-
-  public ConnectionFactory(ViewContext context, HiveAuthCredentials credentials) {
-    this.context = context;
-    this.credentials = credentials;
-    this.ambariApi = new AmbariApi(context);
-  }
-
-  /**
-   * Get HdfsApi instance
-   * @return HdfsApi business delegate
-   */
-  public synchronized HdfsApi getHDFSApi() {
-    if (hdfsApi == null) {
-      try {
-        hdfsApi = HdfsUtil.connectToHDFSApi(context);
-      } catch (Exception ex) {
-        throw new ServiceFormattedException("HdfsApi connection failed. Check \"webhdfs.url\" property", ex);
-      }
-    }
-    return hdfsApi;
-  }
-
-  @Override
-  public Connection create() {
-    try {
-      return new Connection(getHiveHost(), Integer.valueOf(getHivePort()),
-          getHiveAuthParams(), context.getUsername(), getCredentials().getPassword());
-    } catch (HiveClientException e) {
-      throw new HiveClientFormattedException(e);
-    }
-  }
-
-  private String getHiveHost() {
-    if (context.getCluster() != null) {
-      List<String> hiveServerHosts;
-      try {
-        hiveServerHosts = context.getCluster().getHostsForServiceComponent("HIVE","HIVE_SERVER");
-      } catch (AmbariApiException e) {
-        throw new ServiceFormattedException(e);
-      }
-
-      if (!hiveServerHosts.isEmpty()) {
-        String hostname = hiveServerHosts.get(0);
-        LOG.info("HIVE_SERVER component was found on host " + hostname);
-        return hostname;
-      }
-      LOG.warn("No host was found with HIVE_SERVER component. Using hive.host property to get hostname.");
-    }
-    return context.getProperties().get("hive.host");
-  }
-
-  private String getHivePort() {
-    // reversed equalsIgnoreCase avoids an NPE when "hive.transport.mode" is unset
-    boolean isHttpMode = "http".equalsIgnoreCase(context.getProperties().get("hive.transport.mode"));
-    if (isHttpMode) {
-      return context.getProperties().get("hive.http.port");
-    }
-    return context.getProperties().get("hive.port");
-  }
-
-  private Map<String, String> getHiveAuthParams() {
-    String auth = context.getProperties().get("hive.auth");
-    Map<String, String> params = new HashMap<String, String>();
-    if ((auth == null || auth.isEmpty()) && context.getCluster() != null) {
-      params.putAll(getDefaultAuthParams());
-    } else if(auth == null || auth.isEmpty()) {
-      params.put("auth","NONE");
-    } else {
-      for (String param : auth.split(";")) {
-        String[] keyvalue = param.split("=");
-        if (keyvalue.length != 2) {
-          //Should never happen because validator already checked this
-          throw new ServiceFormattedException("H010 Can not parse authentication param " + param + " in " + auth);
-        }
-        params.put(keyvalue[0], keyvalue[1]);
-      }
-    }
-    params.put(Utils.HiveAuthenticationParams.TRANSPORT_MODE,context.getProperties().get("hive.transport.mode"));
-    params.put(Utils.HiveAuthenticationParams.HTTP_PATH,context.getProperties().get("hive.http.path"));
-    return params;
-  }
-
-  private Map<String, String> getDefaultAuthParams() {
-    Map<String, String> params = new HashMap<String, String>();
-    String auth = getProperty(HIVE_SITE, HIVE_SERVER2_AUTHENTICATION);
-    params.put("auth", auth);
-
-    if (auth.equalsIgnoreCase("KERBEROS")) {
-      params.put("principal", getProperty(HIVE_SITE, HIVE_SERVER2_KERBEROS_PRINCIPAL));
-      params.put(Utils.HiveAuthenticationParams.HS2_PROXY_USER, context.getUsername());
-    } else if (auth.equalsIgnoreCase("LDAP") || auth.equalsIgnoreCase("CUSTOM")) {
-      params.put("auth", "NONE");
-      params.put("password", "${ask_password}");
-    }
-
-    String qop = getProperty(HIVE_SITE, HIVE_SASL_QOP);
-    if (qop != null && !qop.equals("auth")) {
-      params.put(Utils.HiveAuthenticationParams.AUTH_QOP, qop);
-    }
-    return params;
-  }
-
-  private String getProperty(String type,String key){
-    if(context.getCluster() != null){
-      return context.getCluster().getConfigurationValue(type,key);
-    }
-    return null;
-  }
-
-  public HiveAuthCredentials getCredentials() {
-    return credentials;
-  }
-
-  public void setCredentials(HiveAuthCredentials credentials) {
-    this.credentials = credentials;
-  }
-}
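
ConnectionFactory above resolved the host, port, and auth parameters from the view
context, falling back to hive-site values when the view is cluster-associated. A
sketch of the per-user creation path it served; the ViewContext wiring here is
assumed, not taken from this commit:

// 'viewContext' is a hypothetical org.apache.ambari.view.ViewContext instance.
HiveAuthCredentials credentials = new HiveAuthCredentials();
credentials.setPassword("secret"); // needed only when auth resolves to ${ask_password}
ConnectionFactory factory = new ConnectionFactory(viewContext, credentials);
Connection hive = factory.create(); // reads hive.host/hive.port/hive.auth or cluster defaults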

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Cursor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Cursor.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Cursor.java
deleted file mode 100644
index 16fdf36..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Cursor.java
+++ /dev/null
@@ -1,243 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import static org.apache.hive.service.cli.thrift.TCLIServiceConstants.TYPE_NAMES;
-
-import org.apache.ambari.view.hive.utils.BadRequestFormattedException;
-import org.apache.ambari.view.hive.utils.HiveClientFormattedException;
-import org.apache.hive.service.cli.RowSet;
-import org.apache.hive.service.cli.RowSetFactory;
-import org.apache.hive.service.cli.thrift.*;
-import org.apache.thrift.TException;
-
-import java.util.*;
-
-public class Cursor implements Iterator<Row>, Iterable<Row> {
-  private final int FETCH_SIZE = 50;
-
-  private TCLIService.Client client;
-  private TOperationHandle opHandle;
-
-  private RowSet fetched = null;
-  private Iterator<Object[]> fetchedIterator = null;
-  private Connection connection;
-  private boolean resetCursor = false;
-  private ArrayList<ColumnDescription> schema;
-  private long offset;
-  private HashSet<Integer> selectedColumns = new LinkedHashSet<Integer>();
-
-  public Cursor(Connection connection, TOperationHandle opHandle) {
-    this.connection = connection;
-    this.client = connection.getClient();
-    this.opHandle = opHandle;
-  }
-
-  public TOperationHandle getOpHandle() {
-    return opHandle;
-  }
-
-  public void setOpHandle(TOperationHandle opHandle) {
-    this.opHandle = opHandle;
-  }
-
-  private void fetchNextBlock() throws HiveClientException {
-    //fetch another bunch
-    TFetchResultsResp fetchResp = new HiveCall<TFetchResultsResp>(connection) {
-      @Override
-      public TFetchResultsResp body() throws HiveClientException {
-        TFetchOrientation orientation = TFetchOrientation.FETCH_NEXT;
-        if (resetCursor) {
-          orientation = TFetchOrientation.FETCH_FIRST;
-          resetCursor = false;
-          offset = 0;
-        }
-
-        TFetchResultsReq fetchReq = getFetchResultsReq(orientation);
-        try {
-          return client.FetchResults(fetchReq);
-        } catch (TException e) {
-          throw new HiveClientException("H160 Unable to fetch results", e);
-        }
-
-      }
-    }.call();
-    Utils.verifySuccess(fetchResp.getStatus(), "H170 Unable to fetch results");
-    TRowSet results = fetchResp.getResults();
-    fetched = RowSetFactory.create(results, connection.getProtocol());
-    fetchedIterator = fetched.iterator();
-  }
-
-  protected TFetchResultsReq getFetchResultsReq(TFetchOrientation orientation) {
-    return new TFetchResultsReq(opHandle, orientation, FETCH_SIZE);
-  }
-
-  public ArrayList<ColumnDescription> getSchema() throws HiveClientException {
-    if (this.schema == null) {
-      TGetResultSetMetadataResp fetchResp = new HiveCall<TGetResultSetMetadataResp>(connection) {
-        @Override
-        public TGetResultSetMetadataResp body() throws HiveClientException {
-
-          TGetResultSetMetadataReq fetchReq = new TGetResultSetMetadataReq(opHandle);
-          try {
-            return client.GetResultSetMetadata(fetchReq);
-          } catch (TException e) {
-            throw new HiveClientException("H180 Unable to fetch results metadata", e);
-          }
-
-        }
-      }.call();
-      Utils.verifySuccess(fetchResp.getStatus(), "H190 Unable to fetch results metadata");
-      TTableSchema schema = fetchResp.getSchema();
-
-      List<TColumnDesc> thriftColumns = schema.getColumns();
-      ArrayList<ColumnDescription> columnDescriptions = new ArrayList<ColumnDescription>(thriftColumns.size());
-
-      for (TColumnDesc columnDesc : thriftColumns) {
-        String name = columnDesc.getColumnName();
-        String type = TYPE_NAMES.get(columnDesc.getTypeDesc().getTypes().get(0).getPrimitiveEntry().getType());
-        int position = columnDesc.getPosition();
-        columnDescriptions.add(ColumnDescriptionShort.createShortColumnDescription(name, type, position));
-      }
-      if (selectedColumns.size() == 0)
-        this.schema = columnDescriptions;
-      else {
-        ArrayList<ColumnDescription> selectedColumnsSchema = new ArrayList<ColumnDescription>();
-        for (Integer selectedIndex : selectedColumns) {
-          selectedColumnsSchema.add(columnDescriptions.get(selectedIndex));
-        }
-        this.schema = selectedColumnsSchema;
-      }
-    }
-    return this.schema;
-  }
-
-  /**
-   * Get a list of all values in one column
-   * @param column column index
-   * @return list of values in the column
-   */
-  public <T> List<T> getValuesInColumn(int column) {
-    LinkedList<T> list = new LinkedList<T>();
-    for (Row row : this) {
-      list.add((T) row.getRow()[column]);
-    }
-    return list;
-  }
-
-  /**
-   * Get a cursor over the operation logs
-   * @return Cursor configured to fetch logs
-   */
-  public Cursor getLogs() {
-    return new LogsCursor(connection, opHandle);
-  }
-
-  public void reset() {
-    fetchedIterator = null;
-    fetched = null;
-    resetCursor = true;
-    offset = 0;
-  }
-
-  @Override
-  public boolean hasNext() {
-    fetchIfNeeded();
-    return fetchedIterator.hasNext();
-  }
-
-  private void fetchIfNeeded() {
-    if (fetchedIterator == null || !fetchedIterator.hasNext()) {
-      try {
-        fetchNextBlock();
-      } catch (HiveClientException e) {
-        throw new HiveClientFormattedException(e);
-      }
-    }
-  }
-
-  @Override
-  public Row next() {
-    if (!hasNext())
-      throw new NoSuchElementException();
-    Row row = new Row(fetchedIterator.next(), selectedColumns);
-    offset ++;
-    return row;
-  }
-
-  @Override
-  public void remove() {
-    throw new UnsupportedOperationException("remove() is not supported by this cursor");
-  }
-
-  @Override
-  public Iterator<Row> iterator() {
-    return this;
-  }
-
-//  public int size() {
-//    fetchIfNeeded();
-//    return fetched.numRows();
-//  }
-  public long getOffset() {
-    return offset;
-  }
-
-  public int read(ArrayList<Row> rows, int count) {
-    int read = 0;
-    while(read < count && hasNext()) {
-      rows.add(next());
-      read ++;
-    }
-    return read;
-  }
-
-  public Row getHeadersRow() throws HiveClientException {
-    ArrayList<ColumnDescription> schema = getSchema();
-
-    Object[] row = new Object[schema.size()];
-    for (ColumnDescription columnDescription : schema) {
-      row[columnDescription.getPosition()-1] = columnDescription.getName();
-    }
-    return new Row(row, selectedColumns);
-  }
-
-  public int readRaw(ArrayList<Object[]> rows, int count) {
-    int read = 0;
-    while(read < count && hasNext()) {
-      rows.add(next().getRow());
-      read ++;
-    }
-    return read;
-  }
-
-  public void selectColumns(String columnsRequested) {
-    selectedColumns.clear();
-    if (columnsRequested != null) {
-      for (String columnRequested : columnsRequested.split(",")) {
-        try {
-          selectedColumns.add(Integer.parseInt(columnRequested));
-        } catch (NumberFormatException ex) {
-          throw new BadRequestFormattedException("Columns param should be comma-separated integers", ex);
-        }
-      }
-    }
-  }
-}
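
Cursor above streamed results lazily, pulling FETCH_SIZE (50) rows per Thrift round
trip and re-fetching transparently from hasNext(). A consumption sketch, reusing the
hypothetical 'conn' and 'op' from the Connection sketch earlier:

Cursor cursor = conn.getResults(op);
cursor.selectColumns("0,1");                  // optional projection by column index
for (ColumnDescription col : cursor.getSchema()) {
  System.out.print(col.getName() + "\t");     // header names from result-set metadata
}
System.out.println();
for (Row row : cursor) {                      // hasNext() triggers fetchNextBlock() as needed
  System.out.println(java.util.Arrays.toString(row.getRow()));
}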

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/DDLDelegator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/DDLDelegator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/DDLDelegator.java
deleted file mode 100644
index e609978..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/DDLDelegator.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.hive.service.cli.thrift.*;
-import org.apache.thrift.TException;
-
-import java.util.LinkedList;
-import java.util.List;
-
-public class DDLDelegator {
-  private Connection connection;
-
-  public DDLDelegator(Connection connection) {
-    this.connection = connection;
-  }
-
-  /**
-   * Retrieve list of tables in DB
-   * @param db db name
-   * @return list of table names
-   * @throws HiveClientException
-   */
-  public List<String> getTableList(TSessionHandle session, String db, String like) throws HiveClientException {
-    Cursor cursor = getTableListCursor(session, db, like);
-    return cursor.getValuesInColumn(0);
-  }
-
-  /**
-   * Retrieve a cursor over the list of tables in a DB
-   * @param db db name
-   * @return cursor over table names
-   * @throws HiveClientException
-   */
-   public Cursor getTableListCursor(TSessionHandle session, String db, String like) throws HiveClientException {
-    connection.executeSync(session, String.format("use %s", db));
-    TOperationHandle handle = connection.executeSync(session, String.format("show tables like '%s'", like));
-
-    return new Cursor(connection, handle);
-  }
-
-  /**
-   * Retrieve databases
-   * @param like '*' for all
-   * @return list of databases
-   * @throws HiveClientException
-   */
-  public List<String> getDBList(TSessionHandle session, String like) throws HiveClientException {
-    Cursor cursor = getDBListCursor(session, like);
-    return cursor.getValuesInColumn(0);
-  }
-
-  /**
-   * Retrieve a cursor over the databases
-   * @param like '*' for all
-   * @return cursor over database names
-   * @throws HiveClientException
-   */
-  public Cursor getDBListCursor(TSessionHandle session, String like) throws HiveClientException {
-    TOperationHandle handle = connection.executeSync(session, String.format("show databases like '%s'", like));
-    return new Cursor(connection, handle);
-  }
-
-  /**
-   * Retrieve table schema
-   * @param db database name
-   * @param table table name
-   * @return schema
-   * @throws HiveClientException
-   */
-  public List<ColumnDescription> getTableDescription(TSessionHandle session, final String db, final String table, String like, boolean extended) throws HiveClientException {
-    List<ColumnDescription> columnDescriptions = new LinkedList<ColumnDescription>();
-    Cursor cursor = getTableDescriptionCursor(session, db, table, like);
-    for(Row row : cursor) {
-      Object[] rowObjects = row.getRow();
-
-      ColumnDescription columnDescription;
-      if (extended) {
-        //TODO: retrieve sortedBy, clusteredBy, partitioned
-        columnDescription = ColumnDescriptionExtended.createExtendedColumnDescription(
-            (String) rowObjects[3], (String) rowObjects[5], (String) rowObjects[11],
-            false, false, false, (Integer) rowObjects[16]);
-      } else {
-        columnDescription = ColumnDescriptionShort.createShortColumnDescription(
-            (String) rowObjects[3], (String) rowObjects[5], (Integer) rowObjects[16]);
-      }
-      columnDescriptions.add(columnDescription);
-    }
-    return columnDescriptions;
-  }
-
-  /**
-   * Retrieve a cursor over the table schema
-   * @param db database name
-   * @param table table name
-   * @return cursor over schema rows
-   * @throws HiveClientException
-   */
-  public Cursor getTableDescriptionCursor(final TSessionHandle session, final String db, final String table, String like) throws HiveClientException {
-    if (like == null)
-      like = ".*";
-    else
-      like = ".*" + like + ".*";
-    final String finalLike = like;
-    TGetColumnsResp resp = new HiveCall<TGetColumnsResp>(connection,session) {
-      @Override
-      public TGetColumnsResp body() throws HiveClientException {
-
-        TGetColumnsReq req = new TGetColumnsReq(session);
-        req.setSchemaName(db);
-        req.setTableName(table);
-        req.setColumnName(finalLike);
-        try {
-          return connection.getClient().GetColumns(req);
-        } catch (TException e) {
-          throw new HiveClientException("H200 Unable to get table columns", e);
-        }
-      }
-
-    }.call();
-
-    return new Cursor(connection, resp.getOperationHandle());
-  }
-}
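
DDLDelegator above implemented metadata browsing by issuing USE/SHOW statements and a
Thrift GetColumns call. A sketch of the calls it exposed; 'conn' is the hypothetical
connection from the earlier sketch and the table name is a placeholder:

DDLDelegator ddl = conn.ddl();
TSessionHandle session = conn.getOrCreateSessionByTag("ddl");
List<String> databases = ddl.getDBList(session, "*");             // SHOW DATABASES LIKE '*'
List<String> tables = ddl.getTableList(session, "default", "*");  // USE default; SHOW TABLES LIKE '*'
List<ColumnDescription> columns =
    ddl.getTableDescription(session, "default", "my_table", null, false); // via GetColumns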

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthCredentials.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthCredentials.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthCredentials.java
deleted file mode 100644
index 2b3f43b..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthCredentials.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-public class HiveAuthCredentials {
-  private String password;
-
-  public String getPassword() {
-    return password;
-  }
-
-  public void setPassword(String password) {
-    this.password = password;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthRequiredException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthRequiredException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthRequiredException.java
deleted file mode 100644
index ac15f2f..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthRequiredException.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.ambari.view.hive.utils.ServiceFormattedException;
-
-public class HiveAuthRequiredException extends ServiceFormattedException {
-  public HiveAuthRequiredException() {
-    super("Hive Password Required", null, 401);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveCall.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveCall.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveCall.java
deleted file mode 100644
index d2a459f..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveCall.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.hive.service.cli.thrift.TSessionHandle;
-import org.apache.hive.service.cli.thrift.TStatus;
-import org.apache.hive.service.cli.thrift.TStatusCode;
-import org.apache.thrift.transport.TTransportException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-
-public abstract class HiveCall<T> {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(HiveCall.class);
-
-  protected final Connection conn;
-  protected final TSessionHandle sessionHandle;
-
-  public HiveCall(Connection connection) {
-    this(connection, null);
-  }
-
-  public HiveCall(Connection connection, TSessionHandle sessionHandle) {
-    this.conn = connection;
-    this.sessionHandle = sessionHandle;
-  }
-
-  public abstract T body() throws HiveClientException;
-
-  public boolean validateSession(T t) throws HiveClientException {
-    // Invalidate the session when the server reports an invalid session handle.
-    try {
-      Method m = t.getClass().getMethod("getStatus");
-      if (m != null) {
-        TStatus status = (TStatus) m.invoke(t);
-        if (status.getStatusCode().equals(TStatusCode.ERROR_STATUS) &&
-          status.getErrorMessage().startsWith("Invalid SessionHandle: SessionHandle")) {
-          try {
-            conn.invalidateSessionBySessionHandle(sessionHandle);
-          } catch (HiveClientException e) {
-            LOG.error(e.getMessage(), e);
-          }
-          throw new HiveClientException("Please retry. " + status.getErrorMessage(), null);
-        }
-      }
-    } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
-      // The response type exposes no readable getStatus() method;
-      // nothing to validate, so treat the session as usable.
-    }
-    return true;
-  }
-
-  public T call() throws HiveClientException {
-    T result = null;
-    boolean needRetry = false;
-    int attempts = 0;
-    do {
-      if (needRetry) {
-        needRetry = false;
-        attempts += 1;
-        try {
-          conn.closeConnection();
-        } catch (Exception e) {
-          LOG.error("Connection closed with error", e);
-        }
-      }
-
-      if (conn.getClient() == null) {
-        // The previous attempt closed the connection but failed to establish a new one,
-        // so try to open the connection again on this call.
-        conn.openConnection();
-      }
-
-      try {
-
-        synchronized (conn) {
-          result = body();
-          if (sessionHandle != null) {
-            this.validateSession(result);
-          }
-        }
-
-      } catch (HiveClientException ex) {
-        Throwable root = ExceptionUtils.getRootCause(ex);
-        if (attempts < 2 && root instanceof TTransportException) {
-          needRetry = true;
-          LOG.error("Retry call because of Transport Exception: " + root.toString());
-          continue;
-        }
-        throw ex;
-      }
-    } while (needRetry);
-    return result;
-  }
-
-}
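
For reference, a hedged usage sketch of the template above: callers subclass HiveCall anonymously, put a single Thrift invocation in body(), and let call() handle the reconnect-and-retry on TTransportException. The OpenSession request is illustrative; any TCLIService call fits the same shape (imports from org.apache.hive.service.cli.thrift and org.apache.thrift assumed):

    TOpenSessionResp resp = new HiveCall<TOpenSessionResp>(connection) {
      @Override
      public TOpenSessionResp body() throws HiveClientException {
        try {
          // One Thrift round trip; transport failures trigger one retry in call().
          return conn.getClient().OpenSession(new TOpenSessionReq());
        } catch (TException e) {
          throw new HiveClientException("Unable to open session", e);
        }
      }
    }.call();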

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientAuthRequiredException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientAuthRequiredException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientAuthRequiredException.java
deleted file mode 100644
index 955bdf9..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientAuthRequiredException.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-public class HiveClientAuthRequiredException extends Exception {
-  public HiveClientAuthRequiredException(String comment, Exception ex) {
-    super(comment + ((ex == null) ? "" : (": " + ex.toString())), ex);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientException.java
deleted file mode 100644
index 9dd04de..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientException.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-public class HiveClientException extends Exception {
-  public HiveClientException(String comment, Exception ex) {
-    super(comment + ((ex == null) ? "" : (": " + ex.toString())), ex);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientRuntimeException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientRuntimeException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientRuntimeException.java
deleted file mode 100644
index 1393012..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientRuntimeException.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-public class HiveClientRuntimeException extends RuntimeException {
-  public HiveClientRuntimeException(String comment, Exception ex) {
-    super(comment + ((ex == null) ? "" : (": " + ex.toString())), ex);
-  }
-}
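
The three client exceptions above share one message-building idiom (append the root cause when present) and differ mainly in checked vs. unchecked and in how callers react. A minimal handling sketch, with the wrapped call purely illustrative:

    try {
      doHiveCall();   // hypothetical helper wrapping a HiveCall
    } catch (HiveClientAuthRequiredException e) {
      // Credentials are missing: ask the user for the Hive password.
    } catch (HiveClientException e) {
      // Other client failure: the message already embeds the root cause.
    }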

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveErrorStatusException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveErrorStatusException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveErrorStatusException.java
deleted file mode 100644
index 1b306dc..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveErrorStatusException.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.hive.service.cli.thrift.TStatusCode;
-
-/**
- * Thrown when a Thrift operation completes with a 'failed' status.
- */
-public class HiveErrorStatusException extends HiveClientException {
-  public HiveErrorStatusException(TStatusCode statusCode, String comment) {
-    super(String.format("%s [%s]", comment, statusCode), null);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveInvalidQueryException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveInvalidQueryException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveInvalidQueryException.java
deleted file mode 100644
index 473ab65..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveInvalidQueryException.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.hive.service.cli.thrift.TStatusCode;
-
-public class HiveInvalidQueryException extends HiveClientException {
-  public HiveInvalidQueryException(TStatusCode statusCode, String comment) {
-    super(String.format("%s [%s]", comment, statusCode), null);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpBasicAuthInterceptor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpBasicAuthInterceptor.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpBasicAuthInterceptor.java
deleted file mode 100644
index dea8fcb..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpBasicAuthInterceptor.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.http.Header;
-import org.apache.http.HttpRequest;
-import org.apache.http.auth.UsernamePasswordCredentials;
-import org.apache.http.client.CookieStore;
-import org.apache.http.impl.auth.AuthSchemeBase;
-import org.apache.http.impl.auth.BasicScheme;
-import org.apache.http.protocol.HttpContext;
-
-import java.util.Map;
-
-/**
- * The class is instantiated with a username and password; it is then
- * used to add a header carrying these credentials to outgoing HTTP requests.
- */
-public class HttpBasicAuthInterceptor extends HttpRequestInterceptorBase {
-  UsernamePasswordCredentials credentials;
-  AuthSchemeBase authScheme;
-
-  public HttpBasicAuthInterceptor(String username, String password, CookieStore cookieStore,
-                           String cn, boolean isSSL, Map<String, String> additionalHeaders) {
-    super(cookieStore, cn, isSSL, additionalHeaders);
-    this.authScheme = new BasicScheme();
-    if (username != null){
-      this.credentials = new UsernamePasswordCredentials(username, password);
-    }
-  }
-
-  @Override
-  protected void addHttpAuthHeader(HttpRequest httpRequest, HttpContext httpContext)
-    throws Exception {
-    Header basicAuthHeader = authScheme.authenticate(credentials, httpRequest, httpContext);
-    httpRequest.addHeader(basicAuthHeader);
-  }
-}
\ No newline at end of file
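
For context, the header produced by BasicScheme.authenticate() above is the standard RFC 7617 form. An equivalent construction using only the JDK, shown as a standalone sketch with illustrative credentials:

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    String user = "hive";       // illustrative
    String password = "secret"; // illustrative
    String token = Base64.getEncoder()
        .encodeToString((user + ":" + password).getBytes(StandardCharsets.UTF_8));
    String headerValue = "Basic " + token;   // value of the Authorization header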

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpKerberosRequestInterceptor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpKerberosRequestInterceptor.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpKerberosRequestInterceptor.java
deleted file mode 100644
index 786c94d..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpKerberosRequestInterceptor.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.hive.service.auth.HttpAuthUtils;
-import org.apache.http.HttpException;
-import org.apache.http.HttpRequest;
-import org.apache.http.client.CookieStore;
-import org.apache.http.protocol.HttpContext;
-
-import java.util.Map;
-import java.util.concurrent.locks.ReentrantLock;
-
-/**
- * Authentication interceptor which adds a Base64-encoded payload,
- * containing the username and Kerberos service ticket,
- * to the outgoing HTTP request header.
- */
-public class HttpKerberosRequestInterceptor extends HttpRequestInterceptorBase {
-
-  // A fair reentrant lock
-  private static ReentrantLock kerberosLock = new ReentrantLock(true);
-  String principal;
-  String host;
-  String serverHttpUrl;
-  boolean assumeSubject;
-
-  public HttpKerberosRequestInterceptor(String principal, String host,
-      String serverHttpUrl, boolean assumeSubject, CookieStore cs, String cn,
-      boolean isSSL, Map<String, String> additionalHeaders) {
-    super(cs, cn, isSSL, additionalHeaders);
-    this.principal = principal;
-    this.host = host;
-    this.serverHttpUrl = serverHttpUrl;
-    this.assumeSubject = assumeSubject;
-  }
-
-  @Override
-  protected void addHttpAuthHeader(HttpRequest httpRequest,
-    HttpContext httpContext) throws Exception {
-    // Locking ensures the tickets are generated one at a time for concurrent requests.
-    kerberosLock.lock();
-    try {
-      // Generate the service ticket for sending to the server.
-      String kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket(
-              principal, host, serverHttpUrl, assumeSubject);
-      // Set the session key token (Base64 encoded) as the Authorization header value.
-      httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION,
-        HttpAuthUtils.NEGOTIATE + " " + kerberosAuthHeader);
-    } catch (Exception e) {
-      throw new HttpException(e.getMessage(), e);
-    } finally {
-      kerberosLock.unlock();
-    }
-  }
-}
\ No newline at end of file
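
The fair ReentrantLock above serializes ticket generation so concurrent requests each obtain a distinct service ticket, with the lock granted in FIFO order. The general pattern, as a standalone sketch:

    import java.util.concurrent.locks.ReentrantLock;

    final ReentrantLock lock = new ReentrantLock(true);   // true = fair (FIFO) ordering
    lock.lock();                                          // acquire before the try block
    try {
      // critical section: generate exactly one ticket at a time
    } finally {
      lock.unlock();                                      // always released, even on failure
    }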

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpRequestInterceptorBase.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpRequestInterceptorBase.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpRequestInterceptorBase.java
deleted file mode 100644
index 7dc3c53..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpRequestInterceptorBase.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.http.HttpException;
-import org.apache.http.HttpRequest;
-import org.apache.http.HttpRequestInterceptor;
-import org.apache.http.client.CookieStore;
-import org.apache.http.client.protocol.ClientContext;
-import org.apache.http.protocol.HttpContext;
-
-import java.io.IOException;
-import java.util.Map;
-
-public abstract class HttpRequestInterceptorBase implements HttpRequestInterceptor {
-  CookieStore cookieStore;
-  boolean isCookieEnabled;
-  String cookieName;
-  boolean isSSL;
-  Map<String, String> additionalHeaders;
-
-  public HttpRequestInterceptorBase(CookieStore cs, String cn, boolean isSSL,
-                                    Map<String, String> additionalHeaders) {
-    this.cookieStore = cs;
-    this.isCookieEnabled = (cs != null);
-    this.cookieName = cn;
-    this.isSSL = isSSL;
-    this.additionalHeaders = additionalHeaders;
-  }
-
-  // Abstract function to add HttpAuth Header
-  protected abstract void addHttpAuthHeader(HttpRequest httpRequest, HttpContext httpContext)
-    throws Exception;
-
-  @Override
-  public void process(HttpRequest httpRequest, HttpContext httpContext)
-    throws HttpException, IOException {
-    try {
-      // If cookie-based authentication is allowed, generate the ticket only when
-      // necessary: either when there are no server-side cookies in the cookie store
-      // that can be sent back, or when the server returns a 401 error code indicating
-      // that the previous cookie has expired.
-      if (isCookieEnabled) {
-        httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore);
-      }
-      // Generate the Kerberos ticket under the following scenarios:
-      // 1. Cookie authentication is disabled, OR
-      // 2. The request is being sent for the first time, OR
-      // 3. The server returned a 401, which sometimes means the cookie has expired, OR
-      // 4. The cookie is secured whereas the client connection does not use SSL.
-      if (!isCookieEnabled || ((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null &&
-          (cookieStore == null ||
-          Utils.needToSendCredentials(cookieStore, cookieName, isSSL))) ||
-          (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null &&
-          httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY).
-          equals(Utils.HIVE_SERVER2_RETRY_TRUE)))) {
-        addHttpAuthHeader(httpRequest, httpContext);
-      }
-      if (isCookieEnabled) {
-        httpContext.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_FALSE);
-      }
-      // Insert the additional http headers
-      if (additionalHeaders != null) {
-        for (Map.Entry<String, String> entry : additionalHeaders.entrySet()) {
-          httpRequest.addHeader(entry.getKey(), entry.getValue());
-        }
-      }
-    } catch (Exception e) {
-      throw new HttpException(e.getMessage(), e);
-    }
-  }
-}
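
The guard in process() is easier to audit when its three branches are named. A behavior-equivalent restatement, assuming the retry marker is the string constant used by the sibling Utils class in this package:

    boolean firstAttempt   = httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null;
    boolean retryRequested = Utils.HIVE_SERVER2_RETRY_TRUE.equals(
        httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY));
    boolean noUsableCookie = cookieStore == null
        || Utils.needToSendCredentials(cookieStore, cookieName, isSSL);

    if (!isCookieEnabled || (firstAttempt && noUsableCookie) || retryRequested) {
      addHttpAuthHeader(httpRequest, httpContext);
    }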

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/LogsCursor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/LogsCursor.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/LogsCursor.java
deleted file mode 100644
index a6705e4..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/LogsCursor.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.hive.service.cli.thrift.TFetchOrientation;
-import org.apache.hive.service.cli.thrift.TFetchResultsReq;
-import org.apache.hive.service.cli.thrift.TOperationHandle;
-
-public class LogsCursor extends Cursor {
-  public LogsCursor(Connection connection, TOperationHandle opHandle) {
-    super(connection, opHandle);
-  }
-
-  @Override
-  protected TFetchResultsReq getFetchResultsReq(TFetchOrientation orientation) {
-    TFetchResultsReq req = super.getFetchResultsReq(orientation);
-    req.setFetchType((short) 1);
-    return req;
-  }
-}
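
The fetch type selects which stream FetchResults returns: 0 (the default) is the query result set, 1 is the operation log, mirroring HiveServer2's FetchType semantics. A hedged standalone sketch of the request this cursor builds (the maxRows value is illustrative):

    TFetchResultsReq req =
        new TFetchResultsReq(opHandle, TFetchOrientation.FETCH_NEXT, 100 /* maxRows */);
    req.setFetchType((short) 1);   // 1 = operation log; 0 = query results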

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Row.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Row.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Row.java
deleted file mode 100644
index cfce1f0..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Row.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import java.util.Arrays;
-import java.util.HashSet;
-
-public class Row {
-  private Object[] row;
-
-  public Row(Object[] row) {
-    this(row, null);
-  }
-
-  public Row(Object[] row, HashSet<Integer> selectedColumns) {
-    if (selectedColumns == null || selectedColumns.isEmpty()) {
-      this.row = row.clone();
-    } else {
-      this.row = new Object[selectedColumns.size()];
-      int rowIndex = 0;
-      for (Integer selectedIndex : selectedColumns) {
-        this.row[rowIndex] = row[selectedIndex];
-        rowIndex++;
-      }
-    }
-  }
-
-  public Object[] getRow() {
-    return row;
-  }
-
-  public void setRow(Object[] row) {
-    this.row = row;
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
-
-    Row other = (Row) o;
-    return Arrays.equals(row, other.row);
-  }
-
-  @Override
-  public int hashCode() {
-    return Arrays.hashCode(row);
-  }
-
-  @Override
-  public String toString() {
-    return "Row{" +
-            "row=" + Arrays.toString(row) +
-            '}';
-  }
-}
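
One subtlety worth noting: the projected column order follows the iteration order of the selectedColumns set, and a plain HashSet leaves that order unspecified. Passing a LinkedHashSet (a HashSet subclass) pins the projection to insertion order; the values below are illustrative:

    Object[] full = {1, "alice", "alice@example.com"};
    Row projected = new Row(full,
        new java.util.LinkedHashSet<>(java.util.Arrays.asList(0, 2)));
    System.out.println(projected);   // Row{row=[1, alice@example.com]}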

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalConnection.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalConnection.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalConnection.java
deleted file mode 100644
index 3e2c3cc..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalConnection.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.utils.UserLocal;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class UserLocalConnection extends UserLocal<Connection> {
-  protected static final Logger LOG =
-      LoggerFactory.getLogger(UserLocalConnection.class);
-
-  public UserLocalConnection() {
-    super(Connection.class);
-  }
-
-  private UserLocal<HiveAuthCredentials> authCredentialsLocal =
-      new UserLocalHiveAuthCredentials();
-
-  @Override
-  protected Connection initialValue(ViewContext context) {
-    LOG.debug("creating connection for context: {}", context);
-    ConnectionFactory hiveConnectionFactory = new ConnectionFactory(context, authCredentialsLocal.get(context));
-    // Credentials should not be kept in memory; the password is erased
-    // once the connection has been established.
-    authCredentialsLocal.remove(context);
-    Connection connection = hiveConnectionFactory.create();
-    LOG.debug("returning connection: {} for context: {}", connection, context);
-    return connection;
-  }
-
-}
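
A hedged sketch of the intended flow, assuming UserLocal instances of the same type share one per-user store (which the separate instantiation inside UserLocalConnection implies); viewContext and the login step are illustrative. The password is staged per user, then consumed exactly once when that user's first connection is created:

    UserLocalHiveAuthCredentials credentialsLocal = new UserLocalHiveAuthCredentials();
    credentialsLocal.get(viewContext).setPassword("secret");  // staged, e.g. by a login endpoint

    UserLocalConnection connectionLocal = new UserLocalConnection();
    Connection conn = connectionLocal.get(viewContext);       // builds the connection, wipes the password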

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalHiveAuthCredentials.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalHiveAuthCredentials.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalHiveAuthCredentials.java
deleted file mode 100644
index f658c14..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalHiveAuthCredentials.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.utils.UserLocal;
-
-public class UserLocalHiveAuthCredentials extends UserLocal<HiveAuthCredentials> {
-  public UserLocalHiveAuthCredentials() {
-    super(HiveAuthCredentials.class);
-  }
-
-  @Override
-  protected HiveAuthCredentials initialValue(ViewContext context) {
-    return new HiveAuthCredentials();
-  }
-}