Posted to commits@kyuubi.apache.org by ch...@apache.org on 2022/07/13 02:19:07 UTC

[incubator-kyuubi] branch master updated: [KYUUBI #3043] Clean up Kyuubi Hive JDBC client

This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new b99f25f2f [KYUUBI #3043] Clean up Kyuubi Hive JDBC client
b99f25f2f is described below

commit b99f25f2f4ffa3315662ae324f7e0c6a37475cb9
Author: Cheng Pan <ch...@apache.org>
AuthorDate: Wed Jul 13 10:18:54 2022 +0800

    [KYUUBI #3043] Clean up Kyuubi Hive JDBC client
    
    ### _Why are the changes needed?_
    
    Code clean up
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #3043 from pan3793/jdbc-cleanup.
    
    Closes #3043
    
    1d1f8fa5 [Cheng Pan] Clean up Kyuubi Hive JDBC client
    
    Authored-by: Cheng Pan <ch...@apache.org>
    Signed-off-by: Cheng Pan <ch...@apache.org>
---
 .../org/apache/kyuubi/jdbc/KyuubiHiveDriver.java   |   6 +-
 .../jdbc/hive/HttpKerberosRequestInterceptor.java  |  10 +-
 .../jdbc/hive/HttpRequestInterceptorBase.java      |  23 +-
 .../kyuubi/jdbc/hive/HttpTokenAuthInterceptor.java |   2 +-
 .../org/apache/kyuubi/jdbc/hive/JdbcColumn.java    |  36 +--
 .../org/apache/kyuubi/jdbc/hive/JdbcTable.java     |  12 +-
 .../apache/kyuubi/jdbc/hive/KyuubiConnection.java  | 158 ++++-------
 .../kyuubi/jdbc/hive/KyuubiDatabaseMetaData.java   |  13 +-
 .../kyuubi/jdbc/hive/KyuubiPreparedStatement.java  |  20 +-
 .../kyuubi/jdbc/hive/KyuubiQueryResultSet.java     |   9 +-
 .../java/org/apache/kyuubi/jdbc/hive/Utils.java    |  35 +--
 .../jdbc/hive/XsrfHttpRequestInterceptor.java      |   9 +-
 .../jdbc/hive/ZooKeeperHiveClientHelper.java       |  31 +--
 .../kyuubi/jdbc/hive/auth/HttpAuthUtils.java       |  26 +-
 .../kyuubi/jdbc/hive/auth/PlainSaslHelper.java     |   5 +-
 .../org/apache/kyuubi/jdbc/hive/auth/SaslQOP.java  |   3 +-
 .../kyuubi/jdbc/hive/cli/ColumnBasedSet.java       |  75 +----
 .../apache/kyuubi/jdbc/hive/cli/ColumnBuffer.java  | 306 +--------------------
 .../apache/kyuubi/jdbc/hive/cli/ColumnValue.java   | 180 ------------
 .../org/apache/kyuubi/jdbc/hive/cli/FetchType.java |   9 -
 .../apache/kyuubi/jdbc/hive/cli/RowBasedSet.java   |  46 +---
 .../org/apache/kyuubi/jdbc/hive/cli/RowSet.java    |  10 -
 .../kyuubi/jdbc/hive/cli/TypeDescriptor.java       | 172 ------------
 .../kyuubi/jdbc/hive/cli/TypeQualifiers.java       | 111 --------
 24 files changed, 134 insertions(+), 1173 deletions(-)

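For illustration, a minimal sketch of the kind of simplification this cleanup applies throughout the JDBC client: `KyuubiHiveDriver#acceptsURL` now uses `Stream.anyMatch` in place of `filter(...).findFirst().isPresent()` (see the first hunk below). The class name and the prefix values in the sketch are assumptions for a self-contained example; the real constant is `Utils.URL_PREFIX_LIST`.

```java
import java.util.Arrays;
import java.util.List;

public class UrlPrefixCheck {
  // Hypothetical stand-in for Utils.URL_PREFIX_LIST (the real constant lives in
  // org.apache.kyuubi.jdbc.hive.Utils); the values here are assumptions.
  private static final List<String> URL_PREFIX_LIST =
      Arrays.asList("jdbc:hive2://", "jdbc:kyuubi://");

  // Same shape as the simplified KyuubiHiveDriver#acceptsURL:
  // Stream.anyMatch replaces filter(...).findFirst().isPresent().
  public static boolean acceptsURL(String url) {
    return url != null && URL_PREFIX_LIST.stream().anyMatch(url::startsWith);
  }

  public static void main(String[] args) {
    System.out.println(acceptsURL("jdbc:hive2://localhost:10009/default")); // true
    System.out.println(acceptsURL("jdbc:mysql://localhost:3306/db"));       // false
  }
}
```
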
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/KyuubiHiveDriver.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/KyuubiHiveDriver.java
index 4ee231f1d..cdb591677 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/KyuubiHiveDriver.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/KyuubiHiveDriver.java
@@ -48,11 +48,7 @@ public class KyuubiHiveDriver implements Driver {
 
   @Override
   public boolean acceptsURL(String url) throws SQLException {
-    return url != null
-        && Utils.URL_PREFIX_LIST.stream()
-            .filter(pre -> url.startsWith(pre))
-            .findFirst()
-            .isPresent();
+    return url != null && Utils.URL_PREFIX_LIST.stream().anyMatch(url::startsWith);
   }
 
   @Override
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpKerberosRequestInterceptor.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpKerberosRequestInterceptor.java
index c799fb343..13bbc551b 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpKerberosRequestInterceptor.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpKerberosRequestInterceptor.java
@@ -32,18 +32,15 @@ import org.apache.kyuubi.jdbc.hive.auth.HttpAuthUtils;
  */
 public class HttpKerberosRequestInterceptor extends HttpRequestInterceptorBase {
 
+  private static final ReentrantLock kerberosLock = new ReentrantLock(true);
+
   String principal;
   String host;
-  String serverHttpUrl;
   Subject loggedInSubject;
 
-  // A fair reentrant lock
-  private static ReentrantLock kerberosLock = new ReentrantLock(true);
-
   public HttpKerberosRequestInterceptor(
       String principal,
       String host,
-      String serverHttpUrl,
       Subject loggedInSubject,
       CookieStore cs,
       String cn,
@@ -53,7 +50,6 @@ public class HttpKerberosRequestInterceptor extends HttpRequestInterceptorBase {
     super(cs, cn, isSSL, additionalHeaders, customCookies);
     this.principal = principal;
     this.host = host;
-    this.serverHttpUrl = serverHttpUrl;
     this.loggedInSubject = loggedInSubject;
   }
 
@@ -65,7 +61,7 @@ public class HttpKerberosRequestInterceptor extends HttpRequestInterceptorBase {
       // Locking ensures the tokens are unique in case of concurrent requests
       kerberosLock.lock();
       String kerberosAuthHeader =
-          HttpAuthUtils.getKerberosServiceTicket(principal, host, serverHttpUrl, loggedInSubject);
+          HttpAuthUtils.getKerberosServiceTicket(principal, host, loggedInSubject);
       // Set the session key token (Base64 encoded) in the headers
       httpRequest.addHeader(
           HttpAuthUtils.AUTHORIZATION + ": " + HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader);
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpRequestInterceptorBase.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpRequestInterceptorBase.java
index eb00e344e..b3a035025 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpRequestInterceptorBase.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpRequestInterceptorBase.java
@@ -24,7 +24,7 @@ import org.apache.http.HttpException;
 import org.apache.http.HttpRequest;
 import org.apache.http.HttpRequestInterceptor;
 import org.apache.http.client.CookieStore;
-import org.apache.http.client.protocol.ClientContext;
+import org.apache.http.client.protocol.HttpClientContext;
 import org.apache.http.protocol.HttpContext;
 
 public abstract class HttpRequestInterceptorBase implements HttpRequestInterceptor {
@@ -62,7 +62,7 @@ public abstract class HttpRequestInterceptorBase implements HttpRequestIntercept
       // cookiestore which can be send back or when the server returns a 401 error code
       // indicating that the previous cookie has expired.
       if (isCookieEnabled) {
-        httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore);
+        httpContext.setAttribute(HttpClientContext.COOKIE_STORE, cookieStore);
       }
       // Generate the kerberos ticket under the following scenarios:
       // 1. Cookie Authentication is disabled OR
@@ -72,8 +72,7 @@ public abstract class HttpRequestInterceptorBase implements HttpRequestIntercept
       if (!isCookieEnabled
           || ((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null
                   && (cookieStore == null
-                      || (cookieStore != null
-                          && Utils.needToSendCredentials(cookieStore, cookieName, isSSL))))
+                      || Utils.needToSendCredentials(cookieStore, cookieName, isSSL)))
               || (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null
                   && httpContext
                       .getAttribute(Utils.HIVE_SERVER2_RETRY_KEY)
@@ -91,18 +90,22 @@ public abstract class HttpRequestInterceptorBase implements HttpRequestIntercept
       }
       // Add custom cookies if passed to the jdbc driver
       if (customCookies != null) {
-        String cookieHeaderKeyValues = "";
+        StringBuilder cookieHeaderKeyValues = new StringBuilder();
         Header cookieHeaderServer = httpRequest.getFirstHeader("Cookie");
         if ((cookieHeaderServer != null) && (cookieHeaderServer.getValue() != null)) {
-          cookieHeaderKeyValues = cookieHeaderServer.getValue();
+          cookieHeaderKeyValues = new StringBuilder(cookieHeaderServer.getValue());
         }
         for (Map.Entry<String, String> entry : customCookies.entrySet()) {
-          cookieHeaderKeyValues += ";" + entry.getKey() + "=" + entry.getValue();
+          cookieHeaderKeyValues
+              .append(";")
+              .append(entry.getKey())
+              .append("=")
+              .append(entry.getValue());
         }
-        if (cookieHeaderKeyValues.startsWith(";")) {
-          cookieHeaderKeyValues = cookieHeaderKeyValues.substring(1);
+        if (cookieHeaderKeyValues.toString().startsWith(";")) {
+          cookieHeaderKeyValues = new StringBuilder(cookieHeaderKeyValues.substring(1));
         }
-        httpRequest.addHeader("Cookie", cookieHeaderKeyValues);
+        httpRequest.addHeader("Cookie", cookieHeaderKeyValues.toString());
       }
     } catch (Exception e) {
       throw new HttpException(e.getMessage(), e);
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpTokenAuthInterceptor.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpTokenAuthInterceptor.java
index 2523c918c..2e6a45007 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpTokenAuthInterceptor.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/HttpTokenAuthInterceptor.java
@@ -27,7 +27,7 @@ import org.apache.http.protocol.HttpContext;
  * these credentials to HTTP requests
  */
 public class HttpTokenAuthInterceptor extends HttpRequestInterceptorBase {
-  private String tokenStr;
+  private final String tokenStr;
   private static final String HIVE_DELEGATION_TOKEN_HEADER = "X-Hive-Delegation-Token";
 
   public HttpTokenAuthInterceptor(
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcColumn.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcColumn.java
index 6db89defa..c885eea30 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcColumn.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcColumn.java
@@ -68,6 +68,14 @@ public class JdbcColumn {
     return type;
   }
 
+  public String getComment() {
+    return comment;
+  }
+
+  public int getOrdinalPos() {
+    return ordinalPos;
+  }
+
   static String columnClassName(TTypeId tType, JdbcColumnAttributes columnAttributes)
       throws SQLException {
     int columnType = convertTTypeIdToSqlType(tType);
@@ -353,32 +361,4 @@ public class JdbcColumn {
         throw new KyuubiSQLException("Invalid column type: " + columnType);
     }
   }
-
-  public Integer getNumPrecRadix() {
-    if (type.equalsIgnoreCase("tinyint")) {
-      return 10;
-    } else if (type.equalsIgnoreCase("smallint")) {
-      return 10;
-    } else if (type.equalsIgnoreCase("int")) {
-      return 10;
-    } else if (type.equalsIgnoreCase("bigint")) {
-      return 10;
-    } else if (type.equalsIgnoreCase("float")) {
-      return 10;
-    } else if (type.equalsIgnoreCase("double")) {
-      return 10;
-    } else if (type.equalsIgnoreCase("decimal")) {
-      return 10;
-    } else { // anything else including boolean and string is null
-      return null;
-    }
-  }
-
-  public String getComment() {
-    return comment;
-  }
-
-  public int getOrdinalPos() {
-    return ordinalPos;
-  }
 }
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcTable.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcTable.java
index a0127cf21..efb976c44 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcTable.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcTable.java
@@ -17,14 +17,12 @@
 
 package org.apache.kyuubi.jdbc.hive;
 
-import java.sql.SQLException;
-
 /** Table metadata. */
 public class JdbcTable {
-  private String tableCatalog;
-  private String tableName;
-  private String type;
-  private String comment;
+  private final String tableCatalog;
+  private final String tableName;
+  private final String type;
+  private final String comment;
 
   public JdbcTable(String tableCatalog, String tableName, String type, String comment) {
     this.tableCatalog = tableCatalog;
@@ -45,7 +43,7 @@ public class JdbcTable {
     return type;
   }
 
-  public String getSqlTableType() throws SQLException {
+  public String getSqlTableType() {
     return KyuubiDatabaseMetaData.toJdbcTableType(type);
   }
 
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiConnection.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiConnection.java
index d2b1d05f9..8ebdcf82c 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiConnection.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiConnection.java
@@ -17,6 +17,8 @@
 
 package org.apache.kyuubi.jdbc.hive;
 
+import static org.apache.kyuubi.jdbc.hive.Utils.JdbcConnectionParams.*;
+
 import java.io.*;
 import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
@@ -92,7 +94,6 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
   private JdbcConnectionParams connParams;
   private TTransport transport;
   private boolean assumeSubject;
-  // TODO should be replaced by CliServiceClient
   private TCLIService.Iface client;
   private boolean isClosed = true;
   private SQLWarning warningChain = null;
@@ -114,13 +115,7 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
 
   private boolean isBeeLineMode;
 
-  /**
-   * Get all direct HiveServer2 URLs from a ZooKeeper based HiveServer2 URL
-   *
-   * @param zookeeperBasedHS2Url
-   * @return
-   * @throws Exception
-   */
+  /** Get all direct HiveServer2 URLs from a ZooKeeper based HiveServer2 URL */
   public static List<JdbcConnectionParams> getAllUrls(String zookeeperBasedHS2Url)
       throws Exception {
     JdbcConnectionParams params = Utils.parseURL(zookeeperBasedHS2Url, new Properties());
@@ -151,14 +146,14 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
     port = connParams.getPort();
     sessConfMap = connParams.getSessionVars();
 
-    if (sessConfMap.containsKey(JdbcConnectionParams.FETCH_SIZE)) {
-      fetchSize = Integer.parseInt(sessConfMap.get(JdbcConnectionParams.FETCH_SIZE));
+    if (sessConfMap.containsKey(FETCH_SIZE)) {
+      fetchSize = Integer.parseInt(sessConfMap.get(FETCH_SIZE));
     }
-    if (sessConfMap.containsKey(JdbcConnectionParams.INIT_FILE)) {
-      initFile = sessConfMap.get(JdbcConnectionParams.INIT_FILE);
+    if (sessConfMap.containsKey(INIT_FILE)) {
+      initFile = sessConfMap.get(INIT_FILE);
     }
-    wmPool = sessConfMap.get(JdbcConnectionParams.WM_POOL);
-    for (String application : JdbcConnectionParams.APPLICATION) {
+    wmPool = sessConfMap.get(WM_POOL);
+    for (String application : APPLICATION) {
       wmApp = sessConfMap.get(application);
       if (wmApp != null) break;
     }
@@ -178,7 +173,7 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
 
     int maxRetries = 1;
     try {
-      String strRetries = sessConfMap.get(JdbcConnectionParams.RETRIES);
+      String strRetries = sessConfMap.get(RETRIES);
       if (StringUtils.isNotBlank(strRetries)) {
         maxRetries = Integer.parseInt(strRetries);
       }
@@ -391,8 +386,7 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
 
   private void openTransport() throws Exception {
     assumeSubject =
-        JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(
-            sessConfMap.get(JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE));
+        AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(sessConfMap.get(AUTH_KERBEROS_AUTH_TYPE));
     transport = isHttpTransportMode() ? createHttpTransport() : createBinaryTransport();
     if (!transport.isOpen()) {
       transport.open();
@@ -410,7 +404,7 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
     String schemeName = useSsl ? "https" : "http";
     // http path should begin with "/"
     String httpPath;
-    httpPath = sessConfMap.get(JdbcConnectionParams.HTTP_PATH);
+    httpPath = sessConfMap.get(HTTP_PATH);
     if (httpPath == null) {
       httpPath = "/";
     } else if (!httpPath.startsWith("/")) {
@@ -430,13 +424,12 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
 
   private CloseableHttpClient getHttpClient(Boolean useSsl) throws SQLException {
     boolean isCookieEnabled =
-        sessConfMap.get(JdbcConnectionParams.COOKIE_AUTH) == null
-            || (!JdbcConnectionParams.COOKIE_AUTH_FALSE.equalsIgnoreCase(
-                sessConfMap.get(JdbcConnectionParams.COOKIE_AUTH)));
+        sessConfMap.get(COOKIE_AUTH) == null
+            || (!COOKIE_AUTH_FALSE.equalsIgnoreCase(sessConfMap.get(COOKIE_AUTH)));
     String cookieName =
-        sessConfMap.get(JdbcConnectionParams.COOKIE_NAME) == null
-            ? JdbcConnectionParams.DEFAULT_COOKIE_NAMES_HS2
-            : sessConfMap.get(JdbcConnectionParams.COOKIE_NAME);
+        sessConfMap.get(COOKIE_NAME) == null
+            ? DEFAULT_COOKIE_NAMES_HS2
+            : sessConfMap.get(COOKIE_NAME);
     CookieStore cookieStore = isCookieEnabled ? new BasicCookieStore() : null;
     HttpClientBuilder httpClientBuilder;
     // Request interceptor for any request pre-processing logic
@@ -448,13 +441,11 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
     for (Map.Entry<String, String> entry : sessConfMap.entrySet()) {
       String key = entry.getKey();
 
-      if (key.startsWith(JdbcConnectionParams.HTTP_HEADER_PREFIX)) {
-        additionalHttpHeaders.put(
-            key.substring(JdbcConnectionParams.HTTP_HEADER_PREFIX.length()), entry.getValue());
+      if (key.startsWith(HTTP_HEADER_PREFIX)) {
+        additionalHttpHeaders.put(key.substring(HTTP_HEADER_PREFIX.length()), entry.getValue());
       }
-      if (key.startsWith(JdbcConnectionParams.HTTP_COOKIE_PREFIX)) {
-        customCookies.put(
-            key.substring(JdbcConnectionParams.HTTP_COOKIE_PREFIX.length()), entry.getValue());
+      if (key.startsWith(HTTP_COOKIE_PREFIX)) {
+        customCookies.put(key.substring(HTTP_COOKIE_PREFIX.length()), entry.getValue());
       }
     }
     // Configure http client for kerberos/password based authentication
@@ -468,16 +459,15 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
           throw new KyuubiSQLException("The Subject is not set");
         }
       }
-      /**
+      /*
        * Add an interceptor which sets the appropriate header in the request. It does the kerberos
        * authentication and get the final service ticket, for sending to the server before every
        * request. In https mode, the entire information is encrypted
        */
       requestInterceptor =
           new HttpKerberosRequestInterceptor(
-              sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL),
+              sessConfMap.get(AUTH_PRINCIPAL),
               host,
-              getServerHttpUrl(useSsl),
               loggedInSubject,
               cookieStore,
               cookieName,
@@ -492,7 +482,7 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
             new HttpTokenAuthInterceptor(
                 tokenStr, cookieStore, cookieName, useSsl, additionalHttpHeaders, customCookies);
       } else {
-        /**
+        /*
          * Add an interceptor to pass username/password in the header. In https mode, the entire
          * information is encrypted
          */
@@ -567,27 +557,27 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
 
     // Configure http client for SSL
     if (useSsl) {
-      String useTwoWaySSL = sessConfMap.get(JdbcConnectionParams.USE_TWO_WAY_SSL);
-      String sslTrustStorePath = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
-      String sslTrustStorePassword = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
+      String useTwoWaySSL = sessConfMap.get(USE_TWO_WAY_SSL);
+      String sslTrustStorePath = sessConfMap.get(SSL_TRUST_STORE);
+      String sslTrustStorePassword = sessConfMap.get(SSL_TRUST_STORE_PASSWORD);
       KeyStore sslTrustStore;
       SSLConnectionSocketFactory socketFactory;
       SSLContext sslContext;
-      /**
+      /*
        * The code within the try block throws: SSLInitializationException, KeyStoreException,
        * IOException, NoSuchAlgorithmException, CertificateException, KeyManagementException &
        * UnrecoverableKeyException. We don't want the client to retry on any of these, hence we
        * catch all and throw a SQLException.
        */
       try {
-        if (useTwoWaySSL != null && useTwoWaySSL.equalsIgnoreCase(JdbcConnectionParams.TRUE)) {
+        if (useTwoWaySSL != null && useTwoWaySSL.equalsIgnoreCase(TRUE)) {
           socketFactory = getTwoWaySSLSocketFactory();
         } else if (sslTrustStorePath == null || sslTrustStorePath.isEmpty()) {
           // Create a default socket factory based on standard JSSE trust material
           socketFactory = SSLConnectionSocketFactory.getSocketFactory();
         } else {
           // Pick trust store config from the given path
-          sslTrustStore = KeyStore.getInstance(JdbcConnectionParams.SSL_TRUST_STORE_TYPE);
+          sslTrustStore = KeyStore.getInstance(SSL_TRUST_STORE_TYPE);
           try (FileInputStream fis = new FileInputStream(sslTrustStorePath)) {
             sslTrustStore.load(fis, sslTrustStorePassword.toCharArray());
           }
@@ -609,12 +599,7 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
     return httpClientBuilder.build();
   }
 
-  /**
-   * Create underlying SSL or non-SSL transport
-   *
-   * @return TTransport
-   * @throws TTransportException
-   */
+  /** Create underlying SSL or non-SSL transport */
   private TTransport createUnderlyingTransport() throws TTransportException {
     TTransport transport = null;
     // Note: Thrift returns an SSL socket that is already bound to the specified host:port
@@ -624,8 +609,8 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
     // if dynamic service discovery is configured.
     if (isSslConnection()) {
       // get SSL socket
-      String sslTrustStore = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
-      String sslTrustStorePassword = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
+      String sslTrustStore = sessConfMap.get(SSL_TRUST_STORE);
+      String sslTrustStorePassword = sessConfMap.get(SSL_TRUST_STORE_PASSWORD);
 
       if (sslTrustStore == null || sslTrustStore.isEmpty()) {
         transport = HiveAuthUtils.getSSLSocket(host, port, loginTimeout);
@@ -653,19 +638,16 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
     try {
       TTransport socketTransport = createUnderlyingTransport();
       // handle secure connection if specified
-      if (!JdbcConnectionParams.AUTH_SIMPLE.equals(
-          sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))) {
+      if (!AUTH_SIMPLE.equals(sessConfMap.get(AUTH_TYPE))) {
         // If Kerberos
         Map<String, String> saslProps = new HashMap<>();
         SaslQOP saslQOP = SaslQOP.AUTH;
-        if (sessConfMap.containsKey(JdbcConnectionParams.AUTH_QOP)) {
+        if (sessConfMap.containsKey(AUTH_QOP)) {
           try {
-            saslQOP = SaslQOP.fromString(sessConfMap.get(JdbcConnectionParams.AUTH_QOP));
+            saslQOP = SaslQOP.fromString(sessConfMap.get(AUTH_QOP));
           } catch (IllegalArgumentException e) {
             throw new KyuubiSQLException(
-                "Invalid " + JdbcConnectionParams.AUTH_QOP + " parameter. " + e.getMessage(),
-                "42000",
-                e);
+                "Invalid " + AUTH_QOP + " parameter. " + e.getMessage(), "42000", e);
           }
           saslProps.put(Sasl.QOP, saslQOP.toString());
         } else {
@@ -673,14 +655,10 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
           saslProps.put(Sasl.QOP, "auth-conf,auth-int,auth");
         }
         saslProps.put(Sasl.SERVER_AUTH, "true");
-        if (sessConfMap.containsKey(JdbcConnectionParams.AUTH_PRINCIPAL)) {
+        if (sessConfMap.containsKey(AUTH_PRINCIPAL)) {
           transport =
               KerberosSaslHelper.getKerberosTransport(
-                  sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL),
-                  host,
-                  socketTransport,
-                  saslProps,
-                  assumeSubject);
+                  sessConfMap.get(AUTH_PRINCIPAL), host, socketTransport, saslProps, assumeSubject);
         } else {
           // If there's a delegation token available then use token based connection
           String tokenStr = getClientDelegationToken(sessConfMap);
@@ -713,16 +691,14 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
 
     try {
       KeyManagerFactory keyManagerFactory =
-          KeyManagerFactory.getInstance(
-              JdbcConnectionParams.SUNX509_ALGORITHM_STRING,
-              JdbcConnectionParams.SUNJSSE_ALGORITHM_STRING);
-      String keyStorePath = sessConfMap.get(JdbcConnectionParams.SSL_KEY_STORE);
-      String keyStorePassword = sessConfMap.get(JdbcConnectionParams.SSL_KEY_STORE_PASSWORD);
-      KeyStore sslKeyStore = KeyStore.getInstance(JdbcConnectionParams.SSL_KEY_STORE_TYPE);
+          KeyManagerFactory.getInstance(SUNX509_ALGORITHM_STRING, SUNJSSE_ALGORITHM_STRING);
+      String keyStorePath = sessConfMap.get(SSL_KEY_STORE);
+      String keyStorePassword = sessConfMap.get(SSL_KEY_STORE_PASSWORD);
+      KeyStore sslKeyStore = KeyStore.getInstance(SSL_KEY_STORE_TYPE);
 
       if (keyStorePath == null || keyStorePath.isEmpty()) {
         throw new IllegalArgumentException(
-            JdbcConnectionParams.SSL_KEY_STORE
+            SSL_KEY_STORE
                 + " Not configured for 2 way SSL connection, keyStorePath param is empty");
       }
       try (FileInputStream fis = new FileInputStream(keyStorePath)) {
@@ -731,14 +707,14 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
       keyManagerFactory.init(sslKeyStore, keyStorePassword.toCharArray());
 
       TrustManagerFactory trustManagerFactory =
-          TrustManagerFactory.getInstance(JdbcConnectionParams.SUNX509_ALGORITHM_STRING);
-      String trustStorePath = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
-      String trustStorePassword = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
-      KeyStore sslTrustStore = KeyStore.getInstance(JdbcConnectionParams.SSL_TRUST_STORE_TYPE);
+          TrustManagerFactory.getInstance(SUNX509_ALGORITHM_STRING);
+      String trustStorePath = sessConfMap.get(SSL_TRUST_STORE);
+      String trustStorePassword = sessConfMap.get(SSL_TRUST_STORE_PASSWORD);
+      KeyStore sslTrustStore = KeyStore.getInstance(SSL_TRUST_STORE_TYPE);
 
       if (trustStorePath == null || trustStorePath.isEmpty()) {
         throw new IllegalArgumentException(
-            JdbcConnectionParams.SSL_TRUST_STORE + " Not configured for 2 way SSL connection");
+            SSL_TRUST_STORE + " Not configured for 2 way SSL connection");
       }
       try (FileInputStream fis = new FileInputStream(trustStorePath)) {
         sslTrustStore.load(fis, trustStorePassword.toCharArray());
@@ -759,8 +735,7 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
   // Lookup the delegation token. First in the connection URL, then Configuration
   private String getClientDelegationToken(Map<String, String> jdbcConnConf) throws SQLException {
     String tokenStr = null;
-    if (JdbcConnectionParams.AUTH_TOKEN.equalsIgnoreCase(
-        jdbcConnConf.get(JdbcConnectionParams.AUTH_TYPE))) {
+    if (AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(AUTH_TYPE))) {
       // check delegation token in job conf if any
       try {
         tokenStr = SessionUtils.getTokenStrForm(HS2_CLIENT_TOKEN);
@@ -804,9 +779,9 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
     openReq.setConfiguration(openConf);
 
     // Store the user name in the open request in case no non-sasl authentication
-    if (JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))) {
-      openReq.setUsername(sessConfMap.get(JdbcConnectionParams.AUTH_USER));
-      openReq.setPassword(sessConfMap.get(JdbcConnectionParams.AUTH_PASSWD));
+    if (AUTH_SIMPLE.equals(sessConfMap.get(AUTH_TYPE))) {
+      openReq.setUsername(sessConfMap.get(AUTH_USER));
+      openReq.setPassword(sessConfMap.get(AUTH_PASSWD));
     }
 
     try {
@@ -857,29 +832,26 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
 
   /** @return username from sessConfMap */
   private String getUserName() {
-    return getSessionValue(JdbcConnectionParams.AUTH_USER, JdbcConnectionParams.ANONYMOUS_USER);
+    return getSessionValue(AUTH_USER, ANONYMOUS_USER);
   }
 
   /** @return password from sessConfMap */
   private String getPassword() {
-    return getSessionValue(JdbcConnectionParams.AUTH_PASSWD, JdbcConnectionParams.ANONYMOUS_PASSWD);
+    return getSessionValue(AUTH_PASSWD, ANONYMOUS_PASSWD);
   }
 
   private boolean isSslConnection() {
-    return "true".equalsIgnoreCase(sessConfMap.get(JdbcConnectionParams.USE_SSL));
+    return "true".equalsIgnoreCase(sessConfMap.get(USE_SSL));
   }
 
   private boolean isKerberosAuthMode() {
-    return !JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))
-        && sessConfMap.containsKey(JdbcConnectionParams.AUTH_PRINCIPAL);
+    return !AUTH_SIMPLE.equals(sessConfMap.get(AUTH_TYPE))
+        && sessConfMap.containsKey(AUTH_PRINCIPAL);
   }
 
   private boolean isHttpTransportMode() {
-    String transportMode = sessConfMap.get(JdbcConnectionParams.TRANSPORT_MODE);
-    if (transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
-      return true;
-    }
-    return false;
+    String transportMode = sessConfMap.get(TRANSPORT_MODE);
+    return transportMode != null && (transportMode.equalsIgnoreCase("http"));
   }
 
   private void logZkDiscoveryMessage(String message) {
@@ -888,13 +860,7 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
     }
   }
 
-  /**
-   * Lookup varName in sessConfMap, if its null or empty return the default value varDefault
-   *
-   * @param varName
-   * @param varDefault
-   * @return
-   */
+  /** Lookup varName in sessConfMap, if its null or empty return the default value varDefault */
   private String getSessionValue(String varName, String varDefault) {
     String varValue = sessConfMap.get(varName);
     if ((varValue == null) || varValue.isEmpty()) {
@@ -935,7 +901,6 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
     try {
       TCancelDelegationTokenResp cancelResp = client.CancelDelegationToken(cancelReq);
       Utils.verifySuccess(cancelResp.getStatus());
-      return;
     } catch (TException e) {
       throw new KyuubiSQLException("Could not cancel token: " + e.getMessage(), " 08S01", e);
     }
@@ -949,7 +914,6 @@ public class KyuubiConnection implements SQLConnection, KyuubiLoggable {
     try {
       TRenewDelegationTokenResp renewResp = client.RenewDelegationToken(cancelReq);
       Utils.verifySuccess(renewResp.getStatus());
-      return;
     } catch (TException e) {
       throw new KyuubiSQLException("Could not renew token: " + e.getMessage(), " 08S01", e);
     }
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiDatabaseMetaData.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiDatabaseMetaData.java
index 4d3875cea..f5e29f8e7 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiDatabaseMetaData.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiDatabaseMetaData.java
@@ -696,12 +696,7 @@ public class KyuubiDatabaseMetaData implements SQLDatabaseMetaData {
     }
   }
 
-  /**
-   * Translate hive table types into jdbc table types.
-   *
-   * @param hivetabletype
-   * @return the type of the table
-   */
+  /** Translate hive table types into jdbc table types. */
   public static String toJdbcTableType(String hivetabletype) {
     if (hivetabletype == null) {
       return null;
@@ -916,12 +911,6 @@ public class KyuubiDatabaseMetaData implements SQLDatabaseMetaData {
     return true;
   }
 
-  public static void main(String[] args) throws SQLException {
-    KyuubiDatabaseMetaData meta = new KyuubiDatabaseMetaData(null, null, null);
-    System.out.println("DriverName: " + meta.getDriverName());
-    System.out.println("DriverVersion: " + meta.getDriverVersion());
-  }
-
   private TGetInfoResp getServerInfo(TGetInfoType type) throws SQLException {
     TGetInfoReq req = new TGetInfoReq(sessHandle, type);
     TGetInfoResp resp;
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiPreparedStatement.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiPreparedStatement.java
index 991edc550..43c2a030b 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiPreparedStatement.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiPreparedStatement.java
@@ -60,19 +60,13 @@ public class KyuubiPreparedStatement extends KyuubiStatement implements SQLPrepa
    *
    * @return boolean Returns true if a resultSet is created, false if not. Note: If the result set
    *     is empty a true is returned.
-   * @throws SQLException
    */
   @Override
   public boolean execute() throws SQLException {
     return super.execute(updateSql(sql, parameters));
   }
 
-  /**
-   * Invokes executeQuery(sql) using the sql provided to the constructor.
-   *
-   * @return ResultSet
-   * @throws SQLException
-   */
+  /** Invokes executeQuery(sql) using the sql provided to the constructor. */
   @Override
   public ResultSet executeQuery() throws SQLException {
     return super.executeQuery(updateSql(sql, parameters));
@@ -84,14 +78,7 @@ public class KyuubiPreparedStatement extends KyuubiStatement implements SQLPrepa
     return 0;
   }
 
-  /**
-   * update the SQL string with parameters set by setXXX methods of {@link PreparedStatement}
-   *
-   * @param sql
-   * @param parameters
-   * @return updated SQL string
-   * @throws SQLException
-   */
+  /** update the SQL string with parameters set by setXXX methods of {@link PreparedStatement} */
   private String updateSql(final String sql, HashMap<Integer, String> parameters)
       throws SQLException {
     List<String> parts = splitSqlStatement(sql);
@@ -113,9 +100,6 @@ public class KyuubiPreparedStatement extends KyuubiStatement implements SQLPrepa
    * <p>taking into account ' and \ escaping.
    *
    * <p>output for: 'select 1 from ? where a = ?' ['select 1 from ',' where a = ','']
-   *
-   * @param sql
-   * @return
    */
   private List<String> splitSqlStatement(String sql) {
     List<String> parts = new ArrayList<>();
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiQueryResultSet.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiQueryResultSet.java
index b74c5f6b6..13ba6395d 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiQueryResultSet.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiQueryResultSet.java
@@ -255,12 +255,7 @@ public class KyuubiQueryResultSet extends KyuubiBaseResultSet {
     }
   }
 
-  /**
-   * Set the specified schema to the resultset
-   *
-   * @param colNames
-   * @param colTypes
-   */
+  /** Set the specified schema to the resultset */
   private void setSchema(
       List<String> colNames, List<TTypeId> colTypes, List<JdbcColumnAttributes> colAttributes) {
     columnNames.addAll(colNames);
@@ -318,7 +313,7 @@ public class KyuubiQueryResultSet extends KyuubiBaseResultSet {
       return false;
     }
 
-    /**
+    /*
      * Poll on the operation status, till the operation is complete. We need to wait only for
      * HiveStatement to complete. HiveDatabaseMetaData which also uses this ResultSet returns only
      * after the RPC is complete.
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/Utils.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/Utils.java
index 429eca140..20495a4b1 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/Utils.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/Utils.java
@@ -308,10 +308,6 @@ public class Utils {
    * <p>Connect to http://server:10001/hs2, with specified basicAuth credentials and initial
    * database:
    * jdbc:hive2://server:10001/db;user=foo;password=bar?hive.server2.transport.mode=http;hive.server2.thrift.http.path=hs2
-   *
-   * @param uri
-   * @return
-   * @throws SQLException
    */
   public static JdbcConnectionParams parseURL(String uri, Properties info)
       throws JdbcUriParseException, SQLException, ZooKeeperHiveClientException {
@@ -326,11 +322,6 @@ public class Utils {
   /**
    * This method handles the base parsing of the given jdbc uri. Some of JdbcConnectionParams
    * returned from this method are updated if ZooKeeper is used for service discovery
-   *
-   * @param uri
-   * @param info
-   * @return
-   * @throws JdbcUriParseException
    */
   public static JdbcConnectionParams extractURLComponents(String uri, Properties info)
       throws JdbcUriParseException {
@@ -355,7 +346,7 @@ public class Utils {
     URI jdbcURI = URI.create(uri.substring(URI_JDBC_PREFIX.length()));
 
     // key=value pattern
-    Pattern pattern = Pattern.compile("([^;]*)=([^;]*)[;]?");
+    Pattern pattern = Pattern.compile("([^;]*)=([^;]*);?");
 
     // dbname and session settings
     String sessVars = jdbcURI.getPath();
@@ -370,14 +361,11 @@ public class Utils {
         // we have dbname followed by session parameters
         dbName = sessVars.substring(0, sessVars.indexOf(';'));
         sessVars = sessVars.substring(sessVars.indexOf(';') + 1);
-        if (sessVars != null) {
-          Matcher sessMatcher = pattern.matcher(sessVars);
-          while (sessMatcher.find()) {
-            if (connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2))
-                != null) {
-              throw new JdbcUriParseException(
-                  "Bad URL format: Multiple values for property " + sessMatcher.group(1));
-            }
+        Matcher sessMatcher = pattern.matcher(sessVars);
+        while (sessMatcher.find()) {
+          if (connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2)) != null) {
+            throw new JdbcUriParseException(
+                "Bad URL format: Multiple values for property " + sessMatcher.group(1));
           }
         }
       }
@@ -551,12 +539,6 @@ public class Utils {
   /**
    * Remove the deprecatedName param from the fromMap and put the key value in the toMap. Also log a
    * deprecation message for the client.
-   *
-   * @param fromMap
-   * @param toMap
-   * @param deprecatedName
-   * @param newName
-   * @param newUsage
    */
   private static void handleParamDeprecation(
       Map<String, String> fromMap,
@@ -576,11 +558,6 @@ public class Utils {
   /**
    * Get the authority string from the supplied uri, which could potentially contain multiple
    * host:port pairs.
-   *
-   * @param uri
-   * @param connParams
-   * @return
-   * @throws JdbcUriParseException
    */
   private static String getAuthorityFromJdbcURL(String uri) throws JdbcUriParseException {
     String authorities;
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/XsrfHttpRequestInterceptor.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/XsrfHttpRequestInterceptor.java
index b25d0117f..ee20069bd 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/XsrfHttpRequestInterceptor.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/XsrfHttpRequestInterceptor.java
@@ -17,8 +17,6 @@
 
 package org.apache.kyuubi.jdbc.hive;
 
-import java.io.IOException;
-import org.apache.http.HttpException;
 import org.apache.http.HttpRequest;
 import org.apache.http.HttpRequestInterceptor;
 import org.apache.http.protocol.HttpContext;
@@ -38,15 +36,14 @@ public class XsrfHttpRequestInterceptor implements HttpRequestInterceptor {
   // redirecting a browser that has login credentials from making a
   // request to HS2 on their behalf.
 
-  private static boolean injectHeader = true;
+  private boolean injectHeader = true;
 
-  public static void enableHeaderInjection(boolean enabled) {
+  public void enableHeaderInjection(boolean enabled) {
     injectHeader = enabled;
   }
 
   @Override
-  public void process(HttpRequest httpRequest, HttpContext httpContext)
-      throws HttpException, IOException {
+  public void process(HttpRequest httpRequest, HttpContext httpContext) {
     if (injectHeader) {
       httpRequest.addHeader("X-XSRF-HEADER", "true");
     }
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/ZooKeeperHiveClientHelper.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/ZooKeeperHiveClientHelper.java
index 9d3cd5539..c26ebd9e3 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/ZooKeeperHiveClientHelper.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/ZooKeeperHiveClientHelper.java
@@ -28,13 +28,10 @@ import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.retry.ExponentialBackoffRetry;
 import org.apache.kyuubi.jdbc.hive.Utils.JdbcConnectionParams;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 class ZooKeeperHiveClientHelper {
-  static final Logger LOG = LoggerFactory.getLogger(ZooKeeperHiveClientHelper.class.getName());
   // Pattern for key1=value1;key2=value2
-  private static final Pattern kvPattern = Pattern.compile("([^=;]*)=([^;]*)[;]?");
+  private static final Pattern kvPattern = Pattern.compile("([^=;]*)=([^;]*);?");
 
   private static String getZooKeeperNamespace(JdbcConnectionParams connParams) {
     String zooKeeperNamespace =
@@ -109,9 +106,7 @@ class ZooKeeperHiveClientHelper {
 
   static void configureConnParams(JdbcConnectionParams connParams)
       throws ZooKeeperHiveClientException {
-    CuratorFramework zooKeeperClient = null;
-    try {
-      zooKeeperClient = getZkClient(connParams);
+    try (CuratorFramework zooKeeperClient = getZkClient(connParams)) {
       List<String> serverHosts = getServerHosts(connParams, zooKeeperClient);
       // Now pick a server node randomly
       String serverNode = serverHosts.get(new Random().nextInt(serverHosts.size()));
@@ -119,19 +114,13 @@ class ZooKeeperHiveClientHelper {
     } catch (Exception e) {
       throw new ZooKeeperHiveClientException(
           "Unable to read HiveServer2 configs from ZooKeeper", e);
-    } finally {
-      // Close the client connection with ZooKeeper
-      if (zooKeeperClient != null) {
-        zooKeeperClient.close();
-      }
     }
+    // Close the client connection with ZooKeeper
   }
 
   static List<JdbcConnectionParams> getDirectParamsList(JdbcConnectionParams connParams)
       throws ZooKeeperHiveClientException {
-    CuratorFramework zooKeeperClient = null;
-    try {
-      zooKeeperClient = getZkClient(connParams);
+    try (CuratorFramework zooKeeperClient = getZkClient(connParams)) {
       List<String> serverHosts = getServerHosts(connParams, zooKeeperClient);
       final List<JdbcConnectionParams> directParamsList = new ArrayList<>();
       // For each node
@@ -144,21 +133,13 @@ class ZooKeeperHiveClientHelper {
     } catch (Exception e) {
       throw new ZooKeeperHiveClientException(
           "Unable to read HiveServer2 configs from ZooKeeper", e);
-    } finally {
-      // Close the client connection with ZooKeeper
-      if (zooKeeperClient != null) {
-        zooKeeperClient.close();
-      }
     }
+    // Close the client connection with ZooKeeper
   }
 
   /**
    * Apply configs published by the server. Configs specified from client's JDBC URI override
    * configs published by the server.
-   *
-   * @param serverConfStr
-   * @param connParams
-   * @throws Exception
    */
   private static void applyConfs(String serverConfStr, Utils.JdbcConnectionParams connParams)
       throws Exception {
@@ -207,7 +188,7 @@ class ZooKeeperHiveClientHelper {
             && !(connParams.getSessionVars().containsKey(Utils.JdbcConnectionParams.USE_SSL))) {
           connParams.getSessionVars().put(Utils.JdbcConnectionParams.USE_SSL, matcher.group(2));
         }
-        /**
+        /*
          * Note: this is pretty messy, but sticking to the current implementation. Set
          * authentication configs. Note that in JDBC driver, we have 3 auth modes: NOSASL, Kerberos
          * (including delegation token mechanism) and password based. The use of
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/HttpAuthUtils.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/HttpAuthUtils.java
index e41dd9ed4..09ce3bab4 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/HttpAuthUtils.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/HttpAuthUtils.java
@@ -18,14 +18,9 @@
 package org.apache.kyuubi.jdbc.hive.auth;
 
 import java.security.PrivilegedExceptionAction;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
 import javax.security.auth.Subject;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.http.protocol.BasicHttpContext;
-import org.apache.http.protocol.HttpContext;
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
@@ -35,28 +30,21 @@ import org.ietf.jgss.Oid;
 public final class HttpAuthUtils {
   public static final String AUTHORIZATION = "Authorization";
   public static final String NEGOTIATE = "Negotiate";
-  private static final String COOKIE_CLIENT_USER_NAME = "cu";
-  private static final String COOKIE_CLIENT_RAND_NUMBER = "rn";
-  private static final String COOKIE_KEY_VALUE_SEPARATOR = "=";
-  private static final Set<String> COOKIE_ATTRIBUTES =
-      new HashSet<String>(Arrays.asList(COOKIE_CLIENT_USER_NAME, COOKIE_CLIENT_RAND_NUMBER));
 
   /**
    * @return Stringified Base64 encoded kerberosAuthHeader on success
    * @throws Exception
    */
   public static String getKerberosServiceTicket(
-      String principal, String host, String serverHttpUrl, Subject loggedInSubject)
-      throws Exception {
+      String principal, String host, Subject loggedInSubject) throws Exception {
     String serverPrincipal = HadoopThriftAuthBridge.getBridge().getServerPrincipal(principal, host);
     if (loggedInSubject != null) {
-      return Subject.doAs(
-          loggedInSubject, new HttpKerberosClientAction(serverPrincipal, serverHttpUrl));
+      return Subject.doAs(loggedInSubject, new HttpKerberosClientAction(serverPrincipal));
     } else {
       // JAAS login from ticket cache to setup the client UserGroupInformation
       UserGroupInformation clientUGI =
           HadoopThriftAuthBridge.getBridge().getCurrentUGIWithConf("kerberos");
-      return clientUGI.doAs(new HttpKerberosClientAction(serverPrincipal, serverHttpUrl));
+      return clientUGI.doAs(new HttpKerberosClientAction(serverPrincipal));
     }
   }
 
@@ -69,18 +57,12 @@ public final class HttpAuthUtils {
    * can be read from the Subject
    */
   public static class HttpKerberosClientAction implements PrivilegedExceptionAction<String> {
-    public static final String SERVER_HTTP_URL = "SERVER_HTTP_URL";
     private final String serverPrincipal;
-    private final String serverHttpUrl;
     private final Base64 base64codec;
-    private final HttpContext httpContext;
 
-    public HttpKerberosClientAction(String serverPrincipal, String serverHttpUrl) {
+    public HttpKerberosClientAction(String serverPrincipal) {
       this.serverPrincipal = serverPrincipal;
-      this.serverHttpUrl = serverHttpUrl;
       base64codec = new Base64(0);
-      httpContext = new BasicHttpContext();
-      httpContext.setAttribute(SERVER_HTTP_URL, serverHttpUrl);
     }
 
     @Override
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/PlainSaslHelper.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/PlainSaslHelper.java
index 483e2867e..62b4898e2 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/PlainSaslHelper.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/PlainSaslHelper.java
@@ -17,7 +17,6 @@
 
 package org.apache.kyuubi.jdbc.hive.auth;
 
-import java.io.IOException;
 import java.util.HashMap;
 import javax.security.auth.callback.*;
 import javax.security.sasl.SaslException;
@@ -33,7 +32,7 @@ public final class PlainSaslHelper {
         null,
         null,
         null,
-        new HashMap<String, String>(),
+        new HashMap<>(),
         new PlainCallbackHandler(username, password),
         underlyingTransport);
   }
@@ -53,7 +52,7 @@ public final class PlainSaslHelper {
     }
 
     @Override
-    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+    public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
       for (Callback callback : callbacks) {
         if (callback instanceof NameCallback) {
           NameCallback nameCallback = (NameCallback) callback;
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/SaslQOP.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/SaslQOP.java
index ed842e50b..1f89ed43e 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/SaslQOP.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/SaslQOP.java
@@ -29,7 +29,7 @@ public enum SaslQOP {
 
   public final String saslQop;
 
-  private static final Map<String, SaslQOP> STR_TO_ENUM = new HashMap<String, SaslQOP>();
+  private static final Map<String, SaslQOP> STR_TO_ENUM = new HashMap<>();
 
   static {
     for (SaslQOP saslQop : values()) {
@@ -41,6 +41,7 @@ public enum SaslQOP {
     this.saslQop = saslQop;
   }
 
+  @Override
   public String toString() {
     return saslQop;
   }
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnBasedSet.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnBasedSet.java
index 3d397644b..2e1ccf062 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnBasedSet.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnBasedSet.java
@@ -22,10 +22,7 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import org.apache.hive.service.rpc.thrift.TColumn;
-import org.apache.hive.service.rpc.thrift.TRow;
 import org.apache.hive.service.rpc.thrift.TRowSet;
-import org.apache.hive.service.rpc.thrift.TTypeId;
-import org.apache.kyuubi.jdbc.hive.common.HiveDecimal;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TCompactProtocol;
 import org.apache.thrift.protocol.TProtocol;
@@ -35,18 +32,13 @@ import org.slf4j.LoggerFactory;
 
 /** ColumnBasedSet. */
 public class ColumnBasedSet implements RowSet {
+  public static final Logger LOG = LoggerFactory.getLogger(ColumnBasedSet.class);
 
-  private long startOffset;
-
-  private final TypeDescriptor[] descriptors; // non-null only for writing (server-side)
+  private final long startOffset;
   private final List<ColumnBuffer> columns;
-  private byte[] blob;
-  private boolean isBlobBased = false;
-  public static final Logger LOG = LoggerFactory.getLogger(ColumnBasedSet.class);
 
   public ColumnBasedSet(TRowSet tRowSet) throws TException {
-    descriptors = null;
-    columns = new ArrayList<ColumnBuffer>();
+    columns = new ArrayList<>();
     // Use TCompactProtocol to read serialized TColumns
     if (tRowSet.isSetBinaryColumns()) {
       TProtocol protocol =
@@ -73,35 +65,6 @@ public class ColumnBasedSet implements RowSet {
     startOffset = tRowSet.getStartRowOffset();
   }
 
-  private ColumnBasedSet(
-      TypeDescriptor[] descriptors, List<ColumnBuffer> columns, long startOffset) {
-    this.descriptors = descriptors;
-    this.columns = columns;
-    this.startOffset = startOffset;
-  }
-
-  @Override
-  public ColumnBasedSet addRow(Object[] fields) {
-    if (isBlobBased) {
-      this.blob = (byte[]) fields[0];
-    } else {
-      for (int i = 0; i < fields.length; i++) {
-        TypeDescriptor descriptor = descriptors[i];
-        Object field = fields[i];
-        if (field != null && descriptor.getType() == TTypeId.DECIMAL_TYPE) {
-          int scale = descriptor.getDecimalDigits();
-          field = ((HiveDecimal) field).toFormatString(scale);
-        }
-        columns.get(i).addValue(descriptor.getType(), field);
-      }
-    }
-    return this;
-  }
-
-  public List<ColumnBuffer> getColumns() {
-    return columns;
-  }
-
   @Override
   public int numColumns() {
     return columns.size();
@@ -112,43 +75,11 @@ public class ColumnBasedSet implements RowSet {
     return columns.isEmpty() ? 0 : columns.get(0).size();
   }
 
-  @Override
-  public ColumnBasedSet extractSubset(int maxRows) {
-    int numRows = Math.min(numRows(), maxRows);
-
-    List<ColumnBuffer> subset = new ArrayList<ColumnBuffer>();
-    for (int i = 0; i < columns.size(); i++) {
-      subset.add(columns.get(i).extractSubset(numRows));
-    }
-    ColumnBasedSet result = new ColumnBasedSet(descriptors, subset, startOffset);
-    startOffset += numRows;
-    return result;
-  }
-
   @Override
   public long getStartOffset() {
     return startOffset;
   }
 
-  @Override
-  public void setStartOffset(long startOffset) {
-    this.startOffset = startOffset;
-  }
-
-  public TRowSet toTRowSet() {
-    TRowSet tRowSet = new TRowSet(startOffset, new ArrayList<TRow>());
-    if (isBlobBased) {
-      tRowSet.setColumns(null);
-      tRowSet.setBinaryColumns(blob);
-      tRowSet.setColumnCount(numColumns());
-    } else {
-      for (int i = 0; i < columns.size(); i++) {
-        tRowSet.addToColumns(columns.get(i).toTColumn());
-      }
-    }
-    return tRowSet;
-  }
-
   @Override
   public Iterator<Object[]> iterator() {
     return new Iterator<Object[]>() {
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnBuffer.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnBuffer.java
index 287605653..af9fafbf8 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnBuffer.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnBuffer.java
@@ -17,22 +17,22 @@
 
 package org.apache.kyuubi.jdbc.hive.cli;
 
-import com.google.common.annotations.VisibleForTesting;
 import com.google.common.primitives.*;
 import java.nio.ByteBuffer;
-import java.util.*;
-import org.apache.hive.service.rpc.thrift.*;
+import java.util.AbstractList;
+import java.util.BitSet;
+import java.util.List;
+import org.apache.hive.service.rpc.thrift.TColumn;
+import org.apache.hive.service.rpc.thrift.TTypeId;
 
 /** ColumnBuffer */
-public class ColumnBuffer extends AbstractList {
-
-  private static final int DEFAULT_SIZE = 100;
+public class ColumnBuffer extends AbstractList<Object> {
 
   private final TTypeId type;
 
-  private BitSet nulls;
+  private final BitSet nulls;
 
-  private int size;
+  private final int size;
   private boolean[] boolVars;
   private byte[] byteVars;
   private short[] shortVars;
@@ -42,74 +42,6 @@ public class ColumnBuffer extends AbstractList {
   private List<String> stringVars;
   private List<ByteBuffer> binaryVars;
 
-  public ColumnBuffer(TTypeId type, BitSet nulls, Object values) {
-    this.type = type;
-    this.nulls = nulls;
-    if (type == TTypeId.BOOLEAN_TYPE) {
-      boolVars = (boolean[]) values;
-      size = boolVars.length;
-    } else if (type == TTypeId.TINYINT_TYPE) {
-      byteVars = (byte[]) values;
-      size = byteVars.length;
-    } else if (type == TTypeId.SMALLINT_TYPE) {
-      shortVars = (short[]) values;
-      size = shortVars.length;
-    } else if (type == TTypeId.INT_TYPE) {
-      intVars = (int[]) values;
-      size = intVars.length;
-    } else if (type == TTypeId.BIGINT_TYPE) {
-      longVars = (long[]) values;
-      size = longVars.length;
-    } else if (type == TTypeId.DOUBLE_TYPE || type == TTypeId.FLOAT_TYPE) {
-      doubleVars = (double[]) values;
-      size = doubleVars.length;
-    } else if (type == TTypeId.BINARY_TYPE) {
-      binaryVars = (List<ByteBuffer>) values;
-      size = binaryVars.size();
-    } else if (type == TTypeId.STRING_TYPE) {
-      stringVars = (List<String>) values;
-      size = stringVars.size();
-    } else {
-      throw new IllegalStateException("invalid union object");
-    }
-  }
-
-  public ColumnBuffer(TTypeId type) {
-    nulls = new BitSet();
-    switch (type) {
-      case BOOLEAN_TYPE:
-        boolVars = new boolean[DEFAULT_SIZE];
-        break;
-      case TINYINT_TYPE:
-        byteVars = new byte[DEFAULT_SIZE];
-        break;
-      case SMALLINT_TYPE:
-        shortVars = new short[DEFAULT_SIZE];
-        break;
-      case INT_TYPE:
-        intVars = new int[DEFAULT_SIZE];
-        break;
-      case BIGINT_TYPE:
-        longVars = new long[DEFAULT_SIZE];
-        break;
-      case FLOAT_TYPE:
-        type = TTypeId.FLOAT_TYPE;
-        doubleVars = new double[DEFAULT_SIZE];
-        break;
-      case DOUBLE_TYPE:
-        type = TTypeId.DOUBLE_TYPE;
-        doubleVars = new double[DEFAULT_SIZE];
-        break;
-      case BINARY_TYPE:
-        binaryVars = new ArrayList<ByteBuffer>();
-        break;
-      default:
-        type = TTypeId.STRING_TYPE;
-        stringVars = new ArrayList<String>();
-    }
-    this.type = type;
-  }
-
   public ColumnBuffer(TColumn colValues) {
     if (colValues.isSetBoolVal()) {
       type = TTypeId.BOOLEAN_TYPE;
@@ -156,78 +88,6 @@ public class ColumnBuffer extends AbstractList {
     }
   }
 
-  /**
-   * Get a subset of this ColumnBuffer, starting from the 1st value.
-   *
-   * @param end index after the last value to include
-   */
-  public ColumnBuffer extractSubset(int end) {
-    BitSet subNulls = nulls.get(0, end);
-    if (type == TTypeId.BOOLEAN_TYPE) {
-      ColumnBuffer subset = new ColumnBuffer(type, subNulls, Arrays.copyOfRange(boolVars, 0, end));
-      boolVars = Arrays.copyOfRange(boolVars, end, size);
-      nulls = nulls.get(end, size);
-      size = boolVars.length;
-      return subset;
-    }
-    if (type == TTypeId.TINYINT_TYPE) {
-      ColumnBuffer subset = new ColumnBuffer(type, subNulls, Arrays.copyOfRange(byteVars, 0, end));
-      byteVars = Arrays.copyOfRange(byteVars, end, size);
-      nulls = nulls.get(end, size);
-      size = byteVars.length;
-      return subset;
-    }
-    if (type == TTypeId.SMALLINT_TYPE) {
-      ColumnBuffer subset = new ColumnBuffer(type, subNulls, Arrays.copyOfRange(shortVars, 0, end));
-      shortVars = Arrays.copyOfRange(shortVars, end, size);
-      nulls = nulls.get(end, size);
-      size = shortVars.length;
-      return subset;
-    }
-    if (type == TTypeId.INT_TYPE) {
-      ColumnBuffer subset = new ColumnBuffer(type, subNulls, Arrays.copyOfRange(intVars, 0, end));
-      intVars = Arrays.copyOfRange(intVars, end, size);
-      nulls = nulls.get(end, size);
-      size = intVars.length;
-      return subset;
-    }
-    if (type == TTypeId.BIGINT_TYPE) {
-      ColumnBuffer subset = new ColumnBuffer(type, subNulls, Arrays.copyOfRange(longVars, 0, end));
-      longVars = Arrays.copyOfRange(longVars, end, size);
-      nulls = nulls.get(end, size);
-      size = longVars.length;
-      return subset;
-    }
-    if (type == TTypeId.DOUBLE_TYPE || type == TTypeId.FLOAT_TYPE) {
-      ColumnBuffer subset =
-          new ColumnBuffer(type, subNulls, Arrays.copyOfRange(doubleVars, 0, end));
-      doubleVars = Arrays.copyOfRange(doubleVars, end, size);
-      nulls = nulls.get(end, size);
-      size = doubleVars.length;
-      return subset;
-    }
-    if (type == TTypeId.BINARY_TYPE) {
-      ColumnBuffer subset = new ColumnBuffer(type, subNulls, binaryVars.subList(0, end));
-      binaryVars = binaryVars.subList(end, binaryVars.size());
-      nulls = nulls.get(end, size);
-      size = binaryVars.size();
-      return subset;
-    }
-    if (type == TTypeId.STRING_TYPE) {
-      ColumnBuffer subset = new ColumnBuffer(type, subNulls, stringVars.subList(0, end));
-      stringVars = stringVars.subList(end, stringVars.size());
-      nulls = nulls.get(end, size);
-      size = stringVars.size();
-      return subset;
-    }
-    throw new IllegalStateException("invalid union object");
-  }
-
-  @VisibleForTesting
-  BitSet getNulls() {
-    return nulls;
-  }
-
   private static final byte[] MASKS =
       new byte[] {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, (byte) 0x80};
 
@@ -240,14 +100,6 @@ public class ColumnBuffer extends AbstractList {
     return bitset;
   }
 
-  private static byte[] toBinary(BitSet bitset) {
-    byte[] nulls = new byte[1 + (bitset.length() / 8)];
-    for (int i = 0; i < bitset.length(); i++) {
-      nulls[i / 8] |= bitset.get(i) ? MASKS[i % 8] : 0;
-    }
-    return nulls;
-  }
-
   public TTypeId getType() {
     return type;
   }
@@ -283,146 +135,4 @@ public class ColumnBuffer extends AbstractList {
   public int size() {
     return size;
   }
-
-  public TColumn toTColumn() {
-    TColumn value = new TColumn();
-    ByteBuffer nullMasks = ByteBuffer.wrap(toBinary(nulls));
-    switch (type) {
-      case BOOLEAN_TYPE:
-        value.setBoolVal(
-            new TBoolColumn(Booleans.asList(Arrays.copyOfRange(boolVars, 0, size)), nullMasks));
-        break;
-      case TINYINT_TYPE:
-        value.setByteVal(
-            new TByteColumn(Bytes.asList(Arrays.copyOfRange(byteVars, 0, size)), nullMasks));
-        break;
-      case SMALLINT_TYPE:
-        value.setI16Val(
-            new TI16Column(Shorts.asList(Arrays.copyOfRange(shortVars, 0, size)), nullMasks));
-        break;
-      case INT_TYPE:
-        value.setI32Val(
-            new TI32Column(Ints.asList(Arrays.copyOfRange(intVars, 0, size)), nullMasks));
-        break;
-      case BIGINT_TYPE:
-        value.setI64Val(
-            new TI64Column(Longs.asList(Arrays.copyOfRange(longVars, 0, size)), nullMasks));
-        break;
-      case FLOAT_TYPE:
-      case DOUBLE_TYPE:
-        value.setDoubleVal(
-            new TDoubleColumn(Doubles.asList(Arrays.copyOfRange(doubleVars, 0, size)), nullMasks));
-        break;
-      case STRING_TYPE:
-        value.setStringVal(new TStringColumn(stringVars, nullMasks));
-        break;
-      case BINARY_TYPE:
-        value.setBinaryVal(new TBinaryColumn(binaryVars, nullMasks));
-        break;
-    }
-    return value;
-  }
-
-  private static final ByteBuffer EMPTY_BINARY = ByteBuffer.allocate(0);
-  private static final String EMPTY_STRING = "";
-
-  public void addValue(Object field) {
-    addValue(this.type, field);
-  }
-
-  public void addValue(TTypeId type, Object field) {
-    switch (type) {
-      case BOOLEAN_TYPE:
-        nulls.set(size, field == null);
-        boolVars()[size] = field == null ? true : (Boolean) field;
-        break;
-      case TINYINT_TYPE:
-        nulls.set(size, field == null);
-        byteVars()[size] = field == null ? 0 : (Byte) field;
-        break;
-      case SMALLINT_TYPE:
-        nulls.set(size, field == null);
-        shortVars()[size] = field == null ? 0 : (Short) field;
-        break;
-      case INT_TYPE:
-        nulls.set(size, field == null);
-        intVars()[size] = field == null ? 0 : (Integer) field;
-        break;
-      case BIGINT_TYPE:
-        nulls.set(size, field == null);
-        longVars()[size] = field == null ? 0 : (Long) field;
-        break;
-      case FLOAT_TYPE:
-        nulls.set(size, field == null);
-        doubleVars()[size] = field == null ? 0 : new Double(field.toString());
-        break;
-      case DOUBLE_TYPE:
-        nulls.set(size, field == null);
-        doubleVars()[size] = field == null ? 0 : (Double) field;
-        break;
-      case BINARY_TYPE:
-        nulls.set(binaryVars.size(), field == null);
-        binaryVars.add(field == null ? EMPTY_BINARY : ByteBuffer.wrap((byte[]) field));
-        break;
-      default:
-        nulls.set(stringVars.size(), field == null);
-        stringVars.add(field == null ? EMPTY_STRING : String.valueOf(field));
-        break;
-    }
-    size++;
-  }
-
-  private boolean[] boolVars() {
-    if (boolVars.length == size) {
-      boolean[] newVars = new boolean[size << 1];
-      System.arraycopy(boolVars, 0, newVars, 0, size);
-      return boolVars = newVars;
-    }
-    return boolVars;
-  }
-
-  private byte[] byteVars() {
-    if (byteVars.length == size) {
-      byte[] newVars = new byte[size << 1];
-      System.arraycopy(byteVars, 0, newVars, 0, size);
-      return byteVars = newVars;
-    }
-    return byteVars;
-  }
-
-  private short[] shortVars() {
-    if (shortVars.length == size) {
-      short[] newVars = new short[size << 1];
-      System.arraycopy(shortVars, 0, newVars, 0, size);
-      return shortVars = newVars;
-    }
-    return shortVars;
-  }
-
-  private int[] intVars() {
-    if (intVars.length == size) {
-      int[] newVars = new int[size << 1];
-      System.arraycopy(intVars, 0, newVars, 0, size);
-      return intVars = newVars;
-    }
-    return intVars;
-  }
-
-  private long[] longVars() {
-    if (longVars.length == size) {
-      long[] newVars = new long[size << 1];
-      System.arraycopy(longVars, 0, newVars, 0, size);
-      return longVars = newVars;
-    }
-    return longVars;
-  }
-
-  private double[] doubleVars() {
-    if (doubleVars.length == size) {
-      double[] newVars = new double[size << 1];
-      System.arraycopy(doubleVars, 0, newVars, 0, size);
-      return doubleVars = newVars;
-    }
-    return doubleVars;
-  }
 }
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnValue.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnValue.java
index 79d2d7238..291b791c8 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnValue.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/ColumnValue.java
@@ -17,191 +17,11 @@
 
 package org.apache.kyuubi.jdbc.hive.cli;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import org.apache.hive.service.rpc.thrift.*;
-import org.apache.kyuubi.jdbc.hive.common.*;
 
 /** Protocols before HIVE_CLI_SERVICE_PROTOCOL_V6 (used by RowBasedSet) */
 public class ColumnValue {
 
-  private static TColumnValue booleanValue(Boolean value) {
-    TBoolValue tBoolValue = new TBoolValue();
-    if (value != null) {
-      tBoolValue.setValue(value);
-    }
-    return TColumnValue.boolVal(tBoolValue);
-  }
-
-  private static TColumnValue byteValue(Byte value) {
-    TByteValue tByteValue = new TByteValue();
-    if (value != null) {
-      tByteValue.setValue(value);
-    }
-    return TColumnValue.byteVal(tByteValue);
-  }
-
-  private static TColumnValue shortValue(Short value) {
-    TI16Value tI16Value = new TI16Value();
-    if (value != null) {
-      tI16Value.setValue(value);
-    }
-    return TColumnValue.i16Val(tI16Value);
-  }
-
-  private static TColumnValue intValue(Integer value) {
-    TI32Value tI32Value = new TI32Value();
-    if (value != null) {
-      tI32Value.setValue(value);
-    }
-    return TColumnValue.i32Val(tI32Value);
-  }
-
-  private static TColumnValue longValue(Long value) {
-    TI64Value tI64Value = new TI64Value();
-    if (value != null) {
-      tI64Value.setValue(value);
-    }
-    return TColumnValue.i64Val(tI64Value);
-  }
-
-  private static TColumnValue floatValue(Float value) {
-    TDoubleValue tDoubleValue = new TDoubleValue();
-    if (value != null) {
-      tDoubleValue.setValue(value);
-    }
-    return TColumnValue.doubleVal(tDoubleValue);
-  }
-
-  private static TColumnValue doubleValue(Double value) {
-    TDoubleValue tDoubleValue = new TDoubleValue();
-    if (value != null) {
-      tDoubleValue.setValue(value);
-    }
-    return TColumnValue.doubleVal(tDoubleValue);
-  }
-
-  private static TColumnValue stringValue(String value) {
-    TStringValue tStringValue = new TStringValue();
-    if (value != null) {
-      tStringValue.setValue(value);
-    }
-    return TColumnValue.stringVal(tStringValue);
-  }
-
-  private static TColumnValue stringValue(HiveChar value) {
-    TStringValue tStringValue = new TStringValue();
-    if (value != null) {
-      tStringValue.setValue(value.toString());
-    }
-    return TColumnValue.stringVal(tStringValue);
-  }
-
-  private static TColumnValue stringValue(HiveVarchar value) {
-    TStringValue tStringValue = new TStringValue();
-    if (value != null) {
-      tStringValue.setValue(value.toString());
-    }
-    return TColumnValue.stringVal(tStringValue);
-  }
-
-  private static TColumnValue dateValue(Date value) {
-    TStringValue tStringValue = new TStringValue();
-    if (value != null) {
-      tStringValue.setValue(value.toString());
-    }
-    return new TColumnValue(TColumnValue.stringVal(tStringValue));
-  }
-
-  private static TColumnValue timestampValue(Timestamp value) {
-    TStringValue tStringValue = new TStringValue();
-    if (value != null) {
-      tStringValue.setValue(value.toString());
-    }
-    return TColumnValue.stringVal(tStringValue);
-  }
-
-  private static TColumnValue timestampTZValue(TimestampTZ value) {
-    TStringValue tStringValue = new TStringValue();
-    if (value != null) {
-      tStringValue.setValue(value.toString());
-    }
-    return TColumnValue.stringVal(tStringValue);
-  }
-
-  private static TColumnValue stringValue(HiveDecimal value, TypeDescriptor typeDescriptor) {
-    TStringValue tStrValue = new TStringValue();
-    if (value != null) {
-      int scale = typeDescriptor.getDecimalDigits();
-      tStrValue.setValue(value.toFormatString(scale));
-    }
-    return TColumnValue.stringVal(tStrValue);
-  }
-
-  private static TColumnValue stringValue(HiveIntervalYearMonth value) {
-    TStringValue tStrValue = new TStringValue();
-    if (value != null) {
-      tStrValue.setValue(value.toString());
-    }
-    return TColumnValue.stringVal(tStrValue);
-  }
-
-  private static TColumnValue stringValue(HiveIntervalDayTime value) {
-    TStringValue tStrValue = new TStringValue();
-    if (value != null) {
-      tStrValue.setValue(value.toString());
-    }
-    return TColumnValue.stringVal(tStrValue);
-  }
-
-  public static TColumnValue toTColumnValue(TypeDescriptor typeDescriptor, Object value) {
-    TTypeId type = typeDescriptor.getType();
-
-    switch (type) {
-      case BOOLEAN_TYPE:
-        return booleanValue((Boolean) value);
-      case TINYINT_TYPE:
-        return byteValue((Byte) value);
-      case SMALLINT_TYPE:
-        return shortValue((Short) value);
-      case INT_TYPE:
-        return intValue((Integer) value);
-      case BIGINT_TYPE:
-        return longValue((Long) value);
-      case FLOAT_TYPE:
-        return floatValue((Float) value);
-      case DOUBLE_TYPE:
-        return doubleValue((Double) value);
-      case CHAR_TYPE:
-        return stringValue((HiveChar) value);
-      case VARCHAR_TYPE:
-        return stringValue((HiveVarchar) value);
-      case DATE_TYPE:
-        return dateValue((Date) value);
-      case TIMESTAMP_TYPE:
-        return timestampValue((Timestamp) value);
-      case TIMESTAMPLOCALTZ_TYPE:
-        return timestampTZValue((TimestampTZ) value);
-      case INTERVAL_YEAR_MONTH_TYPE:
-        return stringValue((HiveIntervalYearMonth) value);
-      case INTERVAL_DAY_TIME_TYPE:
-        return stringValue((HiveIntervalDayTime) value);
-      case DECIMAL_TYPE:
-        return stringValue((HiveDecimal) value, typeDescriptor);
-      case BINARY_TYPE:
-      case ARRAY_TYPE:
-      case MAP_TYPE:
-      case STRUCT_TYPE:
-      case UNION_TYPE:
-      case USER_DEFINED_TYPE:
-      case NULL_TYPE:
-      case STRING_TYPE:
-        return stringValue((String) value);
-      default:
-        return null;
-    }
-  }
-
   private static Boolean getBooleanValue(TBoolValue tBoolValue) {
     if (tBoolValue.isSetValue()) {
       return tBoolValue.isValue();
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/FetchType.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/FetchType.java
index 2e81ec580..0c7d46302 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/FetchType.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/FetchType.java
@@ -31,15 +31,6 @@ public enum FetchType {
     this.tFetchType = tFetchType;
   }
 
-  public static FetchType getFetchType(short tFetchType) {
-    for (FetchType fetchType : values()) {
-      if (tFetchType == fetchType.toTFetchType()) {
-        return fetchType;
-      }
-    }
-    return QUERY_OUTPUT;
-  }
-
   public short toTFetchType() {
     return tFetchType;
   }
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/RowBasedSet.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/RowBasedSet.java
index 42bc87580..de553872e 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/RowBasedSet.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/RowBasedSet.java
@@ -27,33 +27,15 @@ import org.apache.hive.service.rpc.thrift.TRowSet;
 /** RowBasedSet */
 public class RowBasedSet implements RowSet {
 
-  private long startOffset;
+  private final long startOffset;
 
-  private final TypeDescriptor[] descriptors; // non-null only for writing (server-side)
   private final RemovableList<TRow> rows;
 
   public RowBasedSet(TRowSet tRowSet) {
-    descriptors = null;
-    rows = new RemovableList<TRow>(tRowSet.getRows());
+    rows = new RemovableList<>(tRowSet.getRows());
     startOffset = tRowSet.getStartRowOffset();
   }
 
-  private RowBasedSet(TypeDescriptor[] descriptors, List<TRow> rows, long startOffset) {
-    this.descriptors = descriptors;
-    this.rows = new RemovableList<TRow>(rows);
-    this.startOffset = startOffset;
-  }
-
-  @Override
-  public RowBasedSet addRow(Object[] fields) {
-    TRow tRow = new TRow();
-    for (int i = 0; i < fields.length; i++) {
-      tRow.addToColVals(ColumnValue.toTColumnValue(descriptors[i], fields[i]));
-    }
-    rows.add(tRow);
-    return this;
-  }
-
   @Override
   public int numColumns() {
     return rows.isEmpty() ? 0 : rows.get(0).getColVals().size();
@@ -64,33 +46,11 @@ public class RowBasedSet implements RowSet {
     return rows.size();
   }
 
-  public RowBasedSet extractSubset(int maxRows) {
-    int numRows = Math.min(numRows(), maxRows);
-    RowBasedSet result = new RowBasedSet(descriptors, rows.subList(0, numRows), startOffset);
-    rows.removeRange(0, numRows);
-    startOffset += numRows;
-    return result;
-  }
-
+  @Override
   public long getStartOffset() {
     return startOffset;
   }
 
-  public void setStartOffset(long startOffset) {
-    this.startOffset = startOffset;
-  }
-
-  public int getSize() {
-    return rows.size();
-  }
-
-  public TRowSet toTRowSet() {
-    TRowSet tRowSet = new TRowSet();
-    tRowSet.setStartRowOffset(startOffset);
-    tRowSet.setRows(new ArrayList<TRow>(rows));
-    return tRowSet;
-  }
-
   @Override
   public Iterator<Object[]> iterator() {
     return new Iterator<Object[]>() {
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/RowSet.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/RowSet.java
index bdf823fc1..b9dba9db6 100644
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/RowSet.java
+++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/RowSet.java
@@ -17,21 +17,11 @@
 
 package org.apache.kyuubi.jdbc.hive.cli;
 
-import org.apache.hive.service.rpc.thrift.TRowSet;
-
 public interface RowSet extends Iterable<Object[]> {
 
-  RowSet addRow(Object[] fields);
-
-  RowSet extractSubset(int maxRows);
-
   int numColumns();
 
   int numRows();
 
   long getStartOffset();
-
-  void setStartOffset(long startOffset);
-
-  TRowSet toTRowSet();
 }
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/TypeDescriptor.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/TypeDescriptor.java
deleted file mode 100644
index 86a480e98..000000000
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/TypeDescriptor.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.jdbc.hive.cli;
-
-import java.util.List;
-import org.apache.hive.service.rpc.thrift.TPrimitiveTypeEntry;
-import org.apache.hive.service.rpc.thrift.TTypeDesc;
-import org.apache.hive.service.rpc.thrift.TTypeEntry;
-import org.apache.hive.service.rpc.thrift.TTypeId;
-import org.apache.kyuubi.jdbc.hive.common.HiveDecimal;
-
-/** TypeDescriptor. */
-public class TypeDescriptor {
-
-  private final TTypeId type;
-  private TypeQualifiers typeQualifiers = null;
-
-  public TypeDescriptor(TTypeDesc tTypeDesc) {
-    List<TTypeEntry> tTypeEntries = tTypeDesc.getTypes();
-    TPrimitiveTypeEntry top = tTypeEntries.get(0).getPrimitiveEntry();
-    this.type = top.getType();
-    if (top.isSetTypeQualifiers()) {
-      setTypeQualifiers(TypeQualifiers.fromTTypeQualifiers(top.getTypeQualifiers()));
-    }
-  }
-
-  public TTypeId getType() {
-    return type;
-  }
-
-  public TTypeDesc toTTypeDesc() {
-    TPrimitiveTypeEntry primitiveEntry = new TPrimitiveTypeEntry(type);
-    if (getTypeQualifiers() != null) {
-      primitiveEntry.setTypeQualifiers(getTypeQualifiers().toTTypeQualifiers());
-    }
-    TTypeEntry entry = TTypeEntry.primitiveEntry(primitiveEntry);
-
-    TTypeDesc desc = new TTypeDesc();
-    desc.addToTypes(entry);
-    return desc;
-  }
-
-  public TypeQualifiers getTypeQualifiers() {
-    return typeQualifiers;
-  }
-
-  public void setTypeQualifiers(TypeQualifiers typeQualifiers) {
-    this.typeQualifiers = typeQualifiers;
-  }
-
-  /**
-   * The column size for this type. For numeric data this is the maximum precision. For character
-   * data this is the length in characters. For datetime types this is the length in characters of
-   * the String representation (assuming the maximum allowed precision of the fractional seconds
-   * component). For binary data this is the length in bytes. Null is returned for for data types
-   * where the column size is not applicable.
-   */
-  public Integer getColumnSize() {
-    if (isNumericType()) {
-      return getPrecision();
-    }
-    switch (type) {
-      case STRING_TYPE:
-      case BINARY_TYPE:
-        return Integer.MAX_VALUE;
-      case CHAR_TYPE:
-      case VARCHAR_TYPE:
-        return typeQualifiers.getCharacterMaximumLength();
-      case DATE_TYPE:
-        return 10;
-      case TIMESTAMP_TYPE:
-        return 29;
-      case TIMESTAMPLOCALTZ_TYPE:
-        return 31;
-      default:
-        return null;
-    }
-  }
-
-  /**
-   * Maximum precision for numeric types. Returns null for non-numeric types.
-   *
-   * @return
-   */
-  public Integer getPrecision() {
-    if (this.type == TTypeId.DECIMAL_TYPE) {
-      return typeQualifiers.getPrecision();
-    }
-    return getMaxPrecision();
-  }
-
-  /**
-   * The number of fractional digits for this type. Null is returned for data types where this is
-   * not applicable.
-   */
-  public Integer getDecimalDigits() {
-    switch (this.type) {
-      case BOOLEAN_TYPE:
-      case TINYINT_TYPE:
-      case SMALLINT_TYPE:
-      case INT_TYPE:
-      case BIGINT_TYPE:
-        return 0;
-      case FLOAT_TYPE:
-        return 7;
-      case DOUBLE_TYPE:
-        return 15;
-      case DECIMAL_TYPE:
-        return typeQualifiers.getScale();
-      case TIMESTAMP_TYPE:
-        return 9;
-      default:
-        return null;
-    }
-  }
-
-  private boolean isNumericType() {
-    switch (type) {
-      case TINYINT_TYPE:
-      case SMALLINT_TYPE:
-      case INT_TYPE:
-      case BIGINT_TYPE:
-      case FLOAT_TYPE:
-      case DOUBLE_TYPE:
-      case DECIMAL_TYPE:
-        return true;
-      default:
-        return false;
-    }
-  }
-
-  /**
-   * Maximum precision for numeric types. Returns null for non-numeric types.
-   *
-   * @return
-   */
-  private Integer getMaxPrecision() {
-    switch (type) {
-      case TINYINT_TYPE:
-        return 3;
-      case SMALLINT_TYPE:
-        return 5;
-      case INT_TYPE:
-        return 10;
-      case BIGINT_TYPE:
-        return 19;
-      case FLOAT_TYPE:
-        return 7;
-      case DOUBLE_TYPE:
-        return 15;
-      case DECIMAL_TYPE:
-        return HiveDecimal.MAX_PRECISION;
-      default:
-        return null;
-    }
-  }
-}
diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/TypeQualifiers.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/TypeQualifiers.java
deleted file mode 100644
index 56a959869..000000000
--- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/cli/TypeQualifiers.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.jdbc.hive.cli;
-
-import java.util.HashMap;
-import java.util.Map;
-import org.apache.hive.service.rpc.thrift.TCLIServiceConstants;
-import org.apache.hive.service.rpc.thrift.TTypeQualifierValue;
-import org.apache.hive.service.rpc.thrift.TTypeQualifiers;
-
-/**
- * This class holds type qualifier information for a primitive type, such as char/varchar length or
- * decimal precision/scale.
- */
-public class TypeQualifiers {
-  private Integer characterMaximumLength;
-  private Integer precision;
-  private Integer scale;
-
-  public TypeQualifiers() {}
-
-  public Integer getCharacterMaximumLength() {
-    return characterMaximumLength;
-  }
-
-  public void setCharacterMaximumLength(int characterMaximumLength) {
-    this.characterMaximumLength = characterMaximumLength;
-  }
-
-  public TTypeQualifiers toTTypeQualifiers() {
-    TTypeQualifiers ret = null;
-
-    Map<String, TTypeQualifierValue> qMap = new HashMap<String, TTypeQualifierValue>();
-    if (getCharacterMaximumLength() != null) {
-      TTypeQualifierValue val = new TTypeQualifierValue();
-      val.setI32Value(getCharacterMaximumLength().intValue());
-      qMap.put(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH, val);
-    }
-
-    if (precision != null) {
-      TTypeQualifierValue val = new TTypeQualifierValue();
-      val.setI32Value(precision.intValue());
-      qMap.put(TCLIServiceConstants.PRECISION, val);
-    }
-
-    if (scale != null) {
-      TTypeQualifierValue val = new TTypeQualifierValue();
-      val.setI32Value(scale.intValue());
-      qMap.put(TCLIServiceConstants.SCALE, val);
-    }
-
-    if (qMap.size() > 0) {
-      ret = new TTypeQualifiers(qMap);
-    }
-
-    return ret;
-  }
-
-  public static TypeQualifiers fromTTypeQualifiers(TTypeQualifiers ttq) {
-    TypeQualifiers ret = null;
-    if (ttq != null) {
-      ret = new TypeQualifiers();
-      Map<String, TTypeQualifierValue> tqMap = ttq.getQualifiers();
-
-      if (tqMap.containsKey(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH)) {
-        ret.setCharacterMaximumLength(
-            tqMap.get(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH).getI32Value());
-      }
-
-      if (tqMap.containsKey(TCLIServiceConstants.PRECISION)) {
-        ret.setPrecision(tqMap.get(TCLIServiceConstants.PRECISION).getI32Value());
-      }
-
-      if (tqMap.containsKey(TCLIServiceConstants.SCALE)) {
-        ret.setScale(tqMap.get(TCLIServiceConstants.SCALE).getI32Value());
-      }
-    }
-    return ret;
-  }
-
-  public Integer getPrecision() {
-    return precision;
-  }
-
-  public void setPrecision(Integer precision) {
-    this.precision = precision;
-  }
-
-  public Integer getScale() {
-    return scale;
-  }
-
-  public void setScale(Integer scale) {
-    this.scale = scale;
-  }
-}