Posted to commits@griffin.apache.org by gu...@apache.org on 2019/07/22 07:12:31 UTC

[griffin] branch master updated: [GRIFFIN-256] Alternative way to access Hive metadata

This is an automated email from the ASF dual-hosted git repository.

guoyp pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/griffin.git


The following commit(s) were added to refs/heads/master by this push:
     new 2b90d3e  [GRIFFIN-256] Alternative way to access Hive metadata
2b90d3e is described below

commit 2b90d3edc0959e1ee9fbe3596de8be410d17678f
Author: qwang6 <qw...@gmail.com>
AuthorDate: Mon Jul 22 15:12:15 2019 +0800

    [GRIFFIN-256] Alternative way to access Hive metadata
    
    In some companies' production environments, direct access to the Hive Metastore is restricted. In that case, the Hive metadata has to be retrieved through Hive JDBC instead.
    
    Author: qwang6 <qw...@gmail.com>
    
    Closes #513 from qwang6/master.
---
 service/pom.xml                                    |  47 ++-
 .../metastore/hive/HiveMetaStoreController.java    |   8 +-
 .../metastore/hive/HiveMetaStoreServiceImpl.java   |  20 +-
 .../hive/HiveMetaStoreServiceJdbcImpl.java         | 334 +++++++++++++++++++++
 service/src/main/resources/application.properties  |   6 +
 .../hive/HiveMetaStoreControllerTest.java          |  26 +-
 .../hive/HiveMetaStoreServiceImplTest.java         |  29 +-
 .../hive/HiveMetastoreServiceJDBCImplTest.java     | 147 +++++++++
 8 files changed, 568 insertions(+), 49 deletions(-)
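
For context, the heart of the change: instead of calling the Thrift metastore
API, metadata is read over a HiveServer2 JDBC connection. A minimal standalone
sketch of that idea (assuming a HiveServer2 endpoint at
jdbc:hive2://localhost:10000/, matching the defaults this patch adds to
application.properties):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HiveJdbcMetadataSketch {
        public static void main(String[] args) throws Exception {
            // Register the Hive JDBC driver, then connect to HiveServer2
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            try (Connection conn =
                         DriverManager.getConnection("jdbc:hive2://localhost:10000/");
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery("show databases")) {
                while (rs.next()) {
                    System.out.println(rs.getString(1)); // one database name per row
                }
            }
        }
    }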

diff --git a/service/pom.xml b/service/pom.xml
index 511c811..b6c4ade 100644
--- a/service/pom.xml
+++ b/service/pom.xml
@@ -36,6 +36,7 @@ under the License.
         <hive.version>2.2.0</hive.version>
         <scala.version>2.10</scala.version>
         <spring.boot.version>1.5.1.RELEASE</spring.boot.version>
+        <spring.security.kerberos.version>1.0.0.RELEASE</spring.security.kerberos.version>
         <confluent.version>3.2.0</confluent.version>
         <quartz.version>2.2.2</quartz.version>
         <start-class>org.apache.griffin.core.GriffinWebApplication</start-class>
@@ -99,6 +100,11 @@ under the License.
             <groupId>org.springframework</groupId>
             <artifactId>spring-aspects</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.springframework.security.kerberos</groupId>
+            <artifactId>spring-security-kerberos-client</artifactId>
+            <version>${spring.security.kerberos.version}</version>
+        </dependency>
         <!--eclipse link-->
         <dependency>
             <groupId>org.eclipse.persistence</groupId>
@@ -111,9 +117,9 @@ under the License.
             <version>${postgresql.version}</version>
         </dependency>
         <!--<dependency>-->
-            <!--<groupId>mysql</groupId>-->
-            <!--<artifactId>mysql-connector-java</artifactId>-->
-            <!--<version>${mysql.java.version}</version>-->
+        <!--<groupId>mysql</groupId>-->
+        <!--<artifactId>mysql-connector-java</artifactId>-->
+        <!--<version>${mysql.java.version}</version>-->
         <!--</dependency>-->
         <dependency>
             <groupId>com.h2database</groupId>
@@ -167,6 +173,27 @@ under the License.
             </exclusions>
         </dependency>
 
+        <!-- to access Hive using JDBC -->
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-jdbc</artifactId>
+            <version>${hive.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.eclipse.jetty.aggregate</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.eclipse.jetty.orbit</groupId>
+                    <artifactId>javax.servlet</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>javax.servlet</groupId>
+                    <artifactId>servlet-api</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
         <!-- to access confluent schema registry -->
         <dependency>
             <groupId>io.confluent</groupId>
@@ -231,12 +258,12 @@ under the License.
     <profiles>
         <!--if you need mysql, please uncomment mysql-connector-java -->
         <!--<profile>-->
-            <!--<id>mysql</id>-->
-            <!--<activation>-->
-                <!--<property>-->
-                    <!--<name>mysql</name>-->
-                <!--</property>-->
-            <!--</activation>-->
+        <!--<id>mysql</id>-->
+        <!--<activation>-->
+        <!--<property>-->
+        <!--<name>mysql</name>-->
+        <!--</property>-->
+        <!--</activation>-->
         <!--</profile>-->
         <profile>
             <id>dev</id>
@@ -317,4 +344,4 @@ under the License.
             </plugin>
         </plugins>
     </build>
-</project>
+</project>
\ No newline at end of file
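
The new spring-security-kerberos-client artifact supports Kerberos-secured
deployments; the keytab login itself goes through Hadoop's
UserGroupInformation in the new service class further down. Distilled into a
sketch (the principal and keytab path are placeholders; in the patch they come
from hive.keytab.user and hive.keytab.path):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KerberosLoginSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "Kerberos");
            UserGroupInformation.setConfiguration(conf);
            // Placeholder principal and keytab path; see application.properties
            UserGroupInformation.loginUserFromKeytab("griffin@EXAMPLE.COM",
                    "/etc/security/keytabs/griffin.keytab");
        }
    }
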
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java
index ba425ec..755b30d 100644
--- a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java
@@ -19,21 +19,23 @@ under the License.
 package org.apache.griffin.core.metastore.hive;
 
 
-import java.util.List;
-import java.util.Map;
-
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;
 
+import java.util.List;
+import java.util.Map;
+
 @RestController
 @RequestMapping("/api/v1/metadata/hive")
 public class HiveMetaStoreController {
 
     @Autowired
+    @Qualifier(value = "metastoreSvc")
     private HiveMetaStoreService hiveMetaStoreService;
 
     @RequestMapping(value = "/dbs", method = RequestMethod.GET)
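
With two HiveMetaStoreService beans now on the classpath, the @Qualifier above
decides which implementation the controller receives. The wiring, in short
(switching the value to "jdbcSvc" would serve the same endpoints through the
new JDBC-backed implementation):

    @Autowired
    @Qualifier(value = "metastoreSvc")  // or "jdbcSvc" for the JDBC implementation
    private HiveMetaStoreService hiveMetaStoreService;
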
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java
index 48a78a4..26ad14a 100644
--- a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java
@@ -19,17 +19,13 @@ under the License.
 
 package org.apache.griffin.core.metastore.hive;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
 import com.google.common.collect.Lists;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.cache.annotation.CacheConfig;
 import org.springframework.cache.annotation.CacheEvict;
@@ -38,8 +34,14 @@ import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Service;
 import org.springframework.util.StringUtils;
 
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 
 @Service
+@Qualifier(value = "metastoreSvc")
 @CacheConfig(cacheNames = "hive", keyGenerator = "cacheKeyGenerator")
 public class HiveMetaStoreServiceImpl implements HiveMetaStoreService {
 
@@ -56,6 +58,10 @@ public class HiveMetaStoreServiceImpl implements HiveMetaStoreService {
     public HiveMetaStoreServiceImpl() {
     }
 
+    public void setClient(IMetaStoreClient client) {
+        this.client = client;
+    }
+
     @Override
     @Cacheable(unless = "#result==null")
     public Iterable<String> getAllDatabases() {
@@ -100,7 +106,7 @@ public class HiveMetaStoreServiceImpl implements HiveMetaStoreService {
     public List<Table> getAllTable(String db) {
         return getTables(db);
     }
-    
+
     @Override
     @Cacheable(unless = "#result==null || #result.isEmpty()")
     public Map<String, List<String>> getAllTableNames() {
@@ -110,7 +116,7 @@ public class HiveMetaStoreServiceImpl implements HiveMetaStoreService {
         }
         return result;
     }
-    
+
     @Override
     @Cacheable(unless = "#result==null")
     public Map<String, List<Table>> getAllTable() {
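
The new setClient(...) setter lets tests swap in a mock metastore client
instead of a live Thrift connection. A minimal sketch, assuming Mockito as in
the updated tests below:

    // Unit-test setup: no Thrift connection is opened
    HiveMetaStoreServiceImpl service = new HiveMetaStoreServiceImpl();
    IMetaStoreClient client = org.mockito.Mockito.mock(IMetaStoreClient.class);
    service.setClient(client);
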
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceJdbcImpl.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceJdbcImpl.java
new file mode 100644
index 0000000..7db94e3
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceJdbcImpl.java
@@ -0,0 +1,334 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.cache.annotation.CacheConfig;
+import org.springframework.cache.annotation.CacheEvict;
+import org.springframework.cache.annotation.Cacheable;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import java.io.IOException;
+import java.sql.*;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+
+@Service
+@Qualifier(value = "jdbcSvc")
+@CacheConfig(cacheNames = "jdbcHive", keyGenerator = "cacheKeyGenerator")
+public class HiveMetaStoreServiceJdbcImpl implements HiveMetaStoreService {
+
+    private static final Logger LOGGER = LoggerFactory
+            .getLogger(HiveMetaStoreServiceJdbcImpl.class);
+
+    private static final String SHOW_TABLES_IN = "show tables in ";
+
+    private static final String SHOW_DATABASE = "show databases";
+
+    private static final String SHOW_CREATE_TABLE = "show create table ";
+
+    @Value("${hive.jdbc.className}")
+    private String hiveClassName;
+
+    @Value("${hive.jdbc.url}")
+    private String hiveUrl;
+
+    @Value("${hive.need.kerberos}")
+    private String needKerberos;
+
+    @Value("${hive.keytab.user}")
+    private String keytabUser;
+
+    @Value("${hive.keytab.path}")
+    private String keytabPath;
+
+    private Connection conn;
+
+    public void setConn(Connection conn) {
+        this.conn = conn;
+    }
+
+    public void setHiveClassName(String hiveClassName) {
+        this.hiveClassName = hiveClassName;
+    }
+
+    public void setNeedKerberos(String needKerberos) {
+        this.needKerberos = needKerberos;
+    }
+
+    public void setKeytabUser(String keytabUser) {
+        this.keytabUser = keytabUser;
+    }
+
+    public void setKeytabPath(String keytabPath) {
+        this.keytabPath = keytabPath;
+    }
+
+    @PostConstruct
+    public void init() {
+        if (needKerberos != null && needKerberos.equalsIgnoreCase("true")) {
+            LOGGER.info("Hive need Kerberos Auth.");
+
+            Configuration conf = new Configuration();
+            conf.set("hadoop.security.authentication", "Kerberos");
+            UserGroupInformation.setConfiguration(conf);
+            try {
+                UserGroupInformation.loginUserFromKeytab(keytabUser, keytabPath);
+            } catch (IOException e) {
+                LOGGER.error("Register Kerberos has error. {}", e.getMessage());
+            }
+        }
+    }
+
+    @Override
+    @Cacheable(unless = "#result==null")
+    public Iterable<String> getAllDatabases() {
+        return queryHiveString(SHOW_DATABASE);
+    }
+
+    @Override
+    @Cacheable(unless = "#result==null")
+    public Iterable<String> getAllTableNames(String dbName) {
+        return queryHiveString(SHOW_TABLES_IN + dbName);
+    }
+
+    @Override
+    @Cacheable(unless = "#result==null")
+    public Map<String, List<String>> getAllTableNames() {
+        // If there are many databases in Hive, this method may cause Griffin to crash
+        Map<String, List<String>> res = new HashMap<>();
+        for (String dbName : getAllDatabases()) {
+            List<String> list = (List<String>) queryHiveString(SHOW_TABLES_IN + dbName);
+            res.put(dbName, list);
+        }
+        return res;
+    }
+
+    @Override
+    public List<Table> getAllTable(String db) {
+        return null;
+    }
+
+    @Override
+    public Map<String, List<Table>> getAllTable() {
+        return null;
+    }
+
+    @Override
+    @Cacheable(unless = "#result==null")
+    public Table getTable(String dbName, String tableName) {
+        Table result = new Table();
+        result.setDbName(dbName);
+        result.setTableName(tableName);
+
+        String sql = SHOW_CREATE_TABLE + dbName + "." + tableName;
+        Statement stmt = null;
+        ResultSet rs = null;
+        StringBuilder sb = new StringBuilder();
+
+        try {
+            Class.forName(hiveClassName);
+            if (conn == null) {
+                conn = DriverManager.getConnection(hiveUrl);
+            }
+            LOGGER.info("got connection");
+
+            stmt = conn.createStatement();
+            rs = stmt.executeQuery(sql);
+            while (rs.next()) {
+                String s = rs.getString(1);
+                sb.append(s);
+            }
+            String location = getLocation(sb.toString());
+            List<FieldSchema> cols = getColums(sb.toString());
+            StorageDescriptor sd = new StorageDescriptor();
+            sd.setLocation(location);
+            sd.setCols(cols);
+            result.setSd(sd);
+        } catch (Exception e) {
+            LOGGER.error("Query Hive Table metadata has error. {}", e.getMessage());
+        } finally {
+            closeConnection(stmt, rs);
+        }
+        return result;
+    }
+
+    @Scheduled(fixedRateString =
+            "${cache.evict.hive.fixedRate.in.milliseconds}")
+    @CacheEvict(
+            cacheNames = "jdbcHive",
+            allEntries = true,
+            beforeInvocation = true)
+    public void evictHiveCache() {
+        LOGGER.info("Evict hive cache");
+    }
+
+    /**
+     * Run a SHOW TABLES or SHOW DATABASES statement and collect the results
+     * @param sql sql string
+     * @return the query results as a list of strings
+     */
+    private Iterable<String> queryHiveString(String sql) {
+        List<String> res = new ArrayList<>();
+        Statement stmt = null;
+        ResultSet rs = null;
+
+        try {
+            Class.forName(hiveClassName);
+            if (conn == null) {
+                conn = DriverManager.getConnection(hiveUrl);
+            }
+            LOGGER.info("got connection");
+            stmt = conn.createStatement();
+            rs = stmt.executeQuery(sql);
+            while (rs.next()) {
+                res.add(rs.getString(1));
+            }
+        } catch (Exception e) {
+            LOGGER.error("Query Hive JDBC has error, {}", e.getMessage());
+        } finally {
+            closeConnection(stmt, rs);
+        }
+        return res;
+    }
+
+
+    private void closeConnection(Statement stmt, ResultSet rs) {
+        try {
+            if (rs != null) {
+                rs.close();
+            }
+            if (stmt != null) {
+                stmt.close();
+            }
+            if (conn != null) {
+                conn.close();
+                conn = null;
+            }
+        } catch (SQLException e) {
+            LOGGER.error("Close JDBC connection has problem. {}", e.getMessage());
+        }
+    }
+
+    /**
+     * Get the Hive table location from hive table metadata string
+     * @param tableMetadata hive table metadata string
+     * @return Hive table location
+     */
+    public String getLocation(String tableMetadata) {
+        tableMetadata = tableMetadata.toLowerCase();
+        int index = tableMetadata.indexOf("location");
+        if (index == -1) {
+            return "";
+        }
+
+        int start = tableMetadata.indexOf("\'", index);
+        int end = tableMetadata.indexOf("\'", start + 1);
+
+        if (start == -1 || end == -1) {
+            return "";
+        }
+
+        return tableMetadata.substring(start + 1, end);
+    }
+
+    /**
+     * Get the Hive table schema: column name, column type, column comment
+     * The input String looks like the following:
+     *
+     * CREATE TABLE `employee`(
+     *   `eid` int,
+     *   `name` string,
+     *   `salary` string,
+     *   `destination` string)
+     * COMMENT 'Employee details'
+     * ROW FORMAT SERDE
+     *   'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+     * WITH SERDEPROPERTIES (
+     *   'field.delim'='\t',
+     *   'line.delim'='\n',
+     *   'serialization.format'='\t')
+     * STORED AS INPUTFORMAT
+     *   'org.apache.hadoop.mapred.TextInputFormat'
+     * OUTPUTFORMAT
+     *   'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+     * LOCATION
+     *   'file:/user/hive/warehouse/employee'
+     * TBLPROPERTIES (
+     *   'bucketing_version'='2',
+     *   'transient_lastDdlTime'='1562086077')
+     *
+     * @param tableMetadata hive table metadata string
+     * @return List of FieldSchema
+     */
+    public List<FieldSchema> getColums(String tableMetadata) {
+        List<FieldSchema> res = new ArrayList<>();
+        int start = tableMetadata.indexOf("(") + 1; // index of the first '('
+        int end = tableMetadata.indexOf(")", start); // index of the first ')'
+        String[] colsArr = tableMetadata.substring(start, end).split(",");
+        for (String colStr : colsArr) {
+            colStr = colStr.trim();
+            String[] parts = colStr.split(" ");
+            String colName = parts[0].trim().substring(1, parts[0].trim().length() - 1);
+            String colType = parts[1].trim();
+            String comment = getComment(colStr);
+            FieldSchema schema = new FieldSchema(colName, colType, comment);
+            res.add(schema);
+        }
+        return res;
+    }
+
+    /**
+     * Parse one column string
+     *
+     * Input example:
+     *  `merch_date` string COMMENT 'this is merch process date'
+     *
+     * @param colStr column string
+     * @return the comment text, or an empty string if there is none
+     */
+    public String getComment(String colStr) {
+        String pattern = "'([^\"|^\']|\"|\')*'";
+        Matcher m = Pattern.compile(pattern).matcher(colStr.toLowerCase());
+        if (m.find()) {
+            String text = m.group();
+            String result = text.substring(1, text.length() - 1);
+            if (!result.isEmpty()) {
+                LOGGER.info("Found value: " + result);
+            }
+            return result;
+        } else {
+            LOGGER.info("NO MATCH");
+            return "";
+        }
+    }
+}
\ No newline at end of file
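
To illustrate the parsing helpers, here is roughly what they yield on an
abbreviated version of the DDL shown in the getColums javadoc (a sketch; real
SHOW CREATE TABLE output is much longer):

    HiveMetaStoreServiceJdbcImpl svc = new HiveMetaStoreServiceJdbcImpl();
    String ddl = "CREATE TABLE `employee`(`eid` int, `name` string) "
            + "LOCATION 'file:/user/hive/warehouse/employee'";
    svc.getLocation(ddl);  // -> "file:/user/hive/warehouse/employee"
    svc.getColums(ddl);    // -> FieldSchemas eid:int and name:string, empty comments
    svc.getComment("`eid` int COMMENT 'employee id'");  // -> "employee id"
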
diff --git a/service/src/main/resources/application.properties b/service/src/main/resources/application.properties
index cb1dae5..a6c61bd 100644
--- a/service/src/main/resources/application.properties
+++ b/service/src/main/resources/application.properties
@@ -27,6 +27,12 @@ hive.metastore.uris=thrift://localhost:9083
 hive.metastore.dbname=default
 hive.hmshandler.retry.attempts=15
 hive.hmshandler.retry.interval=2000ms
+#Hive jdbc
+hive.jdbc.className=org.apache.hive.jdbc.HiveDriver
+hive.jdbc.url=jdbc:hive2://localhost:10000/
+hive.need.kerberos=false
+hive.keytab.user=xxx@xx.com
+hive.keytab.path=/path/to/keytab/file
 # Hive cache time
 cache.evict.hive.fixedRate.in.milliseconds=900000
 # Kafka schema registry
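
For a Kerberos-secured cluster the same properties would look roughly like
this (a sketch: host, realm and principals are placeholders, and the
;principal= suffix is the usual HiveServer2 Kerberos URL convention):

    hive.jdbc.className=org.apache.hive.jdbc.HiveDriver
    hive.jdbc.url=jdbc:hive2://hiveserver.example.com:10000/;principal=hive/_HOST@EXAMPLE.COM
    hive.need.kerberos=true
    hive.keytab.user=griffin@EXAMPLE.COM
    hive.keytab.path=/etc/security/keytabs/griffin.keytab
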
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
index e78bf3f..df98423 100644
--- a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
+++ b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
@@ -19,31 +19,26 @@ under the License.
 
 package org.apache.griffin.core.metastore.hive;
 
-import static org.hamcrest.Matchers.hasSize;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.nullValue;
-import static org.mockito.BDDMockito.given;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
 import org.apache.griffin.core.util.URLHelper;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
 import org.springframework.boot.test.mock.mockito.MockBean;
 import org.springframework.test.context.junit4.SpringRunner;
 import org.springframework.test.web.servlet.MockMvc;
 
+import java.util.*;
+
+import static org.hamcrest.Matchers.*;
+import static org.mockito.BDDMockito.given;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
 @RunWith(SpringRunner.class)
 @WebMvcTest(value = HiveMetaStoreController.class, secure = false)
 public class HiveMetaStoreControllerTest {
@@ -52,6 +47,7 @@ public class HiveMetaStoreControllerTest {
     private MockMvc mockMvc;
 
     @MockBean
+    @Qualifier(value = "metastoreSvc")
     private HiveMetaStoreService hiveMetaStoreService;
 
 
@@ -117,7 +113,7 @@ public class HiveMetaStoreControllerTest {
         String tableName = "table";
         given(hiveMetaStoreService.getTable(dbName, tableName)).willReturn(
                 new Table(tableName, null, null, 0, 0, 0, null, null,
-                null, null, null, null));
+                        null, null, null, null));
 
         mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/metadata/hive/table")
                 .param("db", dbName).param("table",
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
index a24f9b3..113ebf2 100644
--- a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
+++ b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
@@ -19,41 +19,42 @@ under the License.
 
 package org.apache.griffin.core.metastore.hive;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.BDDMockito.given;
-import static org.mockito.Mockito.*;
-
-import java.util.Arrays;
-import java.util.List;
-
 import org.apache.griffin.core.config.CacheConfig;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.thrift.TException;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.boot.test.mock.mockito.MockBean;
 import org.springframework.cache.CacheManager;
 import org.springframework.cache.annotation.EnableCaching;
 import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
 import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.junit4.SpringRunner;
 
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.BDDMockito.given;
+import static org.mockito.Mockito.*;
+
 
 @RunWith(SpringRunner.class)
+@ContextConfiguration
 public class HiveMetaStoreServiceImplTest {
 
-    @TestConfiguration
+    @Configuration
     @EnableCaching
     public static class HiveMetaStoreServiceConfiguration extends CacheConfig {
         @Bean("hiveMetaStoreServiceImpl")
-        public HiveMetaStoreService service() {
+        public HiveMetaStoreServiceImpl service() {
             return new HiveMetaStoreServiceImpl();
         }
 
@@ -78,13 +79,13 @@ public class HiveMetaStoreServiceImplTest {
     }
 
     @Test
-    public void testGetAllDatabasesForNormalRun() throws MetaException {
+    public void testGetAllDatabasesForNormalRun() throws TException {
         given(client.getAllDatabases()).willReturn(Arrays.asList("default"));
         assertEquals(service.getAllDatabases().iterator().hasNext(), true);
     }
 
     @Test
-    public void testGetAllDatabasesForMetaException() throws MetaException {
+    public void testGetAllDatabasesForMetaException() throws TException {
         given(client.getAllDatabases()).willThrow(MetaException.class);
         doNothing().when(client).reconnect();
         assertTrue(service.getAllDatabases() == null);
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceJDBCImplTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceJDBCImplTest.java
new file mode 100644
index 0000000..faad170
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceJDBCImplTest.java
@@ -0,0 +1,147 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+
+import org.apache.griffin.core.config.CacheConfig;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.cache.CacheManager;
+import org.springframework.cache.annotation.EnableCaching;
+import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
+import org.springframework.context.annotation.Bean;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.List;
+
+import static org.mockito.Matchers.anyInt;
+import static org.mockito.Matchers.anyString;
+import static org.powermock.api.mockito.PowerMockito.when;
+
+
+@RunWith(SpringRunner.class)
+public class HiveMetastoreServiceJDBCImplTest {
+
+    @TestConfiguration
+    @EnableCaching
+    public static class HiveMetaStoreServiceConfiguration extends CacheConfig {
+        @Bean("hiveMetaStoreServiceJdbcImpl")
+        public HiveMetaStoreServiceJdbcImpl serviceJDBC() {
+            return new HiveMetaStoreServiceJdbcImpl();
+        }
+
+        @Bean
+        CacheManager cacheManager() {
+            return new ConcurrentMapCacheManager("jdbcHive");
+        }
+    }
+
+    private HiveMetaStoreServiceJdbcImpl serviceJdbc = new HiveMetaStoreServiceJdbcImpl();
+
+    @Mock
+    private Connection conn;
+
+    @Mock
+    private Statement stmt;
+
+    @Mock
+    private ResultSet rs;
+
+    @Before
+    public void setUp() throws SQLException {
+        serviceJdbc.setConn(conn);
+        serviceJdbc.setHiveClassName("org.apache.hive.jdbc.HiveDriver");
+        serviceJdbc.setNeedKerberos("true");
+        serviceJdbc.setKeytabPath("/path/to/keytab");
+        serviceJdbc.setKeytabUser("user");
+    }
+
+    @Test
+    public void testGetComment() {
+        String colStr = "`session_date` string COMMENT 'this is session date'";
+        String comment = serviceJdbc.getComment(colStr);
+        assert (comment.equals("this is session date"));
+
+        colStr = "`session_date` string COMMENT ''";
+        comment = serviceJdbc.getComment(colStr);
+        Assert.assertTrue(comment.isEmpty());
+    }
+
+    @Test
+    public void testgetAllDatabases() throws SQLException {
+        when(conn.createStatement()).thenReturn(stmt);
+        when(stmt.executeQuery(anyString())).thenReturn(rs);
+        when(rs.next()).thenReturn(true).thenReturn(false);
+        when(rs.getString(anyInt())).thenReturn("default");
+
+        Iterable<String> res = serviceJdbc.getAllDatabases();
+        for (String s : res) {
+            Assert.assertEquals(s, "default");
+            break;
+        }
+    }
+
+    @Test
+    public void testGetAllTableNames() throws SQLException {
+        when(conn.createStatement()).thenReturn(stmt);
+        when(stmt.executeQuery(anyString())).thenReturn(rs);
+        when(rs.next()).thenReturn(true).thenReturn(true).thenReturn(false);
+        when(rs.getString(anyInt())).thenReturn("session_data").thenReturn("session_summary");
+
+        Iterable<String> res = serviceJdbc.getAllTableNames("default");
+        StringBuilder sb = new StringBuilder();
+        for (String s : res) {
+            sb.append(s).append(",");
+        }
+        Assert.assertEquals(sb.toString(), "session_data,session_summary,");
+    }
+
+    @Test
+    public void testGetTable() throws SQLException {
+        String meta = "CREATE EXTERNAL TABLE `default.session_data`(  `session_date` string COMMENT 'this is session date',   `site_id` int COMMENT '',   `guid` string COMMENT '',   `user_id` string COMMENT '')COMMENT 'session_data for session team' PARTITIONED BY (   `dt` string,   `place` int) ROW FORMAT SERDE   'org.apache.hadoop.hive.serde2.avro.AvroSerDe' STORED AS INPUTFORMAT   'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' OUTPUTFORMAT   'org.apache.hadoop.hive.ql.io [...]
+        when(conn.createStatement()).thenReturn(stmt);
+        when(stmt.executeQuery(anyString())).thenReturn(rs);
+        when(rs.next()).thenReturn(true).thenReturn(false);
+        when(rs.getString(anyInt())).thenReturn(meta);
+
+        Table res = serviceJdbc.getTable("default", "session_data");
+
+        assert (res.getDbName().equals("default"));
+        assert (res.getTableName().equals("session_data"));
+        assert (res.getSd().getLocation().equals("hdfs://localhost/session/common/session_data"));
+        List<FieldSchema> fieldSchemas = res.getSd().getCols();
+        for (FieldSchema fieldSchema : fieldSchemas) {
+            Assert.assertEquals(fieldSchema.getName(),"session_date");
+            Assert.assertEquals(fieldSchema.getType(),"string");
+            Assert.assertEquals(fieldSchema.getComment(),"this is session date");
+            break;
+        }
+    }
+}
\ No newline at end of file
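
End to end: pointing the controller's @Qualifier at "jdbcSvc" serves the
existing metadata endpoints from Hive JDBC instead of the Thrift metastore,
e.g. (host and port depend on the deployment):

    GET /api/v1/metadata/hive/dbs
    GET /api/v1/metadata/hive/table?db=default&table=session_data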