Posted to commits@kylin.apache.org by ma...@apache.org on 2016/10/31 02:23:35 UTC

[03/13] kylin git commit: KYLIN-2125 add BeelineHiveClient

KYLIN-2125 add BeelineHiveClient


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6fc1c865
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6fc1c865
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6fc1c865

Branch: refs/heads/yang21-hbase1.x
Commit: 6fc1c865ae35a86839e226ae2ca7b3f88661c1dd
Parents: d5565f7
Author: Hongbin Ma <ma...@apache.org>
Authored: Thu Oct 13 15:31:11 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Wed Oct 26 18:43:34 2016 +0800

----------------------------------------------------------------------
 .../kylin/rest/controller/TableController.java  |   9 +-
 source-hive/pom.xml                             |   5 +
 .../kylin/source/hive/BeelineHiveClient.java    | 214 +++++++++++++++++++
 .../source/hive/BeelineOptionsProcessor.java    |  47 ++++
 .../apache/kylin/source/hive/CLIHiveClient.java | 169 +++++++++++++++
 .../apache/kylin/source/hive/HiveClient.java    | 170 ---------------
 .../kylin/source/hive/HiveClientFactory.java    |  33 +++
 .../source/hive/HiveSourceTableLoader.java      |  88 ++++----
 .../org/apache/kylin/source/hive/HiveTable.java |  16 +-
 .../apache/kylin/source/hive/HiveTableMeta.java |  71 ++++++
 .../kylin/source/hive/HiveTableMetaBuilder.java | 102 +++++++++
 .../apache/kylin/source/hive/HqlExecutable.java | 107 ----------
 .../apache/kylin/source/hive/IHiveClient.java   |  36 ++++
 .../hive/BeelineOptionsProcessorTest.java       |  38 ++++
 14 files changed, 769 insertions(+), 336 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
index eefeba8..b7dd728 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
@@ -47,7 +47,8 @@ import org.apache.kylin.rest.service.KafkaConfigService;
 import org.apache.kylin.rest.service.ModelService;
 import org.apache.kylin.rest.service.ProjectService;
 import org.apache.kylin.rest.service.StreamingService;
-import org.apache.kylin.source.hive.HiveClient;
+import org.apache.kylin.source.hive.HiveClientFactory;
+import org.apache.kylin.source.hive.IHiveClient;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -189,7 +190,7 @@ public class TableController extends BasicController {
         String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
         tableName = dbTableName[0] + "." + dbTableName[1];
         TableDesc desc = cubeMgmtService.getMetadataManager().getTableDesc(tableName);
-        if(desc == null)
+        if (desc == null)
             return false;
         tableType = desc.getSourceType();
 
@@ -312,7 +313,7 @@ public class TableController extends BasicController {
     @RequestMapping(value = "/hive", method = { RequestMethod.GET })
     @ResponseBody
     private static List<String> showHiveDatabases() throws IOException {
-        HiveClient hiveClient = new HiveClient();
+        IHiveClient hiveClient = HiveClientFactory.getHiveClient();
         List<String> results = null;
 
         try {
@@ -333,7 +334,7 @@ public class TableController extends BasicController {
     @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET })
     @ResponseBody
     private static List<String> showHiveTables(@PathVariable String database) throws IOException {
-        HiveClient hiveClient = new HiveClient();
+        IHiveClient hiveClient = HiveClientFactory.getHiveClient();
         List<String> results = null;
 
         try {

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/pom.xml
----------------------------------------------------------------------
diff --git a/source-hive/pom.xml b/source-hive/pom.xml
index 08019d0..16cb3b4 100644
--- a/source-hive/pom.xml
+++ b/source-hive/pom.xml
@@ -65,6 +65,11 @@
             <scope>provided</scope>
         </dependency>
         <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-jdbc</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
             <groupId>org.apache.mrunit</groupId>
             <artifactId>mrunit</artifactId>
             <classifier>hadoop2</classifier>

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
new file mode 100644
index 0000000..0fbc39b
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.List;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
+public class BeelineHiveClient implements IHiveClient {
+
+    private Connection cnct;
+    private Statement stmt;
+    private DatabaseMetaData metaData;
+
+    public BeelineHiveClient(String beelineParams) {
+        if (StringUtils.isEmpty(beelineParams)) {
+            throw new IllegalArgumentException("BeelineParames cannot be empty");
+        }
+        String[] splits = StringUtils.split(beelineParams);
+        String url = null, username = null, password = null;
+        for (int i = 0; i < splits.length; i++) {
+            if ("-u".equals(splits[i])) {
+                url = stripQuotes(splits[i + 1]);
+            }
+            if ("-n".equals(splits[i])) {
+                username = stripQuotes(splits[i + 1]);
+            }
+            if ("-p".equals(splits[i])) {
+                password = stripQuotes(splits[i + 1]);
+            }
+        }
+        this.init(url, username, password);
+    }
+
+    private void init(String url, String username, String password) {
+        try {
+            Class.forName("org.apache.hive.jdbc.HiveDriver");
+            cnct = DriverManager.getConnection(url, username, password);
+            stmt = cnct.createStatement();
+            metaData = cnct.getMetaData();
+        } catch (SQLException | ClassNotFoundException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private String stripQuotes(String input) {
+        if (input.startsWith("'") && input.endsWith("'")) {
+            return StringUtils.strip(input, "'");
+        } else if (input.startsWith("\"") && input.endsWith("\"")) {
+            return StringUtils.strip(input, "\"");
+        } else {
+            return input;
+        }
+    }
+
+    public List<String> getHiveDbNames() throws Exception {
+        List<String> ret = Lists.newArrayList();
+        ResultSet schemas = metaData.getSchemas();
+        while (schemas.next()) {
+            ret.add(String.valueOf(schemas.getObject(1)));
+        }
+        return ret;
+    }
+
+    public List<String> getHiveTableNames(String database) throws Exception {
+        List<String> ret = Lists.newArrayList();
+        ResultSet tables = metaData.getTables(null, database, null, null);
+        while (tables.next()) {
+            ret.add(String.valueOf(tables.getObject(3)));
+        }
+        return ret;
+    }
+
+    @Override
+    public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void executeHQL(String[] hqls) throws CommandNeedRetryException, IOException {
+        throw new UnsupportedOperationException();
+    }
+
+    public HiveTableMeta getHiveTableMeta(String database, String tableName) throws SQLException {
+        ResultSet columns = metaData.getColumns(null, database, tableName, null);
+        HiveTableMetaBuilder builder = new HiveTableMetaBuilder();
+        builder.setTableName(tableName);
+
+        List<HiveTableMeta.HiveTableColumnMeta> allColumns = Lists.newArrayList();
+        while (columns.next()) {
+            allColumns.add(new HiveTableMeta.HiveTableColumnMeta(columns.getString(4), columns.getString(6)));
+        }
+        builder.setAllColumns(allColumns);
+
+        stmt.execute("use " + database);
+        ResultSet resultSet = stmt.executeQuery("describe formatted " + tableName);
+        extractHiveTableMeta(resultSet, builder);
+
+        return builder.createHiveTableMeta();
+    }
+
+    private void extractHiveTableMeta(ResultSet resultSet, HiveTableMetaBuilder builder) throws SQLException {
+        while (resultSet.next()) {
+
+            List<HiveTableMeta.HiveTableColumnMeta> partitionColumns = Lists.newArrayList();
+            if ("# Partition Information".equals(resultSet.getString(1).trim())) {
+                resultSet.next();
+                Preconditions.checkArgument("# col_name".equals(resultSet.getString(1).trim()));
+                resultSet.next();
+                Preconditions.checkArgument("".equals(resultSet.getString(1).trim()));
+                while (resultSet.next()) {
+                    if ("".equals(resultSet.getString(1).trim())) {
+                        break;
+                    }
+                    partitionColumns.add(new HiveTableMeta.HiveTableColumnMeta(resultSet.getString(1).trim(), resultSet.getString(2).trim()));
+                }
+                builder.setPartitionColumns(partitionColumns);
+            }
+
+            if ("Owner:".equals(resultSet.getString(1).trim())) {
+                builder.setOwner(resultSet.getString(2).trim());
+            }
+            if ("LastAccessTime:".equals(resultSet.getString(1).trim())) {
+                try {
+                    int i = Integer.parseInt(resultSet.getString(2).trim());
+                    builder.setLastAccessTime(i);
+                } catch (NumberFormatException e) {
+                    builder.setLastAccessTime(0);
+                }
+            }
+            if ("Location:".equals(resultSet.getString(1).trim())) {
+                builder.setSdLocation(resultSet.getString(2).trim());
+            }
+            if ("Table Type:".equals(resultSet.getString(1).trim())) {
+                builder.setTableType(resultSet.getString(2).trim());
+            }
+            if ("Table Parameters:".equals(resultSet.getString(1).trim())) {
+                while (resultSet.next()) {
+                    if (resultSet.getString(2) == null) {
+                        break;
+                    }
+                    if ("storage_handler".equals(resultSet.getString(2).trim())) {
+                        builder.setIsNative(false); // default is true
+                    }
+                    if ("totalSize".equals(resultSet.getString(2).trim())) {
+                        builder.setFileSize(Long.parseLong(resultSet.getString(3).trim()));
+                    }
+                    if ("numFiles".equals(resultSet.getString(2).trim())) {
+                        builder.setFileNum(Long.parseLong(resultSet.getString(3).trim()));
+                    }
+                }
+            }
+            if ("InputFormat:".equals(resultSet.getString(1).trim())) {
+                builder.setSdInputFormat(resultSet.getString(2).trim());
+            }
+            if ("OutputFormat:".equals(resultSet.getString(1).trim())) {
+                builder.setSdOutputFormat(resultSet.getString(2).trim());
+            }
+        }
+    }
+
+    public void close() {
+        if (this.stmt != null) {
+            try {
+                this.stmt.close();
+            } catch (SQLException e) {
+                //
+            }
+        }
+        if (this.cnct != null) {
+            try {
+                this.cnct.close();
+            } catch (SQLException e) {
+                //
+            }
+        }
+    }
+
+    public static void main(String[] args) throws SQLException {
+
+        //BeelineHiveClient loader = new BeelineHiveClient("-n root --hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://sandbox:10000'");
+        BeelineHiveClient loader = new BeelineHiveClient(StringUtils.join(args, " "));
+        HiveTableMeta hiveTableMeta = loader.getHiveTableMeta("default", "events");
+        System.out.println(hiveTableMeta);
+        loader.close();
+    }
+}
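
For orientation, a minimal usage sketch of the client above. The endpoint mirrors the commented example in main() and is an assumption, as is the default.events table; this snippet is illustrative, not part of the commit.

    // Illustrative only: the HiveServer2 endpoint and table name are assumptions.
    // Call from a method declared "throws Exception".
    BeelineHiveClient client = new BeelineHiveClient("-n root -u 'jdbc:hive2://sandbox:10000'");
    try {
        for (String db : client.getHiveDbNames()) {   // schemas via JDBC DatabaseMetaData
            System.out.println(db);
        }
        HiveTableMeta meta = client.getHiveTableMeta("default", "events");
        System.out.println(meta);
    } finally {
        client.close();   // closes the underlying Statement and Connection
    }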

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineOptionsProcessor.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineOptionsProcessor.java b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineOptionsProcessor.java
new file mode 100644
index 0000000..68cb352
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineOptionsProcessor.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+
+public class BeelineOptionsProcessor {
+    private final Options options = new Options();
+
+    public BeelineOptionsProcessor() {
+
+        options.addOption(OptionBuilder.hasArg().withArgName("url").create('u'));
+        options.addOption(OptionBuilder.hasArg().withArgName("username").create('n'));
+        options.addOption(OptionBuilder.hasArg().withArgName("password").create('p'));
+
+    }
+
+    public CommandLine process(String[] argv) {
+        try {
+            return new GnuParser().parse(options, argv);
+
+        } catch (ParseException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+}
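
A quick sketch of the processor in isolation; note that parsed values keep their surrounding quotes, which is what stripQuotes() in BeelineHiveClient compensates for. Values are illustrative.

    BeelineOptionsProcessor processor = new BeelineOptionsProcessor();
    CommandLine cmd = processor.process(
            StringUtils.split("-n root -u 'jdbc:hive2://localhost:10000'"));
    String user = cmd.getOptionValue('n');   // "root"
    String url = cmd.getOptionValue('u');    // "'jdbc:hive2://localhost:10000'", still quoted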

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java
new file mode 100644
index 0000000..ea74470
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.source.hive;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Hive meta API client for Kylin
+ * @author shaoshi
+ *
+ */
+public class CLIHiveClient implements IHiveClient {
+
+    protected HiveConf hiveConf = null;
+    protected Driver driver = null;
+    protected HiveMetaStoreClient metaStoreClient = null;
+
+    public CLIHiveClient() {
+        hiveConf = new HiveConf(CLIHiveClient.class);
+    }
+
+    /**
+     * only used by Deploy Util
+     */
+    @Override
+    public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
+        CommandProcessorResponse response = getDriver().run(hql);
+        int retCode = response.getResponseCode();
+        if (retCode != 0) {
+            String err = response.getErrorMessage();
+            throw new IOException("Failed to execute hql [" + hql + "], error message is: " + err);
+        }
+    }
+
+    /**
+     * only used by Deploy Util
+     */
+    @Override
+    public void executeHQL(String[] hqls) throws CommandNeedRetryException, IOException {
+        for (String sql : hqls)
+            executeHQL(sql);
+    }
+
+    @Override
+    public HiveTableMeta getHiveTableMeta(String database, String tableName) throws Exception {
+        HiveTableMetaBuilder builder = new HiveTableMetaBuilder();
+        Table table = getMetaStoreClient().getTable(database, tableName);
+
+        List<FieldSchema> allFields = getMetaStoreClient().getFields(database, tableName);
+        List<FieldSchema> partitionFields = table.getPartitionKeys();
+        if (allFields == null) {
+            allFields = Lists.newArrayList();
+        }
+        if (partitionFields != null && partitionFields.size() > 0) {
+            allFields.addAll(partitionFields);
+        }
+        List<HiveTableMeta.HiveTableColumnMeta> allColumns = Lists.newArrayList();
+        List<HiveTableMeta.HiveTableColumnMeta> partitionColumns = Lists.newArrayList();
+        for (FieldSchema fieldSchema : allFields) {
+            allColumns.add(new HiveTableMeta.HiveTableColumnMeta(fieldSchema.getName(), fieldSchema.getType()));
+        }
+        if (partitionFields != null && partitionFields.size() > 0) {
+            for (FieldSchema fieldSchema : partitionFields) {
+                partitionColumns.add(new HiveTableMeta.HiveTableColumnMeta(fieldSchema.getName(), fieldSchema.getType()));
+            }
+        }
+        builder.setAllColumns(allColumns);
+        builder.setPartitionColumns(partitionColumns);
+
+        builder.setSdLocation(table.getSd().getLocation());
+        builder.setFileSize(getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.TOTAL_SIZE));
+        builder.setFileNum(getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES));
+        builder.setIsNative(!MetaStoreUtils.isNonNativeTable(table));
+        builder.setTableName(tableName);
+        builder.setSdInputFormat(table.getSd().getInputFormat());
+        builder.setSdOutputFormat(table.getSd().getOutputFormat());
+        builder.setOwner(table.getOwner());
+        builder.setLastAccessTime(table.getLastAccessTime());
+        builder.setTableType(table.getTableType());
+
+        return builder.createHiveTableMeta();
+    }
+
+    @Override
+    public List<String> getHiveDbNames() throws Exception {
+        return getMetaStoreClient().getAllDatabases();
+    }
+
+    @Override
+    public List<String> getHiveTableNames(String database) throws Exception {
+        return getMetaStoreClient().getAllTables(database);
+    }
+
+    private HiveMetaStoreClient getMetaStoreClient() throws Exception {
+        if (metaStoreClient == null) {
+            metaStoreClient = new HiveMetaStoreClient(hiveConf);
+        }
+        return metaStoreClient;
+    }
+
+    /**
+     * COPIED FROM org.apache.hadoop.hive.ql.stats.StatsUtil for backward compatibility
+     * 
+     * Get basic stats of table
+     * @param table
+     *          - table
+     * @param statType
+     *          - type of stats
+     * @return value of stats
+     */
+    private long getBasicStatForTable(org.apache.hadoop.hive.ql.metadata.Table table, String statType) {
+        Map<String, String> params = table.getParameters();
+        long result = 0;
+
+        if (params != null) {
+            try {
+                result = Long.parseLong(params.get(statType));
+            } catch (NumberFormatException e) {
+                result = 0;
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Get the hive ql driver to execute ddl or dml
+     * @return
+     */
+    private Driver getDriver() {
+        if (driver == null) {
+            driver = new Driver(hiveConf);
+            SessionState.start(new CliSessionState(hiveConf));
+        }
+
+        return driver;
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
deleted file mode 100644
index a99b304..0000000
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.source.hive;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.hive.cli.CliSessionState;
-import org.apache.hadoop.hive.common.StatsSetupConst;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.hive.ql.session.SessionState;
-
-/**
- * Hive meta API client for Kylin
- * @author shaoshi
- *
- */
-public class HiveClient {
-
-    protected HiveConf hiveConf = null;
-    protected Driver driver = null;
-    protected HiveMetaStoreClient metaStoreClient = null;
-
-    public HiveClient() {
-        hiveConf = new HiveConf(HiveClient.class);
-    }
-
-    public HiveClient(Map<String, String> configMap) {
-        this();
-        appendConfiguration(configMap);
-    }
-
-    public HiveConf getHiveConf() {
-        return hiveConf;
-    }
-
-    /**
-     * Get the hive ql driver to execute ddl or dml
-     * @return
-     */
-    private Driver getDriver() {
-        if (driver == null) {
-            driver = new Driver(hiveConf);
-            SessionState.start(new CliSessionState(hiveConf));
-        }
-
-        return driver;
-    }
-
-    /**
-     * Append or overwrite the default hive client configuration; You need call this before invoke #executeHQL;
-     * @param configMap
-     */
-    public void appendConfiguration(Map<String, String> configMap) {
-        if (configMap != null && configMap.size() > 0) {
-            for (Entry<String, String> e : configMap.entrySet()) {
-                hiveConf.set(e.getKey(), e.getValue());
-            }
-        }
-    }
-
-    /**
-     * 
-     * @param hql
-     * @throws CommandNeedRetryException
-     * @throws IOException
-     */
-    public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
-        CommandProcessorResponse response = getDriver().run(hql);
-        int retCode = response.getResponseCode();
-        if (retCode != 0) {
-            String err = response.getErrorMessage();
-            throw new IOException("Failed to execute hql [" + hql + "], error message is: " + err);
-        }
-    }
-
-    public void executeHQL(String[] hqls) throws CommandNeedRetryException, IOException {
-        for (String sql : hqls)
-            executeHQL(sql);
-    }
-
-    private HiveMetaStoreClient getMetaStoreClient() throws Exception {
-        if (metaStoreClient == null) {
-            metaStoreClient = new HiveMetaStoreClient(hiveConf);
-        }
-        return metaStoreClient;
-    }
-
-    public Table getHiveTable(String database, String tableName) throws Exception {
-        return getMetaStoreClient().getTable(database, tableName);
-    }
-
-    public List<FieldSchema> getHiveTableFields(String database, String tableName) throws Exception {
-        return getMetaStoreClient().getFields(database, tableName);
-    }
-
-    public String getHiveTableLocation(String database, String tableName) throws Exception {
-        Table t = getHiveTable(database, tableName);
-        return t.getSd().getLocation();
-    }
-
-    public long getFileSizeForTable(Table table) {
-        return getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.TOTAL_SIZE);
-    }
-
-    public long getFileNumberForTable(Table table) {
-        return getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES);
-    }
-
-    public List<String> getHiveDbNames() throws Exception {
-        return getMetaStoreClient().getAllDatabases();
-    }
-
-    public List<String> getHiveTableNames(String database) throws Exception {
-        return getMetaStoreClient().getAllTables(database);
-    }
-
-    /**
-     * COPIED FROM org.apache.hadoop.hive.ql.stats.StatsUtil for backward compatibility
-     * 
-     * Get basic stats of table
-     * @param table
-     *          - table
-     * @param statType
-     *          - type of stats
-     * @return value of stats
-     */
-    public static long getBasicStatForTable(org.apache.hadoop.hive.ql.metadata.Table table, String statType) {
-        Map<String, String> params = table.getParameters();
-        long result = 0;
-
-        if (params != null) {
-            try {
-                result = Long.parseLong(params.get(statType));
-            } catch (NumberFormatException e) {
-                result = 0;
-            }
-        }
-        return result;
-    }
-
-    public boolean isNativeTable(String database, String tableName) throws Exception {
-        return !MetaStoreUtils.isNonNativeTable(getMetaStoreClient().getTable(database, tableName));
-    }
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClientFactory.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClientFactory.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClientFactory.java
new file mode 100644
index 0000000..8c883af
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClientFactory.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import org.apache.kylin.common.KylinConfig;
+
+public class HiveClientFactory {
+    public static IHiveClient getHiveClient() {
+        if ("cli".equals(KylinConfig.getInstanceFromEnv().getHiveClientMode())) {
+            return new CLIHiveClient();
+        } else if ("beeline".equals(KylinConfig.getInstanceFromEnv().getHiveClientMode())) {
+            return new BeelineHiveClient(KylinConfig.getInstanceFromEnv().getHiveBeelineParams());
+        } else {
+            throw new RuntimeException("cannot recognize hive client mode");
+        }
+    }
+}
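
Call sites now select the client through this factory; a sketch of the pattern used in TableController and HiveSourceTableLoader above. Which branch runs depends on KylinConfig; the property names behind getHiveClientMode() and getHiveBeelineParams() are defined in KylinConfig, outside this diff.

    // Mode is "cli" or "beeline" per KylinConfig; any other value throws RuntimeException.
    IHiveClient hiveClient = HiveClientFactory.getHiveClient();
    List<String> databases = hiveClient.getHiveDbNames();   // declared "throws Exception"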

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
index 8b98e7b..346d278 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
@@ -25,10 +25,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
 
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.mr.HadoopUtil;
 import org.apache.kylin.metadata.MetadataConstants;
 import org.apache.kylin.metadata.MetadataManager;
@@ -37,10 +34,8 @@ import org.apache.kylin.metadata.model.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.LinkedHashMultimap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.collect.SetMultimap;
 import com.google.common.collect.Sets;
 
 /**
@@ -54,25 +49,27 @@ public class HiveSourceTableLoader {
     @SuppressWarnings("unused")
     private static final Logger logger = LoggerFactory.getLogger(HiveSourceTableLoader.class);
 
-    public static Set<String> reloadHiveTables(String[] hiveTables, KylinConfig config) throws IOException {
+    public static final String OUTPUT_SURFIX = "json";
+    public static final String TABLE_FOLDER_NAME = "table";
+    public static final String TABLE_EXD_FOLDER_NAME = "table_exd";
 
-        SetMultimap<String, String> db2tables = LinkedHashMultimap.create();
-        for (String fullTableName : hiveTables) {
-            String[] parts = HadoopUtil.parseHiveTableName(fullTableName);
-            db2tables.put(parts[0], parts[1]);
-        }
+    public static Set<String> reloadHiveTables(String[] hiveTables, KylinConfig config) throws IOException {
 
-        HiveClient hiveClient = new HiveClient();
-        SchemaChecker checker = new SchemaChecker(hiveClient, MetadataManager.getInstance(config), CubeManager.getInstance(config));
-        for (Map.Entry<String, String> entry : db2tables.entries()) {
-            SchemaChecker.CheckResult result = checker.allowReload(entry.getKey(), entry.getValue());
-            result.raiseExceptionWhenInvalid();
+        Map<String, Set<String>> db2tables = Maps.newHashMap();
+        for (String table : hiveTables) {
+            String[] parts = HadoopUtil.parseHiveTableName(table);
+            Set<String> set = db2tables.get(parts[0]);
+            if (set == null) {
+                set = Sets.newHashSet();
+                db2tables.put(parts[0], set);
+            }
+            set.add(parts[1]);
         }
 
         // extract from hive
         Set<String> loadedTables = Sets.newHashSet();
         for (String database : db2tables.keySet()) {
-            List<String> loaded = extractHiveTables(database, db2tables.get(database), hiveClient);
+            List<String> loaded = extractHiveTables(database, db2tables.get(database), config);
             loadedTables.addAll(loaded);
         }
 
@@ -85,29 +82,19 @@ public class HiveSourceTableLoader {
         metaMgr.removeTableExd(hiveTable);
     }
 
-    private static List<String> extractHiveTables(String database, Set<String> tables, HiveClient hiveClient) throws IOException {
+    private static List<String> extractHiveTables(String database, Set<String> tables, KylinConfig config) throws IOException {
 
         List<String> loadedTables = Lists.newArrayList();
         MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
         for (String tableName : tables) {
-            Table table = null;
-            List<FieldSchema> partitionFields = null;
-            List<FieldSchema> fields = null;
+            IHiveClient hiveClient = HiveClientFactory.getHiveClient();
+            HiveTableMeta hiveTableMeta;
             try {
-                table = hiveClient.getHiveTable(database, tableName);
-                partitionFields = table.getPartitionKeys();
-                fields = hiveClient.getHiveTableFields(database, tableName);
+                hiveTableMeta = hiveClient.getHiveTableMeta(database, tableName);
             } catch (Exception e) {
-                e.printStackTrace();
-                throw new IOException(e);
+                throw new RuntimeException("cannot get HiveTableMeta", e);
             }
 
-            if (fields != null && partitionFields != null && partitionFields.size() > 0) {
-                fields.addAll(partitionFields);
-            }
-
-            long tableSize = hiveClient.getFileSizeForTable(table);
-            long tableFileNum = hiveClient.getFileNumberForTable(table);
             TableDesc tableDesc = metaMgr.getTableDesc(database + "." + tableName);
             if (tableDesc == null) {
                 tableDesc = new TableDesc();
@@ -116,21 +103,21 @@ public class HiveSourceTableLoader {
                 tableDesc.setUuid(UUID.randomUUID().toString());
                 tableDesc.setLastModified(0);
             }
-            if (table.getTableType() != null) {
-                tableDesc.setTableType(table.getTableType());
+            if (hiveTableMeta.tableType != null) {
+                tableDesc.setTableType(hiveTableMeta.tableType);
             }
 
-            int columnNumber = fields.size();
+            int columnNumber = hiveTableMeta.allColumns.size();
             List<ColumnDesc> columns = new ArrayList<ColumnDesc>(columnNumber);
             for (int i = 0; i < columnNumber; i++) {
-                FieldSchema field = fields.get(i);
+                HiveTableMeta.HiveTableColumnMeta field = hiveTableMeta.allColumns.get(i);
                 ColumnDesc cdesc = new ColumnDesc();
-                cdesc.setName(field.getName().toUpperCase());
+                cdesc.setName(field.name.toUpperCase());
                 // use "double" in kylin for "float"
-                if ("float".equalsIgnoreCase(field.getType())) {
+                if ("float".equalsIgnoreCase(field.dataType)) {
                     cdesc.setDatatype("double");
                 } else {
-                    cdesc.setDatatype(field.getType());
+                    cdesc.setDatatype(field.dataType);
                 }
                 cdesc.setId(String.valueOf(i + 1));
                 columns.add(cdesc);
@@ -138,10 +125,10 @@ public class HiveSourceTableLoader {
             tableDesc.setColumns(columns.toArray(new ColumnDesc[columnNumber]));
 
             StringBuffer partitionColumnString = new StringBuffer();
-            for (int i = 0, n = partitionFields.size(); i < n; i++) {
+            for (int i = 0, n = hiveTableMeta.partitionColumns.size(); i < n; i++) {
                 if (i > 0)
                     partitionColumnString.append(", ");
-                partitionColumnString.append(partitionFields.get(i).getName().toUpperCase());
+                partitionColumnString.append(hiveTableMeta.partitionColumns.get(i).name.toUpperCase());
             }
 
             Map<String, String> map = metaMgr.getTableDescExd(tableDesc.getIdentity());
@@ -149,16 +136,16 @@ public class HiveSourceTableLoader {
             if (map == null) {
                 map = Maps.newHashMap();
             }
-            map.put(MetadataConstants.TABLE_EXD_TABLENAME, table.getTableName());
-            map.put(MetadataConstants.TABLE_EXD_LOCATION, table.getSd().getLocation());
-            map.put(MetadataConstants.TABLE_EXD_IF, table.getSd().getInputFormat());
-            map.put(MetadataConstants.TABLE_EXD_OF, table.getSd().getOutputFormat());
-            map.put(MetadataConstants.TABLE_EXD_OWNER, table.getOwner());
-            map.put(MetadataConstants.TABLE_EXD_LAT, String.valueOf(table.getLastAccessTime()));
+            map.put(MetadataConstants.TABLE_EXD_TABLENAME, hiveTableMeta.tableName);
+            map.put(MetadataConstants.TABLE_EXD_LOCATION, hiveTableMeta.sdLocation);
+            map.put(MetadataConstants.TABLE_EXD_IF, hiveTableMeta.sdInputFormat);
+            map.put(MetadataConstants.TABLE_EXD_OF, hiveTableMeta.sdOutputFormat);
+            map.put(MetadataConstants.TABLE_EXD_OWNER, hiveTableMeta.owner);
+            map.put(MetadataConstants.TABLE_EXD_LAT, String.valueOf(hiveTableMeta.lastAccessTime));
             map.put(MetadataConstants.TABLE_EXD_PC, partitionColumnString.toString());
-            map.put(MetadataConstants.TABLE_EXD_TFS, String.valueOf(tableSize));
-            map.put(MetadataConstants.TABLE_EXD_TNF, String.valueOf(tableFileNum));
-            map.put(MetadataConstants.TABLE_EXD_PARTITIONED, Boolean.valueOf(partitionFields != null && partitionFields.size() > 0).toString());
+            map.put(MetadataConstants.TABLE_EXD_TFS, String.valueOf(hiveTableMeta.fileSize));
+            map.put(MetadataConstants.TABLE_EXD_TNF, String.valueOf(hiveTableMeta.fileNum));
+            map.put(MetadataConstants.TABLE_EXD_PARTITIONED, Boolean.valueOf(hiveTableMeta.partitionColumns.size() > 0).toString());
 
             metaMgr.saveSourceTable(tableDesc);
             metaMgr.saveTableExd(tableDesc.getIdentity(), map);
@@ -167,4 +154,5 @@ public class HiveSourceTableLoader {
 
         return loadedTables;
     }
+
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
index dcc43ff..97e9990 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
@@ -37,11 +37,17 @@ public class HiveTable implements ReadableTable {
     final private String database;
     final private String hiveTable;
 
-    private HiveClient hiveClient;
+    private IHiveClient hiveClient;
+    private HiveTableMeta hiveTableMeta;
 
     public HiveTable(TableDesc tableDesc) {
         this.database = tableDesc.getDatabase();
         this.hiveTable = tableDesc.getName();
+        try {
+            this.hiveTableMeta = getHiveClient().getHiveTableMeta(database, hiveTable);
+        } catch (Exception e) {
+            throw new RuntimeException("cannot get HiveTableMeta", e);
+        }
     }
 
     @Override
@@ -58,7 +64,7 @@ public class HiveTable implements ReadableTable {
             long lastModified = sizeAndLastModified.getSecond();
 
             // for non-native hive table, cannot rely on size & last modified on HDFS
-            if (getHiveClient().isNativeTable(database, hiveTable) == false) {
+            if (this.hiveTableMeta.isNative == false) {
                 lastModified = System.currentTimeMillis(); // assume table is ever changing
             }
 
@@ -80,13 +86,13 @@ public class HiveTable implements ReadableTable {
             return override;
         }
 
-        return getHiveClient().getHiveTableLocation(database, hiveTable);
+        return this.hiveTableMeta.sdLocation;
     }
 
-    public HiveClient getHiveClient() {
+    public IHiveClient getHiveClient() {
 
         if (hiveClient == null) {
-            hiveClient = new HiveClient();
+            hiveClient = HiveClientFactory.getHiveClient();
         }
         return hiveClient;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java
new file mode 100644
index 0000000..c2b7c96
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import java.util.List;
+
+class HiveTableMeta {
+    static class HiveTableColumnMeta {
+        String name;
+        String dataType;
+
+        public HiveTableColumnMeta(String name, String dataType) {
+            this.name = name;
+            this.dataType = dataType;
+        }
+
+        @Override
+        public String toString() {
+            return "HiveTableColumnMeta{" + "name='" + name + '\'' + ", dataType='" + dataType + '\'' + '}';
+        }
+    }
+
+    String tableName;
+    String sdLocation;//sd is short for storage descriptor
+    String sdInputFormat;
+    String sdOutputFormat;
+    String owner;
+    String tableType;
+    int lastAccessTime;
+    long fileSize;
+    long fileNum;
+    boolean isNative;
+    List<HiveTableColumnMeta> allColumns;
+    List<HiveTableColumnMeta> partitionColumns;
+
+    public HiveTableMeta(String tableName, String sdLocation, String sdInputFormat, String sdOutputFormat, String owner, String tableType, int lastAccessTime, long fileSize, long fileNum, boolean isNative, List<HiveTableColumnMeta> allColumns, List<HiveTableColumnMeta> partitionColumns) {
+        this.tableName = tableName;
+        this.sdLocation = sdLocation;
+        this.sdInputFormat = sdInputFormat;
+        this.sdOutputFormat = sdOutputFormat;
+        this.owner = owner;
+        this.tableType = tableType;
+        this.lastAccessTime = lastAccessTime;
+        this.fileSize = fileSize;
+        this.fileNum = fileNum;
+        this.isNative = isNative;
+        this.allColumns = allColumns;
+        this.partitionColumns = partitionColumns;
+    }
+
+    @Override
+    public String toString() {
+        return "HiveTableMeta{" + "tableName='" + tableName + '\'' + ", sdLocation='" + sdLocation + '\'' + ", sdInputFormat='" + sdInputFormat + '\'' + ", sdOutputFormat='" + sdOutputFormat + '\'' + ", owner='" + owner + '\'' + ", tableType='" + tableType + '\'' + ", lastAccessTime=" + lastAccessTime + ", fileSize=" + fileSize + ", fileNum=" + fileNum + ", isNative=" + isNative + ", allColumns=" + allColumns + ", partitionColumns=" + partitionColumns + '}';
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java
new file mode 100644
index 0000000..7a3e5d6
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import java.util.List;
+
+import com.google.common.collect.Lists;
+
+public class HiveTableMetaBuilder {
+    private String tableName;
+    private String sdLocation;
+    private String sdInputFormat;
+    private String sdOutputFormat;
+    private String owner;
+    private String tableType;
+    private int lastAccessTime;
+    private long fileSize;
+    private long fileNum;
+    private boolean isNative = true;
+    private List<HiveTableMeta.HiveTableColumnMeta> allColumns = Lists.newArrayList();
+    private List<HiveTableMeta.HiveTableColumnMeta> partitionColumns = Lists.newArrayList();
+
+    public HiveTableMetaBuilder setTableName(String tableName) {
+        this.tableName = tableName;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setSdLocation(String sdLocation) {
+        this.sdLocation = sdLocation;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setSdInputFormat(String sdInputFormat) {
+        this.sdInputFormat = sdInputFormat;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setSdOutputFormat(String sdOutputFormat) {
+        this.sdOutputFormat = sdOutputFormat;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setOwner(String owner) {
+        this.owner = owner;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setTableType(String tableType) {
+        this.tableType = tableType;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setLastAccessTime(int lastAccessTime) {
+        this.lastAccessTime = lastAccessTime;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setFileSize(long fileSize) {
+        this.fileSize = fileSize;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setFileNum(long fileNum) {
+        this.fileNum = fileNum;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setIsNative(boolean isNative) {
+        this.isNative = isNative;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setAllColumns(List<HiveTableMeta.HiveTableColumnMeta> allColumns) {
+        this.allColumns = allColumns;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setPartitionColumns(List<HiveTableMeta.HiveTableColumnMeta> partitionColumns) {
+        this.partitionColumns = partitionColumns;
+        return this;
+    }
+
+    public HiveTableMeta createHiveTableMeta() {
+        return new HiveTableMeta(tableName, sdLocation, sdInputFormat, sdOutputFormat, owner, tableType, lastAccessTime, fileSize, fileNum, isNative, allColumns, partitionColumns);
+    }
+}
\ No newline at end of file
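
Builder usage at a glance; HiveTableMeta and its HiveTableColumnMeta are package-private, so this compiles only inside org.apache.kylin.source.hive. Field values here are made up.

    HiveTableMeta meta = new HiveTableMetaBuilder()
            .setTableName("events")    // hypothetical table
            .setOwner("root")
            .setAllColumns(Lists.newArrayList(
                    new HiveTableMeta.HiveTableColumnMeta("id", "bigint")))
            .createHiveTableMeta();    // unset fields keep defaults, e.g. isNative = true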

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/HqlExecutable.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HqlExecutable.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HqlExecutable.java
deleted file mode 100644
index 79493a4..0000000
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HqlExecutable.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.source.hive;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.job.exception.ExecuteException;
-import org.apache.kylin.job.execution.AbstractExecutable;
-import org.apache.kylin.job.execution.ExecutableContext;
-import org.apache.kylin.job.execution.ExecuteResult;
-import org.datanucleus.store.types.backed.HashMap;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.google.common.collect.Lists;
-
-/**
- */
-public class HqlExecutable extends AbstractExecutable {
-
-    private static final Logger logger = LoggerFactory.getLogger(HqlExecutable.class);
-
-    private static final String HQL = "hql";
-    private static final String HIVE_CONFIG = "hive-config";
-
-    public HqlExecutable() {
-        super();
-    }
-
-    @Override
-    protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
-        try {
-            Map<String, String> configMap = getConfiguration();
-            HiveClient hiveClient = new HiveClient(configMap);
-
-            for (String hql : getHqls()) {
-                hiveClient.executeHQL(hql);
-            }
-            return new ExecuteResult(ExecuteResult.State.SUCCEED);
-        } catch (Exception e) {
-            logger.error("error run hive query:" + getHqls(), e);
-            return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
-        }
-    }
-
-    public void setConfiguration(Map<String, String> configMap) {
-        if (configMap != null) {
-            String configStr = "";
-            try {
-                configStr = JsonUtil.writeValueAsString(configMap);
-            } catch (JsonProcessingException e) {
-                e.printStackTrace();
-            }
-            setParam(HIVE_CONFIG, configStr);
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    private Map<String, String> getConfiguration() {
-        String configStr = getParam(HIVE_CONFIG);
-        Map<String, String> result = null;
-        if (configStr != null) {
-            try {
-                result = JsonUtil.readValue(configStr, HashMap.class);
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-
-        return result;
-    }
-
-    public void setHqls(List<String> hqls) {
-        setParam(HQL, StringUtils.join(hqls, ";"));
-    }
-
-    private List<String> getHqls() {
-        final String hqls = getParam(HQL);
-        if (hqls != null) {
-            return Lists.newArrayList(StringUtils.split(hqls, ";"));
-        } else {
-            return Collections.emptyList();
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java
new file mode 100644
index 0000000..f218cce
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+
+import java.io.IOException;
+import java.util.List;
+
+public interface IHiveClient {
+    void executeHQL(String hql) throws CommandNeedRetryException, IOException;
+
+    void executeHQL(String[] hqls) throws CommandNeedRetryException, IOException;
+
+    HiveTableMeta getHiveTableMeta(String database, String tableName) throws Exception;
+
+    List<String> getHiveDbNames() throws Exception;
+
+    List<String> getHiveTableNames(String database) throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/6fc1c865/source-hive/src/test/java/org/apache/kylin/source/hive/BeelineOptionsProcessorTest.java
----------------------------------------------------------------------
diff --git a/source-hive/src/test/java/org/apache/kylin/source/hive/BeelineOptionsProcessorTest.java b/source-hive/src/test/java/org/apache/kylin/source/hive/BeelineOptionsProcessorTest.java
new file mode 100644
index 0000000..84da0a2
--- /dev/null
+++ b/source-hive/src/test/java/org/apache/kylin/source/hive/BeelineOptionsProcessorTest.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.lang.StringUtils;
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class BeelineOptionsProcessorTest {
+    @Ignore
+    @Test
+    public void foo() {
+        String param = "-n root --hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://localhost:10000'";
+        BeelineOptionsProcessor processor = new BeelineOptionsProcessor();
+        CommandLine commandLine = processor.process(StringUtils.split(param));
+        String n = commandLine.getOptionValue('n');
+        String u = commandLine.getOptionValue('u');
+        String p = commandLine.getOptionValue('p');
+
+    }
+}