Posted to commits@tajo.apache.org by bl...@apache.org on 2013/11/13 05:36:45 UTC

git commit: TAJO-16: Enable Tajo catalog to access Hive metastore. (jaehwa)

Updated Branches:
  refs/heads/master f4672e104 -> 943899360


TAJO-16: Enable Tajo catalog to access Hive metastore. (jaehwa)


Project: http://git-wip-us.apache.org/repos/asf/incubator-tajo/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-tajo/commit/94389936
Tree: http://git-wip-us.apache.org/repos/asf/incubator-tajo/tree/94389936
Diff: http://git-wip-us.apache.org/repos/asf/incubator-tajo/diff/94389936

Branch: refs/heads/master
Commit: 943899360a29fdab2ee1aefa7f1708f5666939c6
Parents: f4672e1
Author: blrunner <jh...@gruter.com>
Authored: Wed Nov 13 13:34:43 2013 +0900
Committer: blrunner <jh...@gruter.com>
Committed: Wed Nov 13 13:34:43 2013 +0900

----------------------------------------------------------------------
 CHANGES.txt                                     |   2 +
 pom.xml                                         |   7 +
 tajo-catalog/tajo-catalog-server/pom.xml        | 129 ++++++
 .../tajo/catalog/store/HCatalogStore.java       | 390 +++++++++++++++++++
 .../apache/tajo/catalog/store/HCatalogUtil.java | 169 ++++++++
 .../tajo/pullserver/TajoPullServerService.java  |   3 +
 tajo-dist/pom.xml                               |   2 +
 7 files changed, 702 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/94389936/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index ec1c228..3e55e2e 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -4,6 +4,8 @@ Release 0.2.0 - unreleased
 
   NEW FEATURES
 
+    TAJO-16: Enable Tajo catalog to access Hive metastore. (jaehwa)
+    
     TAJO-285: Add CREATE TABLE... BY PARTITION statement to parser. (hyunsik)
 
     TAJO-267: Implement equals() and deepEquals() functions at LogicalNode. 

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/94389936/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 7038e40..e0dd349 100644
--- a/pom.xml
+++ b/pom.xml
@@ -68,6 +68,13 @@
         <enabled>false</enabled>
       </snapshots>
     </repository>
+    <repository>
+      <id>cloudera</id>
+      <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
+      <snapshots>
+        <enabled>false</enabled>
+      </snapshots>
+    </repository>
   </repositories>
 
   <properties>

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/94389936/tajo-catalog/tajo-catalog-server/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-server/pom.xml b/tajo-catalog/tajo-catalog-server/pom.xml
index 14adba8..a79e954 100644
--- a/tajo-catalog/tajo-catalog-server/pom.xml
+++ b/tajo-catalog/tajo-catalog-server/pom.xml
@@ -33,6 +33,8 @@
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+    <hcatalog.version>0.5.0-incubating</hcatalog.version>
+    <mapred.version>2.0.0-mr1-cdh4.3.0</mapred.version>
   </properties>
 
   <build>
@@ -149,10 +151,137 @@
       <artifactId>derby</artifactId>
       <version>10.8.2.2</version>
     </dependency>
+
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${hive.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.jdo</groupId>
+          <artifactId>jdo2-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-builtins</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-cli</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-common</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-pdk</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-service</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-shims</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${hive.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.jdo</groupId>
+          <artifactId>jdo2-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-builtins</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-cli</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-common</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-pdk</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-service</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-shims</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hcatalog</groupId>
+      <artifactId>hcatalog-core</artifactId>
+      <version>${hcatalog.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.jdo</groupId>
+          <artifactId>jdo2-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-builtins</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-cli</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-common</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-pdk</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-service</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-shims</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-core</artifactId>
+      <version>${mapred.version}</version>
+      <scope>provided</scope>
+    </dependency>
   </dependencies>
 
   <profiles>
     <profile>
+      <id>hive-0.11.0</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <properties>
+        <hive.version>0.11.0</hive.version>
+      </properties>
+    </profile>
+    <profile>
+      <id>hive-0.10.0-cdh4.3.0</id>
+      <properties>
+        <hive.version>0.10.0-cdh4.3.0</hive.version>
+      </properties>
+    </profile>
+    <profile>
       <id>docs</id>
       <activation>
         <activeByDefault>false</activeByDefault>

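A note on the new build profiles above: hive-0.11.0 is marked activeByDefault, so an ordinary build compiles the HCatalog store against Hive 0.11.0, while the CDH 4.3.0 variant can be selected with Maven's standard profile switch (the command below is ordinary Maven usage, not something introduced by this patch), for example:

    mvn clean install -Phive-0.10.0-cdh4.3.0 -DskipTests

Activating a profile with -P deactivates the activeByDefault one, so only a single hive.version is in effect for a given build.
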
http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/94389936/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
new file mode 100644
index 0000000..f54990b
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
@@ -0,0 +1,390 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.catalog.store;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.*;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.Pair;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.tajo.catalog.*;
+import org.apache.tajo.catalog.Schema;
+import org.apache.tajo.catalog.proto.CatalogProtos;
+import org.apache.tajo.catalog.statistics.TableStats;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.exception.InternalException;
+
+import java.io.IOException;
+import java.util.*;
+
+public class HCatalogStore extends CatalogConstants implements CatalogStore {
+  protected final Log LOG = LogFactory.getLog(getClass());
+  protected Configuration conf;
+  protected String catalogUri;
+  private Map<Pair<String, String>, Table> tableMap = new HashMap<Pair<String, String>, Table>();
+
+  public HCatalogStore(final Configuration conf)
+      throws InternalException {
+    this.conf = conf;
+    this.catalogUri = conf.get(CATALOG_URI);
+  }
+
+  @Override
+  public final boolean existTable(final String name) throws IOException {
+    boolean exist = false;
+
+    String dbName = null, tableName = null;
+    Pair<String, String> tablePair = null;
+    org.apache.hadoop.hive.ql.metadata.Table table = null;
+    HiveMetaStoreClient client = null;
+
+    // get db name and table name.
+    try {
+      tablePair = HCatUtil.getDbAndTableName(name);
+      dbName = tablePair.first;
+      tableName = tablePair.second;
+    } catch (IOException ioe) {
+      throw new InternalException("Table name is wrong.", ioe);
+    }
+
+    // get table
+    try {
+      try {
+        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
+        table = HCatUtil.getTable(client, dbName, tableName);
+        if (table != null) {
+          exist = true;
+        }
+      } catch (NoSuchObjectException nsoe) {
+        exist = false;
+      } catch (Exception e) {
+        throw new IOException(e);
+      }
+    } finally {
+      HCatUtil.closeHiveClientQuietly(client);
+    }
+
+    return exist;
+  }
+
+  @Override
+  public final TableDesc getTable(final String name) throws IOException {
+    String dbName = null, tableName = null;
+    Pair<String, String> tablePair = null;
+    org.apache.hadoop.hive.ql.metadata.Table table = null;
+    HiveMetaStoreClient client = null;
+    Path path = null;
+    CatalogProtos.StoreType storeType = null;
+    Schema schema = null;
+    Options options = null;
+    TableStats stats = null;
+
+    // get db name and table name.
+    try {
+      tablePair = HCatUtil.getDbAndTableName(name);
+      dbName = tablePair.first;
+      tableName = tablePair.second;
+    } catch (IOException ioe) {
+      throw new InternalException("Table name is wrong.", ioe);
+    }
+
+    //////////////////////////////////
+    // set tajo table schema.
+    //////////////////////////////////
+    try {
+      // get hive table schema
+      try {
+        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
+        table = HCatUtil.getTable(client, dbName, tableName);
+        path = table.getPath();
+      } catch (NoSuchObjectException nsoe) {
+        throw new InternalException("Table not found. - tableName:" + name, nsoe);
+      } catch (Exception e) {
+        throw new IOException(e);
+      }
+
+      // convert hcatalog field schema into tajo field schema.
+      schema = new Schema();
+      HCatSchema tableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
+      List<HCatFieldSchema> fieldSchemaList = tableSchema.getFields();
+      for (HCatFieldSchema eachField : fieldSchemaList) {
+        String fieldName = tableName + "." + eachField.getName();
+        TajoDataTypes.Type dataType = HCatalogUtil.getTajoFieldType(eachField.getType().toString());
+        schema.addColumn(fieldName, dataType);
+      }
+
+      // validate field schema.
+      try {
+        HCatalogUtil.validateHCatTableAndTajoSchema(tableSchema);
+      } catch (IOException e) {
+        throw new InternalException(
+            "Tajo cannot support this HCatalog schema. - schema:" + tableSchema.toString(), e);
+      }
+
+      stats = new TableStats();
+      options = Options.create();
+      Properties properties = table.getMetadata();
+      if (properties != null) {
+        // set field delimiter
+        String fieldDelimiter = "", fileOutputformat = "";
+        if (properties.getProperty("field.delim") != null) {
+          fieldDelimiter = properties.getProperty("field.delim");
+        }
+        // set file output format
+        fileOutputformat = properties.getProperty("file.outputformat");
+        storeType = CatalogUtil.getStoreType(HCatalogUtil.getStoreType(fileOutputformat,
+            fieldDelimiter));
+
+        // TODO: handle other store types
+        if (storeType.equals(CatalogProtos.StoreType.CSV) && fieldDelimiter != null) {
+          options.put("csvfile.delimiter", fieldDelimiter);
+        }
+
+        // set data size
+        if(properties.getProperty("totalSize") != null) {
+          stats.setNumBytes(new Long(properties.getProperty("totalSize")));
+        }
+      }
+
+    } finally {
+      HCatUtil.closeHiveClientQuietly(client);
+    }
+    TableMeta meta = new TableMeta(storeType, options);
+
+    TableDesc tableDesc = new TableDesc(tableName, schema, meta, path);
+    if (stats != null) {
+      tableDesc.setStats(stats);
+    }
+
+    return tableDesc;
+  }
+
+  private TajoDataTypes.Type getDataType(final String typeStr) {
+    try {
+      return Enum.valueOf(TajoDataTypes.Type.class, typeStr);
+    } catch (IllegalArgumentException iae) {
+      LOG.error("Cannot find a matching type for '" + typeStr + "'");
+      return null;
+    }
+  }
+
+  @Override
+  public final List<String> getAllTableNames() throws IOException {
+    List<String> dbs = null;
+    List<String> tables = null;
+    List<String> allTables = new ArrayList<String>();
+    HiveMetaStoreClient client = null;
+
+    try {
+      try {
+        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
+        dbs = client.getAllDatabases();
+        for(String eachDB: dbs) {
+          tables = client.getAllTables(eachDB);
+          for(String eachTable: tables) {
+            allTables.add(eachDB + "." + eachTable);
+          }
+        }
+      } catch (Exception e) {
+        throw new IOException(e);
+      }
+
+    } finally {
+      HCatUtil.closeHiveClientQuietly(client);
+    }
+    return allTables;
+  }
+
+  @Override
+  public final void addTable(final TableDesc tableDesc) throws IOException {
+    String dbName = null, tableName = null;
+    Pair<String, String> tablePair = null;
+    HiveMetaStoreClient client = null;
+
+    // get db name and table name.
+    try {
+      tablePair = HCatUtil.getDbAndTableName(tableDesc.getName());
+      dbName = tablePair.first;
+      tableName = tablePair.second;
+    } catch (IOException ioe) {
+      throw new InternalException("Table name is wrong.", ioe);
+    }
+
+    try {
+      try {
+        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
+
+        org.apache.hadoop.hive.metastore.api.Table table = new org.apache.hadoop.hive.metastore.api
+            .Table();
+
+        table.setDbName(dbName);
+        table.setTableName(tableName);
+        // TODO: set owner
+        //table.setOwner();
+
+        StorageDescriptor sd = new StorageDescriptor();
+
+        // If Tajo sets the location here, the thrift client fails with:
+        // Caused by: MetaException(message:java.lang.NullPointerException)
+        // To change the table path, use the Hive CLI instead.
+        //sd.setLocation(tableDesc.getPath().toString());
+
+        // set column information
+        ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(tableDesc.getSchema().getColumns
+            ().size());
+        for (Column col : tableDesc.getSchema().getColumns()) {
+          cols.add(new FieldSchema(col.getColumnName(), HCatalogUtil.getHiveFieldType(col
+              .getDataType
+                  ().getType().name()), ""));
+        }
+        sd.setCols(cols);
+
+        // TODO: compression type
+        // TODO: table type
+        sd.setCompressed(false);
+
+        sd.setParameters(new HashMap<String, String>());
+        sd.setSerdeInfo(new SerDeInfo());
+        sd.getSerdeInfo().setName(table.getTableName());
+
+        // TODO: other serialization libraries
+        sd.getSerdeInfo().setSerializationLib(
+            org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+
+        sd.getSerdeInfo().setParameters(new HashMap<String, String>());
+//      sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
+        sd.getSerdeInfo().getParameters().put(serdeConstants.FIELD_DELIM, "|");
+
+        // TODO: other input format classes
+        sd.setInputFormat(org.apache.hadoop.mapred.TextInputFormat.class.getName());
+
+        // TODO: other output format classes
+        sd.setOutputFormat(org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat.class.getName
+            ());
+
+        sd.setSortCols(new ArrayList<Order>());
+
+        table.setSd(sd);
+        client.createTable(table);
+      } catch (Exception e) {
+        throw new IOException(e);
+      }
+    } finally {
+      HCatUtil.closeHiveClientQuietly(client);
+    }
+  }
+
+  @Override
+  public final void deleteTable(final String name) throws IOException {
+    String dbName = null, tableName = null;
+    Pair<String, String> tablePair = null;
+    HiveMetaStoreClient client = null;
+
+    // get db name and table name.
+    try {
+      tablePair = HCatUtil.getDbAndTableName(name);
+      dbName = tablePair.first;
+      tableName = tablePair.second;
+    } catch (IOException ioe) {
+      throw new InternalException("Table name is wrong.", ioe);
+    }
+
+    try {
+      client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
+      client.dropTable(dbName, tableName);
+    } catch (NoSuchObjectException nsoe) {
+    } catch (Exception e) {
+      throw new IOException(e);
+    } finally {
+      HCatUtil.closeHiveClientQuietly(client);
+    }
+  }
+  @Override
+  public final void addFunction(final FunctionDesc func) throws IOException {
+    // TODO - not implemented yet
+  }
+
+  @Override
+  public final void deleteFunction(final FunctionDesc func) throws IOException {
+    // TODO - not implemented yet
+  }
+
+  @Override
+  public final void existFunction(final FunctionDesc func) throws IOException {
+    // TODO - not implemented yet
+  }
+
+  @Override
+  public final List<String> getAllFunctionNames() throws IOException {
+    // TODO - not implemented yet
+    return null;
+  }
+
+  @Override
+  public void delIndex(String indexName) throws IOException {
+    // TODO - not implemented yet
+  }
+
+  @Override
+  public boolean existIndex(String indexName) throws IOException {
+    // TODO - not implemented yet
+    return false;
+  }
+
+  @Override
+  public CatalogProtos.IndexDescProto[] getIndexes(String tableName) throws IOException {
+    // TODO - not implemented yet
+    return null;
+  }
+
+  @Override
+  public void addIndex(CatalogProtos.IndexDescProto proto) throws IOException {
+    // TODO - not implemented yet
+  }
+
+  @Override
+  public CatalogProtos.IndexDescProto getIndex(String indexName) throws IOException {
+    // TODO - not implemented yet
+    return null;
+  }
+
+  @Override
+  public CatalogProtos.IndexDescProto getIndex(String tableName, String columnName)
+      throws IOException {
+    // TODO - not implemented yet
+    return null;
+  }
+
+  @Override
+  public boolean existIndex(String tableName, String columnName) {
+    // TODO - not implemented yet
+    return false;
+  }
+
+  @Override
+  public final void close() {
+  }
+}

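For orientation, a minimal usage sketch of the new store (not part of this patch). It assumes the supplied Configuration already carries the Hive metastore thrift URI under the CATALOG_URI key that HCatalogStore reads via CatalogConstants, and the table name "default.customer" is purely illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.tajo.catalog.TableDesc;
    import org.apache.tajo.catalog.store.HCatalogStore;

    public class HCatalogStoreSketch {
      public static void main(String[] args) throws Exception {
        // Assumption: the catalog URI (e.g. thrift://<metastore-host>:9083) has
        // already been placed in this Configuration under the CATALOG_URI key.
        Configuration conf = new Configuration();

        HCatalogStore store = new HCatalogStore(conf);
        try {
          // Hive tables are reported as "<db>.<table>".
          for (String name : store.getAllTableNames()) {
            System.out.println(name);
          }
          // Illustrative lookup of a single table descriptor.
          if (store.existTable("default.customer")) {
            TableDesc desc = store.getTable("default.customer");
            System.out.println(desc);
          }
        } finally {
          store.close();
        }
      }
    }
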
http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/94389936/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
new file mode 100644
index 0000000..b92cbf2
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.tajo.catalog.store;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.tajo.catalog.proto.CatalogProtos;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.exception.InternalException;
+
+import java.io.IOException;
+
+public class HCatalogUtil {
+  protected final Log LOG = LogFactory.getLog(getClass());
+
+  public static void validateHCatTableAndTajoSchema(HCatSchema tblSchema) throws InternalException {
+    for (HCatFieldSchema hcatField : tblSchema.getFields()) {
+      validateHCatFieldAndTajoSchema(hcatField);
+    }
+  }
+
+  private static void validateHCatFieldAndTajoSchema(HCatFieldSchema fieldSchema) throws
+      InternalException {
+    try {
+      HCatFieldSchema.Type fieldType = fieldSchema.getType();
+      switch (fieldType) {
+        case ARRAY:
+          throw new HCatException("Tajo cannot support array field type.");
+        case STRUCT:
+          throw new HCatException("Tajo cannot support struct field type.");
+        case MAP:
+          throw new HCatException("Tajo cannot support map field type.");
+      }
+    } catch (HCatException e) {
+      throw new InternalException("HCatalog field type is not compatible with Tajo types. - " +
+          "HCatFieldSchema:" + fieldSchema, e);
+    }
+  }
+
+  public static HiveMetaStoreClient getHiveMetaClient(String metaStoreUri,
+                                                      String metaStoreKerberosPrincipal)
+                                                      //Class<?> cls)
+  throws Exception {
+//    HiveConf hiveConf = new HiveConf(cls);
+
+    HiveConf hiveConf = new HiveConf();
+
+    if (metaStoreUri != null) {
+      hiveConf.set("hive.metastore.local", "false");
+      hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreUri.trim());
+    }
+
+    if (metaStoreKerberosPrincipal != null) {
+      hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
+      hiveConf.setVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL, metaStoreKerberosPrincipal);
+    }
+
+    try {
+      return HCatUtil.getHiveClient(hiveConf);
+    } catch (Exception e) {
+      throw new InternalException("Tajo cannot connect to the Hive metastore. - serverUri:" +
+          metaStoreUri, e);
+    }
+  }
+
+  public static TajoDataTypes.Type getTajoFieldType(String fieldType) throws IOException {
+    if(fieldType == null) {
+      throw new InternalException("Hive field type is null.");
+    }
+    String typeStr = null;
+
+    if(fieldType.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME))
+      typeStr = "INT4";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME))
+      typeStr = "INT1";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME))
+      typeStr = "INT2";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME))
+      typeStr = "INT8";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME))
+      typeStr = "BOOLEAN";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.FLOAT_TYPE_NAME))
+      typeStr = "FLOAT4";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME))
+      typeStr = "FLOAT8";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME))
+      typeStr = "TEXT";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.BINARY_TYPE_NAME))
+      typeStr = "BLOB";
+
+    try {
+      return Enum.valueOf(TajoDataTypes.Type.class, typeStr);
+    } catch (IllegalArgumentException iae) {
+      System.out.println("Cannot find a matching type for '" + typeStr + "'");
+      return null;
+    }
+  }
+
+  public static String getHiveFieldType(String fieldType) throws IOException {
+    if(fieldType == null) {
+      throw new InternalException("Tajo field type is null.");
+    }
+    String typeStr = null;
+
+    if(fieldType.equalsIgnoreCase("INT4"))
+      typeStr = serdeConstants.INT_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("INT1"))
+      typeStr = serdeConstants.TINYINT_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("INT2"))
+      typeStr = serdeConstants.SMALLINT_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("INT8"))
+      typeStr = serdeConstants.BIGINT_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("BOOLEAN"))
+      typeStr = serdeConstants.BOOLEAN_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("FLOAT4"))
+      typeStr = serdeConstants.FLOAT_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("FLOAT8"))
+      typeStr = serdeConstants.DOUBLE_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("TEXT"))
+      typeStr = serdeConstants.STRING_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("BLOB"))
+      typeStr = serdeConstants.BINARY_TYPE_NAME;
+
+    return typeStr;
+  }
+
+  public static String getStoreType(String fileFormat, String delimiter) throws IOException{
+    if(fileFormat == null) {
+      throw new InternalException("Hive file output format is null.");
+    }
+
+    String[] fileFormatArray = fileFormat.split("\\.");
+    if(fileFormatArray.length < 1) {
+      throw new InternalException("Hive file output format is wrong. - file output format:" + fileFormat);
+    }
+
+    String inputFormatClass = fileFormatArray[fileFormatArray.length-1];
+
+    if(inputFormatClass.equals("HiveIgnoreKeyTextOutputFormat")) {
+      return CatalogProtos.StoreType.CSV.name();
+    } else {
+      //TODO: other file format
+      return null;
+    }
+  }
+
+}
\ No newline at end of file

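A similarly small sketch of the type-mapping helpers added above (again not part of this patch); the values in the comments follow directly from the if/else chains in HCatalogUtil:

    import org.apache.tajo.catalog.store.HCatalogUtil;
    import org.apache.tajo.common.TajoDataTypes;

    public class TypeMappingSketch {
      public static void main(String[] args) throws Exception {
        // Hive/HCatalog type names map to Tajo types: "int" -> INT4, "string" -> TEXT, ...
        TajoDataTypes.Type tajoType = HCatalogUtil.getTajoFieldType("int");
        System.out.println(tajoType);   // prints INT4

        // The reverse direction is used when HCatalogStore creates Hive tables.
        String hiveType = HCatalogUtil.getHiveFieldType("TEXT");
        System.out.println(hiveType);   // prints string
      }
    }
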
http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/94389936/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/TajoPullServerService.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/TajoPullServerService.java b/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/TajoPullServerService.java
index 256f99c..d67efc1 100644
--- a/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/TajoPullServerService.java
+++ b/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/TajoPullServerService.java
@@ -216,6 +216,9 @@ public class TajoPullServerService extends AbstractService {
 
       localFS = new LocalFileSystem();
       super.init(new Configuration(conf));
+
+      this.getConfig().setInt(TajoConf.ConfVars.PULLSERVER_PORT.varname
+          , TajoConf.ConfVars.PULLSERVER_PORT.defaultIntVal);
     } catch (Throwable t) {
       LOG.error(t);
     }

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/94389936/tajo-dist/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-dist/pom.xml b/tajo-dist/pom.xml
index 6d34682..bdbd9b6 100644
--- a/tajo-dist/pom.xml
+++ b/tajo-dist/pom.xml
@@ -106,6 +106,8 @@
                       run cp -r $ROOT/tajo-core/target/tajo-core-${project.version}/* .
                       run cp -r ${project.basedir}/src/main/bin .
                       run cp -r ${project.basedir}/src/main/conf .
+                      run rm -rf lib/hive-*.jar
+                      run cp -r $ROOT/tajo-catalog/tajo-catalog-server/target/lib/hive-*.jar  lib/
                       run rm -rf lib/tajo-*-${project.version}.jar
                       echo
                       echo "Tajo dist layout available at: ${project.build.directory}/tajo-${project.version}"