Posted to commits@tajo.apache.org by bl...@apache.org on 2013/12/10 07:32:09 UTC

git commit: TAJO-336: Separate catalog stores into separate modules. (jaehwa)

Updated Branches:
  refs/heads/master 34a296f95 -> 60f7df201


TAJO-336: Separate catalog stores into separate modules. (jaehwa)


Project: http://git-wip-us.apache.org/repos/asf/incubator-tajo/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-tajo/commit/60f7df20
Tree: http://git-wip-us.apache.org/repos/asf/incubator-tajo/tree/60f7df20
Diff: http://git-wip-us.apache.org/repos/asf/incubator-tajo/diff/60f7df20

Branch: refs/heads/master
Commit: 60f7df201d560e2693c29cd75674f2bccccf9092
Parents: 34a296f
Author: blrunner <jh...@gruter.com>
Authored: Tue Dec 10 15:31:37 2013 +0900
Committer: blrunner <jh...@gruter.com>
Committed: Tue Dec 10 15:31:37 2013 +0900

----------------------------------------------------------------------
 CHANGES.txt                                     |   2 +
 tajo-catalog/pom.xml                            |   5 +
 tajo-catalog/tajo-catalog-drivers/pom.xml       |  78 ++++
 .../tajo-catalog-drivers/tajo-hcatalog/pom.xml  | 406 +++++++++++++++++++
 .../tajo/catalog/store/HCatalogStore.java       | 396 ++++++++++++++++++
 .../apache/tajo/catalog/store/HCatalogUtil.java | 169 ++++++++
 tajo-catalog/tajo-catalog-server/pom.xml        | 128 ------
 .../tajo/catalog/store/HCatalogStore.java       | 396 ------------------
 .../apache/tajo/catalog/store/HCatalogUtil.java | 169 --------
 tajo-dist/pom.xml                               |   2 -
 tajo-dist/src/main/bin/tajo                     |  24 ++
 tajo-dist/src/main/conf/tajo-env.sh             |   3 +
 12 files changed, 1083 insertions(+), 695 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 4037859..0aacb5a 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -82,6 +82,8 @@ Release 0.8.0 - unreleased
 
   IMPROVEMENTS
 
+    TAJO-336: Separate catalog stores into separate modules. (jaehwa)
+
     TAJO-329: Implement physical operator to store in column-partitioned table. (jaehwa)
 
     TAJO-391: Change the default type of real values from FLOAT4 to FLOAT8 

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-catalog/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-catalog/pom.xml b/tajo-catalog/pom.xml
index 1900b66..37d62ad 100644
--- a/tajo-catalog/pom.xml
+++ b/tajo-catalog/pom.xml
@@ -39,6 +39,7 @@
     <module>tajo-catalog-common</module>
     <module>tajo-catalog-server</module>
     <module>tajo-catalog-client</module>
+    <module>tajo-catalog-drivers</module>
   </modules>
 
   <build>
@@ -148,6 +149,10 @@
                       run cp -r ${basedir}/tajo-catalog-common/target/tajo-catalog-common-${project.version}*.jar .
                       run cp -r ${basedir}/tajo-catalog-client/target/tajo-catalog-client-${project.version}*.jar .
                       run cp -r ${basedir}/tajo-catalog-server/target/tajo-catalog-server-${project.version}*.jar .
+                      if [ -f ${basedir}/tajo-catalog-drivers/tajo-hcatalog/target/tajo-hcatalog-${project.version}.jar ]
+                      then
+                      run cp -r ${basedir}/tajo-catalog-drivers/tajo-hcatalog/target/tajo-hcatalog-${project.version}*.jar .
+                      fi
                       echo
                       echo "Tajo Catalog dist layout available at: ${project.build.directory}/tajo-catalog-${project.version}"
                       echo

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-catalog/tajo-catalog-drivers/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-drivers/pom.xml b/tajo-catalog/tajo-catalog-drivers/pom.xml
new file mode 100644
index 0000000..157de68
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-drivers/pom.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <parent>
+    <artifactId>tajo-project</artifactId>
+    <groupId>org.apache.tajo</groupId>
+    <version>0.8.0-SNAPSHOT</version>
+    <relativePath>../../tajo-project</relativePath>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>tajo-catalog-drivers</artifactId>
+  <packaging>pom</packaging>
+  <name>Tajo Catalog Drivers</name>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+  </properties>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-report-plugin</artifactId>
+        <version>2.15</version>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+    <profile>
+      <id>hcatalog-0.11.0</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <modules>
+        <module>tajo-hcatalog</module>
+      </modules>
+    </profile>
+    <profile>
+      <id>hcatalog-0.12.0</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <modules>
+        <module>tajo-hcatalog</module>
+      </modules>
+    </profile>
+  </profiles>
+
+  <reporting>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-report-plugin</artifactId>
+        <version>2.15</version>
+      </plugin>
+    </plugins>
+  </reporting>
+</project>
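
Both hcatalog profiles above default to activeByDefault=false, so the new tajo-hcatalog module is built only when one of them is enabled explicitly, for example with "mvn clean install -Phcatalog-0.12.0". This is also why the dist layout step in tajo-catalog/pom.xml copies the tajo-hcatalog jar only if it exists.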

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/pom.xml b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/pom.xml
new file mode 100644
index 0000000..738a9f7
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/pom.xml
@@ -0,0 +1,406 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <parent>
+    <artifactId>tajo-project</artifactId>
+    <groupId>org.apache.tajo</groupId>
+    <version>0.8.0-SNAPSHOT</version>
+    <relativePath>../../../tajo-project</relativePath>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>tajo-hcatalog</artifactId>
+  <packaging>jar</packaging>
+  <name>Tajo Catalog Drivers HCatalog</name>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+  </properties>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+          <encoding>${project.build.sourceEncoding}</encoding>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-dependencies</id>
+            <phase>prepare-package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${project.build.directory}/lib</outputDirectory>
+              <overWriteReleases>false</overWriteReleases>
+              <overWriteSnapshots>false</overWriteSnapshots>
+              <overWriteIfNewer>true</overWriteIfNewer>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-report-plugin</artifactId>
+        <version>2.15</version>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.tajo</groupId>
+      <artifactId>tajo-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tajo</groupId>
+      <artifactId>tajo-catalog-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tajo</groupId>
+      <artifactId>tajo-catalog-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tajo</groupId>
+      <artifactId>tajo-catalog-server</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tajo</groupId>
+      <artifactId>tajo-rpc</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tajo</groupId>
+      <artifactId>tajo-algebra</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+      <version>10.8.2.2</version>
+    </dependency>
+  </dependencies>
+
+
+  <profiles>
+    <profile>
+      <id>hcatalog-0.11.0</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <properties>
+        <hive.version>0.11.0</hive.version>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-exec</artifactId>
+          <version>${hive.version}</version>
+          <exclusions>
+            <exclusion>
+              <groupId>javax.jdo</groupId>
+              <artifactId>jdo2-api</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-builtins</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-cli</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-common</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-pdk</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-service</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-shims</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-metastore</artifactId>
+          <version>${hive.version}</version>
+          <exclusions>
+            <exclusion>
+              <groupId>javax.jdo</groupId>
+              <artifactId>jdo2-api</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-builtins</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-cli</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-common</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-pdk</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-service</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-shims</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hcatalog</groupId>
+          <artifactId>hcatalog-core</artifactId>
+          <version>${hive.version}</version>
+          <exclusions>
+            <exclusion>
+              <groupId>javax.jdo</groupId>
+              <artifactId>jdo2-api</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-builtins</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-cli</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-common</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-pdk</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-service</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-shims</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop.version}</version>
+          <scope>provided</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hcatalog-0.12.0</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <properties>
+        <hive.version>0.12.0</hive.version>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-exec</artifactId>
+          <version>${hive.version}</version>
+          <exclusions>
+            <exclusion>
+              <groupId>javax.jdo</groupId>
+              <artifactId>jdo2-api</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-builtins</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-cli</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-common</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-pdk</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-service</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-shims</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-metastore</artifactId>
+          <version>${hive.version}</version>
+          <exclusions>
+            <exclusion>
+              <groupId>javax.jdo</groupId>
+              <artifactId>jdo2-api</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-builtins</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-cli</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-common</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-pdk</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-service</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-shims</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hive.hcatalog</groupId>
+          <artifactId>hcatalog-core</artifactId>
+          <version>${hive.version}</version>
+          <exclusions>
+            <exclusion>
+              <groupId>javax.jdo</groupId>
+              <artifactId>jdo2-api</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-builtins</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-cli</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-common</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-pdk</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-service</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.hive</groupId>
+              <artifactId>hive-shims</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop.version}</version>
+          <scope>provided</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>src</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-source-plugin</artifactId>
+            <executions>
+              <execution>
+                <!-- builds source jars and attaches them to the project for publishing -->
+                <id>hadoop-java-sources</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>jar-no-fork</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
+  <reporting>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-report-plugin</artifactId>
+        <version>2.15</version>
+      </plugin>
+    </plugins>
+  </reporting>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
new file mode 100644
index 0000000..d8a0e98
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
@@ -0,0 +1,396 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.catalog.store;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.*;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.Pair;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.tajo.catalog.*;
+import org.apache.tajo.catalog.Schema;
+import org.apache.tajo.catalog.proto.CatalogProtos;
+import org.apache.tajo.catalog.statistics.TableStats;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.exception.InternalException;
+
+import java.io.IOException;
+import java.util.*;
+
+public class HCatalogStore extends CatalogConstants implements CatalogStore {
+  protected final Log LOG = LogFactory.getLog(getClass());
+  protected Configuration conf;
+  protected String catalogUri;
+  private Map<Pair<String, String>, Table> tableMap = new HashMap<Pair<String, String>, Table>();
+
+  public HCatalogStore(final Configuration conf)
+      throws InternalException {
+    this.conf = conf;
+    if(conf.get(CatalogConstants.DEPRECATED_CATALOG_URI) != null) {
+      LOG.warn("Configuration parameter " + CatalogConstants.DEPRECATED_CATALOG_URI + " " +
+          "is deprecated. Use " + CatalogConstants.CATALOG_URI + " instead.");
+      this.catalogUri = conf.get(CatalogConstants.DEPRECATED_CATALOG_URI);
+    } else {
+      this.catalogUri = conf.get(CatalogConstants.CATALOG_URI);
+    }
+  }
+
+  @Override
+  public final boolean existTable(final String name) throws IOException {
+    boolean exist = false;
+
+    String dbName = null, tableName = null;
+    Pair<String, String> tablePair = null;
+    org.apache.hadoop.hive.ql.metadata.Table table = null;
+    HiveMetaStoreClient client = null;
+
+    // get db name and table name.
+    try {
+      tablePair = HCatUtil.getDbAndTableName(name);
+      dbName = tablePair.first;
+      tableName = tablePair.second;
+    } catch (IOException ioe) {
+      throw new InternalException("Table name is wrong.", ioe);
+    }
+
+    // get table
+    try {
+      try {
+        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
+        table = HCatUtil.getTable(client, dbName, tableName);
+        if (table != null) {
+          exist = true;
+        }
+      } catch (NoSuchObjectException nsoe) {
+        exist = false;
+      } catch (Exception e) {
+        throw new IOException(e);
+      }
+    } finally {
+      HCatUtil.closeHiveClientQuietly(client);
+    }
+
+    return exist;
+  }
+
+  @Override
+  public final TableDesc getTable(final String name) throws IOException {
+    String dbName = null, tableName = null;
+    Pair<String, String> tablePair = null;
+    org.apache.hadoop.hive.ql.metadata.Table table = null;
+    HiveMetaStoreClient client = null;
+    Path path = null;
+    CatalogProtos.StoreType storeType = null;
+    Schema schema = null;
+    Options options = null;
+    TableStats stats = null;
+
+    // get db name and table name.
+    try {
+      tablePair = HCatUtil.getDbAndTableName(name);
+      dbName = tablePair.first;
+      tableName = tablePair.second;
+    } catch (IOException ioe) {
+      throw new InternalException("Table name is wrong.", ioe);
+    }
+
+    //////////////////////////////////
+    // set tajo table schema.
+    //////////////////////////////////
+    try {
+      // get hive table schema
+      try {
+        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
+        table = HCatUtil.getTable(client, dbName, tableName);
+        path = table.getPath();
+      } catch (NoSuchObjectException nsoe) {
+        throw new InternalException("Table not found. - tableName:" + name, nsoe);
+      } catch (Exception e) {
+        throw new IOException(e);
+      }
+
+      // convert hcatalog field schema into tajo field schema.
+      schema = new Schema();
+      HCatSchema tableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
+      List<HCatFieldSchema> fieldSchemaList = tableSchema.getFields();
+      for (HCatFieldSchema eachField : fieldSchemaList) {
+        String fieldName = tableName + "." + eachField.getName();
+        TajoDataTypes.Type dataType = HCatalogUtil.getTajoFieldType(eachField.getType().toString());
+        schema.addColumn(fieldName, dataType);
+      }
+
+      // validate field schema.
+      try {
+        HCatalogUtil.validateHCatTableAndTajoSchema(tableSchema);
+      } catch (IOException e) {
+        throw new InternalException(
+            "HCatalog cannot support schema. - schema:" + tableSchema.toString(), e);
+      }
+
+      stats = new TableStats();
+      options = Options.create();
+      Properties properties = table.getMetadata();
+      if (properties != null) {
+        // set field delimiter
+        String fieldDelimiter = "", fileOutputformat = "";
+        if (properties.getProperty("field.delim") != null) {
+          fieldDelimiter = properties.getProperty("field.delim");
+        }
+        // set file output format
+        fileOutputformat = properties.getProperty("file.outputformat");
+        storeType = CatalogUtil.getStoreType(HCatalogUtil.getStoreType(fileOutputformat,
+            fieldDelimiter));
+
+        // TODO: handle other storage formats
+        if (storeType.equals(CatalogProtos.StoreType.CSV) && fieldDelimiter != null) {
+          options.put("csvfile.delimiter", fieldDelimiter);
+        }
+
+        // set data size
+        if(properties.getProperty("totalSize") != null) {
+          stats.setNumBytes(new Long(properties.getProperty("totalSize")));
+        }
+      }
+
+    } finally {
+      HCatUtil.closeHiveClientQuietly(client);
+    }
+    TableMeta meta = new TableMeta(storeType, options);
+
+    TableDesc tableDesc = new TableDesc(tableName, schema, meta, path);
+    if (stats != null) {
+      tableDesc.setStats(stats);
+    }
+
+    return tableDesc;
+  }
+
+  private TajoDataTypes.Type getDataType(final String typeStr) {
+    try {
+      return Enum.valueOf(TajoDataTypes.Type.class, typeStr);
+    } catch (IllegalArgumentException iae) {
+      LOG.error("Cannot find a matching Tajo type for '" + typeStr + "'");
+      return null;
+    }
+  }
+
+  @Override
+  public final List<String> getAllTableNames() throws IOException {
+    List<String> dbs = null;
+    List<String> tables = null;
+    List<String> allTables = new ArrayList<String>();
+    HiveMetaStoreClient client = null;
+
+    try {
+      try {
+        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
+        dbs = client.getAllDatabases();
+        for(String eachDB: dbs) {
+          tables = client.getAllTables(eachDB);
+          for(String eachTable: tables) {
+            allTables.add(eachDB + "." + eachTable);
+          }
+        }
+      } catch (Exception e) {
+        throw new IOException(e);
+      }
+
+    } finally {
+      HCatUtil.closeHiveClientQuietly(client);
+    }
+    return allTables;
+  }
+
+  @Override
+  public final void addTable(final TableDesc tableDesc) throws IOException {
+    String dbName = null, tableName = null;
+    Pair<String, String> tablePair = null;
+    HiveMetaStoreClient client = null;
+
+    // get db name and table name.
+    try {
+      tablePair = HCatUtil.getDbAndTableName(tableDesc.getName());
+      dbName = tablePair.first;
+      tableName = tablePair.second;
+    } catch (IOException ioe) {
+      throw new InternalException("Table name is wrong.", ioe);
+    }
+
+    try {
+      try {
+        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
+
+        org.apache.hadoop.hive.metastore.api.Table table = new org.apache.hadoop.hive.metastore.api
+            .Table();
+
+        table.setDbName(dbName);
+        table.setTableName(tableName);
+        // TODO: set owner
+        //table.setOwner();
+
+        StorageDescriptor sd = new StorageDescriptor();
+
+        // If Tajo sets the table location here, the Thrift client throws an exception such as:
+        // Caused by: MetaException(message:java.lang.NullPointerException)
+        // To change the table path, modify it from the Hive CLI instead.
+        //sd.setLocation(tableDesc.getPath().toString());
+
+        // set column information
+        ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(tableDesc.getSchema().getColumns
+            ().size());
+        for (Column col : tableDesc.getSchema().getColumns()) {
+          cols.add(new FieldSchema(col.getColumnName(), HCatalogUtil.getHiveFieldType(col
+              .getDataType
+                  ().getType().name()), ""));
+        }
+        sd.setCols(cols);
+
+        // TODO: compression type
+        // and table type
+        sd.setCompressed(false);
+
+        sd.setParameters(new HashMap<String, String>());
+        sd.setSerdeInfo(new SerDeInfo());
+        sd.getSerdeInfo().setName(table.getTableName());
+
+        // TODO: support other serialization libraries
+        sd.getSerdeInfo().setSerializationLib(
+            org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+
+        sd.getSerdeInfo().setParameters(new HashMap<String, String>());
+//      sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
+        sd.getSerdeInfo().getParameters().put(serdeConstants.FIELD_DELIM, "|");
+
+        // TODO: support other input format classes
+        sd.setInputFormat(org.apache.hadoop.mapred.TextInputFormat.class.getName());
+
+        // TODO: support other output format classes
+        sd.setOutputFormat(org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat.class.getName
+            ());
+
+        sd.setSortCols(new ArrayList<Order>());
+
+        table.setSd(sd);
+        client.createTable(table);
+      } catch (Exception e) {
+        throw new IOException(e);
+      }
+    } finally {
+      HCatUtil.closeHiveClientQuietly(client);
+    }
+  }
+
+  @Override
+  public final void deleteTable(final String name) throws IOException {
+    String dbName = null, tableName = null;
+    Pair<String, String> tablePair = null;
+    HiveMetaStoreClient client = null;
+
+    // get db name and table name.
+    try {
+      tablePair = HCatUtil.getDbAndTableName(name);
+      dbName = tablePair.first;
+      tableName = tablePair.second;
+    } catch (IOException ioe) {
+      throw new InternalException("Table name is wrong.", ioe);
+    }
+
+    try {
+      client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
+      client.dropTable(dbName, tableName);
+    } catch (NoSuchObjectException nsoe) {
+    } catch (Exception e) {
+      throw new IOException(e);
+    } finally {
+      HCatUtil.closeHiveClientQuietly(client);
+    }
+  }
+  @Override
+  public final void addFunction(final FunctionDesc func) throws IOException {
+    // TODO - not implemented yet
+  }
+
+  @Override
+  public final void deleteFunction(final FunctionDesc func) throws IOException {
+    // TODO - not implemented yet
+  }
+
+  @Override
+  public final void existFunction(final FunctionDesc func) throws IOException {
+    // TODO - not implemented yet
+  }
+
+  @Override
+  public final List<String> getAllFunctionNames() throws IOException {
+    // TODO - not implemented yet
+    return null;
+  }
+
+  @Override
+  public void delIndex(String indexName) throws IOException {
+    // TODO - not implemented yet
+  }
+
+  @Override
+  public boolean existIndex(String indexName) throws IOException {
+    // TODO - not implemented yet
+    return false;
+  }
+
+  @Override
+  public CatalogProtos.IndexDescProto[] getIndexes(String tableName) throws IOException {
+    // TODO - not implemented yet
+    return null;
+  }
+
+  @Override
+  public void addIndex(CatalogProtos.IndexDescProto proto) throws IOException {
+    // TODO - not implemented yet
+  }
+
+  @Override
+  public CatalogProtos.IndexDescProto getIndex(String indexName) throws IOException {
+    // TODO - not implemented yet
+    return null;
+  }
+
+  @Override
+  public CatalogProtos.IndexDescProto getIndex(String tableName, String columnName)
+      throws IOException {
+    // TODO - not implemented yet
+    return null;
+  }
+
+  @Override
+  public boolean existIndex(String tableName, String columnName) {
+    // TODO - not implemented yet
+    return false;
+  }
+
+  @Override
+  public final void close() {
+  }
+}
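
For reference, a minimal usage sketch of the relocated store (not part of the commit). It relies only on the constructor and the table lookups shown above; the metastore URI, the table name, and the import path of CatalogConstants are placeholders/assumptions.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.tajo.catalog.CatalogConstants;   // assumed package of CatalogConstants
    import org.apache.tajo.catalog.TableDesc;
    import org.apache.tajo.catalog.store.HCatalogStore;

    public class HCatalogStoreExample {
      public static void main(String[] args) throws Exception {
        // Point the store at an existing Hive metastore; the URI below is a placeholder.
        Configuration conf = new Configuration();
        conf.set(CatalogConstants.CATALOG_URI, "thrift://metastore-host:9083");

        HCatalogStore store = new HCatalogStore(conf);
        try {
          // Table names are qualified as "<database>.<table>" and split by HCatUtil.getDbAndTableName().
          if (store.existTable("default.orders")) {
            TableDesc desc = store.getTable("default.orders");
            System.out.println(desc.getSchema());
          }
        } finally {
          store.close();
        }
      }
    }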

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
new file mode 100644
index 0000000..b92cbf2
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.tajo.catalog.store;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.tajo.catalog.proto.CatalogProtos;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.exception.InternalException;
+
+import java.io.IOException;
+
+public class HCatalogUtil {
+  protected final Log LOG = LogFactory.getLog(getClass());
+
+  public static void validateHCatTableAndTajoSchema(HCatSchema tblSchema) throws InternalException {
+    for (HCatFieldSchema hcatField : tblSchema.getFields()) {
+      validateHCatFieldAndTajoSchema(hcatField);
+    }
+  }
+
+  private static void validateHCatFieldAndTajoSchema(HCatFieldSchema fieldSchema) throws
+      InternalException {
+    try {
+      HCatFieldSchema.Type fieldType = fieldSchema.getType();
+      switch (fieldType) {
+        case ARRAY:
+          throw new HCatException("Tajo cannot support array field type.");
+        case STRUCT:
+          throw new HCatException("Tajo cannot support struct field type.");
+        case MAP:
+          throw new HCatException("Tajo cannot support map field type.");
+      }
+    } catch (HCatException e) {
+      throw new InternalException("incompatible hcatalog types when assigning to tajo type. - " +
+          "HCatFieldSchema:" + fieldSchema, e);
+    }
+  }
+
+  public static HiveMetaStoreClient getHiveMetaClient(String metaStoreUri,
+                                                      String metaStoreKerberosPrincipal)
+                                                      //Class<?> cls)
+  throws Exception {
+//    HiveConf hiveConf = new HiveConf(cls);
+
+    HiveConf hiveConf = new HiveConf();
+
+    if (metaStoreUri != null) {
+      hiveConf.set("hive.metastore.local", "false");
+      hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreUri.trim());
+    }
+
+    if (metaStoreKerberosPrincipal != null) {
+      hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
+      hiveConf.setVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL, metaStoreKerberosPrincipal);
+    }
+
+    try {
+      return HCatUtil.getHiveClient(hiveConf);
+    } catch (Exception e) {
+      throw new InternalException("Tajo cannot connect Hive metastore. - serverUri:" +
+          metaStoreUri, e);
+    }
+  }
+
+  public static TajoDataTypes.Type getTajoFieldType(String fieldType) throws IOException {
+    if(fieldType == null) {
+      throw new InternalException("Hive field type is null.");
+    }
+    String typeStr = null;
+
+    if(fieldType.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME))
+      typeStr = "INT4";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME))
+      typeStr = "INT1";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME))
+      typeStr = "INT2";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME))
+      typeStr = "INT8";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME))
+      typeStr = "BOOLEAN";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.FLOAT_TYPE_NAME))
+      typeStr = "FLOAT4";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME))
+      typeStr = "FLOAT8";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME))
+      typeStr = "TEXT";
+    else if(fieldType.equalsIgnoreCase(serdeConstants.BINARY_TYPE_NAME))
+      typeStr = "BLOB";
+
+    try {
+      return Enum.valueOf(TajoDataTypes.Type.class, typeStr);
+    } catch (IllegalArgumentException iae) {
+      System.out.println("Cannot find a matching Tajo type for '" + typeStr + "'");
+      return null;
+    }
+  }
+
+  public static String getHiveFieldType(String fieldType) throws IOException {
+    if(fieldType == null) {
+      throw new InternalException("Tajo field type is null.");
+    }
+    String typeStr = null;
+
+    if(fieldType.equalsIgnoreCase("INT4"))
+      typeStr = serdeConstants.INT_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("INT1"))
+      typeStr = serdeConstants.TINYINT_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("INT2"))
+      typeStr = serdeConstants.SMALLINT_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("INT8"))
+      typeStr = serdeConstants.BIGINT_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("BOOLEAN"))
+      typeStr = serdeConstants.BOOLEAN_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("FLOAT4"))
+      typeStr = serdeConstants.FLOAT_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("FLOAT8"))
+      typeStr = serdeConstants.DOUBLE_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("TEXT"))
+      typeStr = serdeConstants.STRING_TYPE_NAME;
+    else if(fieldType.equalsIgnoreCase("BLOB"))
+      typeStr = serdeConstants.BINARY_TYPE_NAME;
+
+    return typeStr;
+  }
+
+  public static String getStoreType(String fileFormat, String delimiter) throws IOException{
+    if(fileFormat == null) {
+      throw new InternalException("Hive file output format is null.");
+    }
+
+    String[] fileFormatArray = fileFormat.split("\\.");
+    if(fileFormatArray.length < 1) {
+      throw new InternalException("Hive file output format is wrong. - file output format:" + fileFormat);
+    }
+
+    String outputFormatClass = fileFormatArray[fileFormatArray.length-1];
+
+    if(outputFormatClass.equals("HiveIgnoreKeyTextOutputFormat")) {
+      return CatalogProtos.StoreType.CSV.name();
+    } else {
+      //TODO: other file format
+      return null;
+    }
+  }
+
+}
\ No newline at end of file
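
A small sketch (not part of the commit) exercising the static type-mapping helpers defined above; the class name and literal values are illustrative only.

    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.tajo.catalog.store.HCatalogUtil;
    import org.apache.tajo.common.TajoDataTypes;

    public class HCatalogUtilExample {
      public static void main(String[] args) throws Exception {
        // Hive "string" columns map to the Tajo TEXT type ...
        TajoDataTypes.Type tajoType = HCatalogUtil.getTajoFieldType(serdeConstants.STRING_TYPE_NAME);
        System.out.println(tajoType);   // TEXT

        // ... and Tajo TEXT maps back to the Hive "string" type.
        String hiveType = HCatalogUtil.getHiveFieldType("TEXT");
        System.out.println(hiveType);   // string

        // A Hive table stored with HiveIgnoreKeyTextOutputFormat is treated as a CSV table.
        String storeType = HCatalogUtil.getStoreType(
            "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", "|");
        System.out.println(storeType);  // CSV
      }
    }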

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-catalog/tajo-catalog-server/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-server/pom.xml b/tajo-catalog/tajo-catalog-server/pom.xml
index e1105eb..ab3139b 100644
--- a/tajo-catalog/tajo-catalog-server/pom.xml
+++ b/tajo-catalog/tajo-catalog-server/pom.xml
@@ -127,10 +127,6 @@
       <groupId>org.apache.tajo</groupId>
       <artifactId>tajo-rpc</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.apache.tajo</groupId>
-      <artifactId>tajo-algebra</artifactId>
-    </dependency>
 
     <dependency>
       <groupId>com.google.protobuf</groupId>
@@ -155,132 +151,8 @@
     </dependency>
   </dependencies>
 
-
   <profiles>
     <profile>
-      <id>hive-0.11.0</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <properties>
-        <hive.version>0.11.0</hive.version>
-        <mapred.version>1.2.1</mapred.version>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hive</groupId>
-          <artifactId>hive-exec</artifactId>
-          <version>${hive.version}</version>
-          <exclusions>
-            <exclusion>
-              <groupId>javax.jdo</groupId>
-              <artifactId>jdo2-api</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-builtins</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-cli</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-common</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-pdk</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-service</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-shims</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hive</groupId>
-          <artifactId>hive-metastore</artifactId>
-          <version>${hive.version}</version>
-          <exclusions>
-            <exclusion>
-              <groupId>javax.jdo</groupId>
-              <artifactId>jdo2-api</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-builtins</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-cli</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-common</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-pdk</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-service</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-shims</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hcatalog</groupId>
-          <artifactId>hcatalog-core</artifactId>
-          <version>${hive.version}</version>
-          <exclusions>
-            <exclusion>
-              <groupId>javax.jdo</groupId>
-              <artifactId>jdo2-api</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-builtins</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-cli</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-common</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-pdk</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-service</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.hive</groupId>
-              <artifactId>hive-shims</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${mapred.version}</version>
-          <scope>provided</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
       <id>docs</id>
       <activation>
         <activeByDefault>false</activeByDefault>

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
deleted file mode 100644
index d8a0e98..0000000
--- a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
+++ /dev/null
@@ -1,396 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.catalog.store;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.*;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hcatalog.common.HCatUtil;
-import org.apache.hcatalog.data.Pair;
-import org.apache.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hcatalog.data.schema.HCatSchema;
-import org.apache.tajo.catalog.*;
-import org.apache.tajo.catalog.Schema;
-import org.apache.tajo.catalog.proto.CatalogProtos;
-import org.apache.tajo.catalog.statistics.TableStats;
-import org.apache.tajo.common.TajoDataTypes;
-import org.apache.tajo.exception.InternalException;
-
-import java.io.IOException;
-import java.util.*;
-
-public class HCatalogStore extends CatalogConstants implements CatalogStore {
-  protected final Log LOG = LogFactory.getLog(getClass());
-  protected Configuration conf;
-  protected String catalogUri;
-  private Map<Pair<String, String>, Table> tableMap = new HashMap<Pair<String, String>, Table>();
-
-  public HCatalogStore(final Configuration conf)
-      throws InternalException {
-    this.conf = conf;
-    if(conf.get(CatalogConstants.DEPRECATED_CATALOG_URI) != null) {
-      LOG.warn("Configuration parameter " + CatalogConstants.DEPRECATED_CATALOG_URI + " " +
-          "is deprecated. Use " + CatalogConstants.CATALOG_URI + " instead.");
-      this.catalogUri = conf.get(CatalogConstants.DEPRECATED_CATALOG_URI);
-    } else {
-      this.catalogUri = conf.get(CatalogConstants.CATALOG_URI);
-    }
-  }
-
-  @Override
-  public final boolean existTable(final String name) throws IOException {
-    boolean exist = false;
-
-    String dbName = null, tableName = null;
-    Pair<String, String> tablePair = null;
-    org.apache.hadoop.hive.ql.metadata.Table table = null;
-    HiveMetaStoreClient client = null;
-
-    // get db name and table name.
-    try {
-      tablePair = HCatUtil.getDbAndTableName(name);
-      dbName = tablePair.first;
-      tableName = tablePair.second;
-    } catch (IOException ioe) {
-      throw new InternalException("Table name is wrong.", ioe);
-    }
-
-    // get table
-    try {
-      try {
-        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
-        table = HCatUtil.getTable(client, dbName, tableName);
-        if (table != null) {
-          exist = true;
-        }
-      } catch (NoSuchObjectException nsoe) {
-        exist = false;
-      } catch (Exception e) {
-        throw new IOException(e);
-      }
-    } finally {
-      HCatUtil.closeHiveClientQuietly(client);
-    }
-
-    return exist;
-  }
-
-  @Override
-  public final TableDesc getTable(final String name) throws IOException {
-    String dbName = null, tableName = null;
-    Pair<String, String> tablePair = null;
-    org.apache.hadoop.hive.ql.metadata.Table table = null;
-    HiveMetaStoreClient client = null;
-    Path path = null;
-    CatalogProtos.StoreType storeType = null;
-    Schema schema = null;
-    Options options = null;
-    TableStats stats = null;
-
-    // get db name and table name.
-    try {
-      tablePair = HCatUtil.getDbAndTableName(name);
-      dbName = tablePair.first;
-      tableName = tablePair.second;
-    } catch (IOException ioe) {
-      throw new InternalException("Table name is wrong.", ioe);
-    }
-
-    //////////////////////////////////
-    // set tajo table schema.
-    //////////////////////////////////
-    try {
-      // get hive table schema
-      try {
-        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
-        table = HCatUtil.getTable(client, dbName, tableName);
-        path = table.getPath();
-      } catch (NoSuchObjectException nsoe) {
-        throw new InternalException("Table not found. - tableName:" + name, nsoe);
-      } catch (Exception e) {
-        throw new IOException(e);
-      }
-
-      // convert hcatalog field schema into tajo field schema.
-      schema = new Schema();
-      HCatSchema tableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
-      List<HCatFieldSchema> fieldSchemaList = tableSchema.getFields();
-      for (HCatFieldSchema eachField : fieldSchemaList) {
-        String fieldName = tableName + "." + eachField.getName();
-        TajoDataTypes.Type dataType = HCatalogUtil.getTajoFieldType(eachField.getType().toString());
-        schema.addColumn(fieldName, dataType);
-      }
-
-      // validate field schema.
-      try {
-        HCatalogUtil.validateHCatTableAndTajoSchema(tableSchema);
-      } catch (IOException e) {
-        throw new InternalException(
-            "HCatalog cannot support schema. - schema:" + tableSchema.toString(), e);
-      }
-
-      stats = new TableStats();
-      options = Options.create();
-      Properties properties = table.getMetadata();
-      if (properties != null) {
-        // set field delimiter
-        String fieldDelimiter = "", fileOutputformat = "";
-        if (properties.getProperty("field.delim") != null) {
-          fieldDelimiter = properties.getProperty("field.delim");
-        }
-        // set file output format
-        fileOutputformat = properties.getProperty("file.outputformat");
-        storeType = CatalogUtil.getStoreType(HCatalogUtil.getStoreType(fileOutputformat,
-            fieldDelimiter));
-
-        // TODO: another stored file
-        if (storeType.equals(CatalogProtos.StoreType.CSV) && fieldDelimiter != null) {
-          options.put("csvfile.delimiter", fieldDelimiter);
-        }
-
-        // set data size
-        if(properties.getProperty("totalSize") != null) {
-          stats.setNumBytes(new Long(properties.getProperty("totalSize")));
-        }
-      }
-
-    } finally {
-      HCatUtil.closeHiveClientQuietly(client);
-    }
-    TableMeta meta = new TableMeta(storeType, options);
-
-    TableDesc tableDesc = new TableDesc(tableName, schema, meta, path);
-    if (stats != null) {
-      tableDesc.setStats(stats);
-    }
-
-    return tableDesc;
-  }
-
-  private TajoDataTypes.Type getDataType(final String typeStr) {
-    try {
-      return Enum.valueOf(TajoDataTypes.Type.class, typeStr);
-    } catch (IllegalArgumentException iae) {
-      LOG.error("Cannot find a matched type aginst from '" + typeStr + "'");
-      return null;
-    }
-  }
-
-  @Override
-  public final List<String> getAllTableNames() throws IOException {
-    List<String> dbs = null;
-    List<String> tables = null;
-    List<String> allTables = new ArrayList<String>();
-    HiveMetaStoreClient client = null;
-
-    try {
-      try {
-        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
-        dbs = client.getAllDatabases();
-        for(String eachDB: dbs) {
-          tables = client.getAllTables(eachDB);
-          for(String eachTable: tables) {
-            allTables.add(eachDB + "." + eachTable);
-          }
-        }
-      } catch (Exception e) {
-        throw new IOException(e);
-      }
-
-    } finally {
-      HCatUtil.closeHiveClientQuietly(client);
-    }
-    return allTables;
-  }
-
-  @Override
-  public final void addTable(final TableDesc tableDesc) throws IOException {
-    String dbName = null, tableName = null;
-    Pair<String, String> tablePair = null;
-    HiveMetaStoreClient client = null;
-
-    // get db name and table name.
-    try {
-      tablePair = HCatUtil.getDbAndTableName(tableDesc.getName());
-      dbName = tablePair.first;
-      tableName = tablePair.second;
-    } catch (IOException ioe) {
-      throw new InternalException("Table name is wrong.", ioe);
-    }
-
-    try {
-      try {
-        client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
-
-        org.apache.hadoop.hive.metastore.api.Table table =
-            new org.apache.hadoop.hive.metastore.api.Table();
-
-        table.setDbName(dbName);
-        table.setTableName(tableName);
-        // TODO: set owner
-        //table.setOwner();
-
-        StorageDescriptor sd = new StorageDescriptor();
-
-        // If Tajo sets the location here, the thrift client throws an exception such as:
-        // Caused by: MetaException(message:java.lang.NullPointerException)
-        // To modify the table path, change it via the Hive CLI instead.
-        //sd.setLocation(tableDesc.getPath().toString());
-
-        // set column information
-        ArrayList<FieldSchema> cols =
-            new ArrayList<FieldSchema>(tableDesc.getSchema().getColumns().size());
-        for (Column col : tableDesc.getSchema().getColumns()) {
-          cols.add(new FieldSchema(col.getColumnName(),
-              HCatalogUtil.getHiveFieldType(col.getDataType().getType().name()), ""));
-        }
-        sd.setCols(cols);
-
-        // TODO: compression type
-        // TODO: table type
-        sd.setCompressed(false);
-
-        sd.setParameters(new HashMap<String, String>());
-        sd.setSerdeInfo(new SerDeInfo());
-        sd.getSerdeInfo().setName(table.getTableName());
-
-        // TODO: other serialization libraries
-        sd.getSerdeInfo().setSerializationLib(
-            org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
-
-        sd.getSerdeInfo().setParameters(new HashMap<String, String>());
-//      sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
-        sd.getSerdeInfo().getParameters().put(serdeConstants.FIELD_DELIM, "|");
-
-        // TODO: other input format classes
-        sd.setInputFormat(org.apache.hadoop.mapred.TextInputFormat.class.getName());
-
-        // TODO: other output format classes
-        sd.setOutputFormat(
-            org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat.class.getName());
-
-        sd.setSortCols(new ArrayList<Order>());
-
-        table.setSd(sd);
-        client.createTable(table);
-      } catch (Exception e) {
-        throw new IOException(e);
-      }
-    } finally {
-      HCatUtil.closeHiveClientQuietly(client);
-    }
-  }
-
-  @Override
-  public final void deleteTable(final String name) throws IOException {
-    String dbName = null, tableName = null;
-    Pair<String, String> tablePair = null;
-    HiveMetaStoreClient client = null;
-
-    // get db name and table name.
-    try {
-      tablePair = HCatUtil.getDbAndTableName(name);
-      dbName = tablePair.first;
-      tableName = tablePair.second;
-    } catch (IOException ioe) {
-      throw new InternalException("Table name is wrong.", ioe);
-    }
-
-    try {
-      client = HCatalogUtil.getHiveMetaClient(catalogUri, null);
-      client.dropTable(dbName, tableName);
-    } catch (NoSuchObjectException nsoe) {
-      // the table does not exist; treat the drop as a no-op
-    } catch (Exception e) {
-      throw new IOException(e);
-    } finally {
-      HCatUtil.closeHiveClientQuietly(client);
-    }
-  }
-  @Override
-  public final void addFunction(final FunctionDesc func) throws IOException {
-    // TODO - not implemented yet
-  }
-
-  @Override
-  public final void deleteFunction(final FunctionDesc func) throws IOException {
-    // TODO - not implemented yet
-  }
-
-  @Override
-  public final void existFunction(final FunctionDesc func) throws IOException {
-    // TODO - not implemented yet
-  }
-
-  @Override
-  public final List<String> getAllFunctionNames() throws IOException {
-    // TODO - not implemented yet
-    return null;
-  }
-
-  @Override
-  public void delIndex(String indexName) throws IOException {
-    // TODO - not implemented yet
-  }
-
-  @Override
-  public boolean existIndex(String indexName) throws IOException {
-    // TODO - not implemented yet
-    return false;
-  }
-
-  @Override
-  public CatalogProtos.IndexDescProto[] getIndexes(String tableName) throws IOException {
-    // TODO - not implemented yet
-    return null;
-  }
-
-  @Override
-  public void addIndex(CatalogProtos.IndexDescProto proto) throws IOException {
-    // TODO - not implemented yet
-  }
-
-  @Override
-  public CatalogProtos.IndexDescProto getIndex(String indexName) throws IOException {
-    // TODO - not implemented yet
-    return null;
-  }
-
-  @Override
-  public CatalogProtos.IndexDescProto getIndex(String tableName, String columnName)
-      throws IOException {
-    // TODO - not implemented yet
-    return null;
-  }
-
-  @Override
-  public boolean existIndex(String tableName, String columnName) {
-    // TODO - not implemented yet
-    return false;
-  }
-
-  @Override
-  public final void close() {
-  }
-}

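For readers following the relocated HCatalogStore above, the lookup path in getTableDesc() boils down to: open a metastore client, fetch the Hive table, and walk its schema including partition columns. The minimal, self-contained sketch below only illustrates that flow; the metastore URI, database, and table name are placeholders, and it assumes the same HCatalog 0.x APIs the relocated code already uses, nothing introduced by this commit.

import java.util.List;

import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.tajo.catalog.store.HCatalogUtil;

public class DescribeHiveTableSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder metastore URI, database, and table name.
    HiveMetaStoreClient client =
        HCatalogUtil.getHiveMetaClient("thrift://localhost:9083", null);
    try {
      // Fetch the Hive table and its schema (with partition columns),
      // as getTableDesc() does before converting it into a Tajo Schema.
      Table table = HCatUtil.getTable(client, "default", "my_table");
      HCatSchema schema = HCatUtil.getTableSchemaWithPtnCols(table);
      List<HCatFieldSchema> fields = schema.getFields();
      for (HCatFieldSchema field : fields) {
        System.out.println(field.getName() + " : " + field.getType());
      }
    } finally {
      HCatUtil.closeHiveClientQuietly(client);
    }
  }
}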
http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
deleted file mode 100644
index b92cbf2..0000000
--- a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.tajo.catalog.store;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hcatalog.common.HCatException;
-import org.apache.hcatalog.common.HCatUtil;
-import org.apache.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hcatalog.data.schema.HCatSchema;
-import org.apache.tajo.catalog.proto.CatalogProtos;
-import org.apache.tajo.common.TajoDataTypes;
-import org.apache.tajo.exception.InternalException;
-
-import java.io.IOException;
-
-public class HCatalogUtil {
-  protected final Log LOG = LogFactory.getLog(getClass());
-
-  public static void validateHCatTableAndTajoSchema(HCatSchema tblSchema) throws InternalException {
-    for (HCatFieldSchema hcatField : tblSchema.getFields()) {
-      validateHCatFieldAndTajoSchema(hcatField);
-    }
-  }
-
-  private static void validateHCatFieldAndTajoSchema(HCatFieldSchema fieldSchema) throws
-      InternalException {
-    try {
-      HCatFieldSchema.Type fieldType = fieldSchema.getType();
-      switch (fieldType) {
-        case ARRAY:
-          throw new HCatException("Tajo cannot support array field type.");
-        case STRUCT:
-          throw new HCatException("Tajo cannot support struct field type.");
-        case MAP:
-          throw new HCatException("Tajo cannot support map field type.");
-      }
-    } catch (HCatException e) {
-      throw new InternalException("incompatible hcatalog types when assigning to tajo type. - " +
-          "HCatFieldSchema:" + fieldSchema, e);
-    }
-  }
-
-  public static HiveMetaStoreClient getHiveMetaClient(String metaStoreUri,
-                                                      String metaStoreKerberosPrincipal)
-                                                      //Class<?> cls)
-  throws Exception {
-//    HiveConf hiveConf = new HiveConf(cls);
-
-    HiveConf hiveConf = new HiveConf();
-
-    if (metaStoreUri != null) {
-      hiveConf.set("hive.metastore.local", "false");
-      hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreUri.trim());
-    }
-
-    if (metaStoreKerberosPrincipal != null) {
-      hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
-      hiveConf.setVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL, metaStoreKerberosPrincipal);
-    }
-
-    try {
-      return HCatUtil.getHiveClient(hiveConf);
-    } catch (Exception e) {
-      throw new InternalException("Tajo cannot connect Hive metastore. - serverUri:" +
-          metaStoreUri, e);
-    }
-  }
-
-  public static TajoDataTypes.Type getTajoFieldType(String fieldType) throws IOException {
-    if(fieldType == null) {
-      throw new InternalException("Hive field type is null.");
-    }
-    String typeStr = null;
-
-    if(fieldType.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME))
-      typeStr = "INT4";
-    else if(fieldType.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME))
-      typeStr = "INT1";
-    else if(fieldType.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME))
-      typeStr = "INT2";
-    else if(fieldType.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME))
-      typeStr = "INT8";
-    else if(fieldType.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME))
-      typeStr = "BOOLEAN";
-    else if(fieldType.equalsIgnoreCase(serdeConstants.FLOAT_TYPE_NAME))
-      typeStr = "FLOAT4";
-    else if(fieldType.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME))
-      typeStr = "FLOAT8";
-    else if(fieldType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME))
-      typeStr = "TEXT";
-    else if(fieldType.equalsIgnoreCase(serdeConstants.BINARY_TYPE_NAME))
-      typeStr = "BLOB";
-
-    try {
-      return typeStr == null ? null : Enum.valueOf(TajoDataTypes.Type.class, typeStr);
-    } catch (IllegalArgumentException iae) {
-      System.out.println("Cannot find a matching Tajo type for Hive type '" + fieldType + "'");
-      return null;
-    }
-  }
-
-  public static String getHiveFieldType(String fieldType) throws IOException {
-    if(fieldType == null) {
-      throw new InternalException("Tajo field type is null.");
-    }
-    String typeStr = null;
-
-    if(fieldType.equalsIgnoreCase("INT4"))
-      typeStr = serdeConstants.INT_TYPE_NAME;
-    else if(fieldType.equalsIgnoreCase("INT1"))
-      typeStr = serdeConstants.TINYINT_TYPE_NAME;
-    else if(fieldType.equalsIgnoreCase("INT2"))
-      typeStr = serdeConstants.SMALLINT_TYPE_NAME;
-    else if(fieldType.equalsIgnoreCase("INT8"))
-      typeStr = serdeConstants.BIGINT_TYPE_NAME;
-    else if(fieldType.equalsIgnoreCase("BOOLEAN"))
-      typeStr = serdeConstants.BOOLEAN_TYPE_NAME;
-    else if(fieldType.equalsIgnoreCase("FLOAT4"))
-      typeStr = serdeConstants.FLOAT_TYPE_NAME;
-    else if(fieldType.equalsIgnoreCase("FLOAT8"))
-      typeStr = serdeConstants.DOUBLE_TYPE_NAME;
-    else if(fieldType.equalsIgnoreCase("TEXT"))
-      typeStr = serdeConstants.STRING_TYPE_NAME;
-    else if(fieldType.equalsIgnoreCase("BLOB"))
-      typeStr = serdeConstants.BINARY_TYPE_NAME;
-
-    return typeStr;
-  }
-
-  public static String getStoreType(String fileFormat, String delimiter) throws IOException{
-    if(fileFormat == null) {
-      throw new InternalException("Hive file output format is null.");
-    }
-
-    String[] fileFormatArray = fileFormat.split("\\.");
-    if(fileFormatArray.length < 1) {
-      throw new InternalException("Hive file output format is invalid. - file output format:" + fileFormat);
-    }
-
-    String outputFormatClass = fileFormatArray[fileFormatArray.length - 1];
-
-    if(outputFormatClass.equals("HiveIgnoreKeyTextOutputFormat")) {
-      return CatalogProtos.StoreType.CSV.name();
-    } else {
-      //TODO: other file format
-      return null;
-    }
-  }
-
-}
\ No newline at end of file

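For the scalar types handled in HCatalogUtil above, the Hive-to-Tajo and Tajo-to-Hive mappings are symmetric, so a Hive type can be round-tripped through Tajo and back. The small sketch below is only an illustration of that mapping (it is not part of the module) and assumes the relocated HCatalogUtil and Hive's serde constants are on the classpath; this is the same call chain that getTableDesc() and addTable() rely on when converting schemas in each direction.

import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.tajo.catalog.store.HCatalogUtil;
import org.apache.tajo.common.TajoDataTypes;

public class TypeMappingSketch {
  public static void main(String[] args) throws Exception {
    // Hive "string" maps to Tajo TEXT ...
    TajoDataTypes.Type tajoType =
        HCatalogUtil.getTajoFieldType(serdeConstants.STRING_TYPE_NAME);
    // ... and Tajo TEXT maps back to Hive "string".
    String hiveType = HCatalogUtil.getHiveFieldType(tajoType.name());
    System.out.println(serdeConstants.STRING_TYPE_NAME + " -> " + tajoType + " -> " + hiveType);
  }
}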
http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-dist/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-dist/pom.xml b/tajo-dist/pom.xml
index e24e4fb..562f21a 100644
--- a/tajo-dist/pom.xml
+++ b/tajo-dist/pom.xml
@@ -106,8 +106,6 @@
                       run cp -r $ROOT/tajo-core/target/tajo-core-${project.version}/* .
                       run cp -r ${project.basedir}/src/main/bin .
                       run cp -r ${project.basedir}/src/main/conf .
-                      run rm -rf lib/hive-*.jar
-                      run cp -r $ROOT/tajo-catalog/tajo-catalog-server/target/lib/hive-*.jar  lib/
                       run rm -rf lib/tajo-*-${project.version}.jar
                       echo
                       echo "Tajo dist layout available at: ${project.build.directory}/tajo-${project.version}"

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-dist/src/main/bin/tajo
----------------------------------------------------------------------
diff --git a/tajo-dist/src/main/bin/tajo b/tajo-dist/src/main/bin/tajo
index 7f3a0ae..f31c701 100755
--- a/tajo-dist/src/main/bin/tajo
+++ b/tajo-dist/src/main/bin/tajo
@@ -232,6 +232,30 @@ export TAJO_BASE_CLASSPATH
 CLASSPATH="${CLASSPATH}:${TAJO_BASE_CLASSPATH}"
 
 ##############################################################################
+# Find and Set Hive CLASSPATH
+##############################################################################
+
+HIVE_LIB=$HIVE_HOME/lib
+
+if [ -n "$HIVE_HOME" ] && [ -d "${HIVE_LIB}" ]; then
+  for f in ${HIVE_LIB}/*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+
+  for f in $HIVE_HOME/hcatalog/share/hcatalog/*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+
+  for f in $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-client-core-*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+
+  for f in $HADOOP_HOME/hadoop-core-*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+fi
+
+##############################################################################
 # Find and Set Hadoop CLASSPATH
 ##############################################################################
 

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/60f7df20/tajo-dist/src/main/conf/tajo-env.sh
----------------------------------------------------------------------
diff --git a/tajo-dist/src/main/conf/tajo-env.sh b/tajo-dist/src/main/conf/tajo-env.sh
index 37988bf..bb5cf4b 100755
--- a/tajo-dist/src/main/conf/tajo-env.sh
+++ b/tajo-dist/src/main/conf/tajo-env.sh
@@ -65,3 +65,6 @@
 
 # Tajo cluster mode. the default mode is standby mode.
 export TAJO_WORKER_STANDBY_MODE=true
+
+# HIVE_HOME must be set to use HCatalogStore
+# export HIVE_HOME=
\ No newline at end of file