Posted to commits@tajo.apache.org by jh...@apache.org on 2014/03/28 03:49:42 UTC

git commit: TAJO-694: Bump up hadoop to 2.3.0. (jinho)

Repository: tajo
Updated Branches:
  refs/heads/master 9d00f9fef -> fc92ed5c6


TAJO-694: Bump up hadoop to 2.3.0. (jinho)


Project: http://git-wip-us.apache.org/repos/asf/tajo/repo
Commit: http://git-wip-us.apache.org/repos/asf/tajo/commit/fc92ed5c
Tree: http://git-wip-us.apache.org/repos/asf/tajo/tree/fc92ed5c
Diff: http://git-wip-us.apache.org/repos/asf/tajo/diff/fc92ed5c

Branch: refs/heads/master
Commit: fc92ed5c6399d1ee4d48307e4b6881db6fb33c43
Parents: 9d00f9f
Author: jinossy <ji...@gmail.com>
Authored: Fri Mar 28 11:49:08 2014 +0900
Committer: jinossy <ji...@gmail.com>
Committed: Fri Mar 28 11:49:08 2014 +0900

----------------------------------------------------------------------
 BUILDING.txt                                    |   1 +
 CHANGES.txt                                     |   2 +
 tajo-core/tajo-core-pullserver/pom.xml          |  10 +-
 .../pullserver/listener/FileCloseListener.java  |  39 ++++++
 .../pullserver/listener/FileCloseListener.java  |  42 +++++++
 .../tajo/pullserver/FadvisedChunkedFile.java    |  80 -------------
 .../tajo/pullserver/FadvisedFileRegion.java     |  82 -------------
 .../tajo/pullserver/PullServerAuxService.java   |  12 +-
 .../tajo/pullserver/TajoPullServerService.java  |  12 +-
 tajo-dist/src/main/bin/tajo                     | 118 +++++++++----------
 .../main/sphinx/configuration/cluster_setup.rst |   2 +-
 .../sphinx/getting_started/prerequisites.rst    |   2 +-
 tajo-docs/src/main/sphinx/jdbc_driver.rst       |   2 +-
 tajo-project/pom.xml                            |  51 +++++++-
 14 files changed, 208 insertions(+), 247 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/BUILDING.txt
----------------------------------------------------------------------
diff --git a/BUILDING.txt b/BUILDING.txt
index 8dcc179..0cf0a04 100644
--- a/BUILDING.txt
+++ b/BUILDING.txt
@@ -35,6 +35,7 @@ Maven build goals:
 
  Build options:
   * Use -Dtar to create a TAR with the distribution (using -Pdist)
+  * Use -Dhadoop.version to build with the specific hadoop version (-Dhadoop.version=2.3.0)
 
  Tests options:
   * Use -DskipTests to skip tests when running the following Maven goals:
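
With the new option, a distribution build pinned to a particular Hadoop release might look like the
following (a usage sketch combining the options listed above, run from the source root):

  $ mvn clean package -Pdist -Dtar -DskipTests -Dhadoop.version=2.3.0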

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 44d40cf..3af7f5b 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -572,6 +572,8 @@ Release 0.8.0 - unreleased
 
   TASKS
 
+    TAJO-694: Bump up hadoop to 2.3.0. (jinho)
+
     TAJO-684: Add functions about time. (Alvin Henrick via jihoon)
 
     TAJO-669: Add cluster setup documentation. (hyunsik)

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-core/tajo-core-pullserver/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-pullserver/pom.xml b/tajo-core/tajo-core-pullserver/pom.xml
index 0bdfed2..b5049be 100644
--- a/tajo-core/tajo-core-pullserver/pom.xml
+++ b/tajo-core/tajo-core-pullserver/pom.xml
@@ -34,11 +34,6 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.tajo</groupId>
-      <artifactId>tajo-common</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.tajo</groupId>
       <artifactId>tajo-catalog-common</artifactId>
       <scope>provided</scope>
     </dependency>
@@ -59,6 +54,11 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
       <groupId>io.netty</groupId>
       <artifactId>netty</artifactId>
       <scope>compile</scope>
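
The dependency swap above reflects that the pull server now reuses Hadoop's own shuffle I/O classes
(FadvisedFileRegion, FadvisedChunkedFile) instead of Tajo's local copies, so hadoop-mapreduce-client-shuffle
has to be available on the provided classpath. A quick sanity check of the resolved dependency (a sketch;
assumes the Tajo modules have already been installed locally):

  $ mvn -pl tajo-core/tajo-core-pullserver dependency:tree | grep mapreduce-client-shuffle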

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-core/tajo-core-pullserver/src/main/hadoop-2.2.0/org/apache/tajo/pullserver/listener/FileCloseListener.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-pullserver/src/main/hadoop-2.2.0/org/apache/tajo/pullserver/listener/FileCloseListener.java b/tajo-core/tajo-core-pullserver/src/main/hadoop-2.2.0/org/apache/tajo/pullserver/listener/FileCloseListener.java
new file mode 100644
index 0000000..b7f3e34
--- /dev/null
+++ b/tajo-core/tajo-core-pullserver/src/main/hadoop-2.2.0/org/apache/tajo/pullserver/listener/FileCloseListener.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.pullserver.listener;
+
+import org.apache.hadoop.mapred.FadvisedFileRegion;
+import org.jboss.netty.channel.ChannelFuture;
+import org.jboss.netty.channel.ChannelFutureListener;
+
+public class FileCloseListener implements ChannelFutureListener {
+
+  private FadvisedFileRegion filePart;
+
+  public FileCloseListener(FadvisedFileRegion filePart) {
+    this.filePart = filePart;
+  }
+
+  // TODO error handling; distinguish IO/connection failures,
+  //      attribute to appropriate spill output
+  @Override
+  public void operationComplete(ChannelFuture future) {
+    filePart.releaseExternalResources();
+  }
+}

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-core/tajo-core-pullserver/src/main/hadoop-2.3.0/org/apache/tajo/pullserver/listener/FileCloseListener.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-pullserver/src/main/hadoop-2.3.0/org/apache/tajo/pullserver/listener/FileCloseListener.java b/tajo-core/tajo-core-pullserver/src/main/hadoop-2.3.0/org/apache/tajo/pullserver/listener/FileCloseListener.java
new file mode 100644
index 0000000..5b2d1b3
--- /dev/null
+++ b/tajo-core/tajo-core-pullserver/src/main/hadoop-2.3.0/org/apache/tajo/pullserver/listener/FileCloseListener.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.pullserver.listener;
+
+import org.apache.hadoop.mapred.FadvisedFileRegion;
+import org.jboss.netty.channel.ChannelFuture;
+import org.jboss.netty.channel.ChannelFutureListener;
+
+public class FileCloseListener implements ChannelFutureListener {
+
+  private FadvisedFileRegion filePart;
+
+  public FileCloseListener(FadvisedFileRegion filePart) {
+    this.filePart = filePart;
+  }
+
+  // TODO error handling; distinguish IO/connection failures,
+  //      attribute to appropriate spill output
+  @Override
+  public void operationComplete(ChannelFuture future) {
+    if(future.isSuccess()){
+      filePart.transferSuccessful();
+    }
+    filePart.releaseExternalResources();
+  }
+}
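
The two FileCloseListener copies differ only in operationComplete(): the FadvisedFileRegion shipped in
Hadoop 2.3.0's hadoop-mapreduce-client-shuffle adds transferSuccessful(), which the 2.2.0 class does not
have. A version-specific copy is therefore kept under src/main/hadoop-2.2.0 and src/main/hadoop-2.3.0, and
the build-helper-maven-plugin executions added to tajo-project/pom.xml below compile only the directory
matching ${hadoop.version}. One way to see the API difference (the jar path is assumed from a standard
Hadoop 2.3.0 binary distribution):

  $ javap -cp "$HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-client-shuffle-2.3.0.jar" \
      org.apache.hadoop.mapred.FadvisedFileRegion | grep transferSuccessful
    public void transferSuccessful();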

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/FadvisedChunkedFile.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/FadvisedChunkedFile.java b/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/FadvisedChunkedFile.java
deleted file mode 100644
index ec34f00..0000000
--- a/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/FadvisedChunkedFile.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.pullserver;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.io.ReadaheadPool;
-import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
-import org.apache.hadoop.io.nativeio.NativeIO.POSIX;
-import org.jboss.netty.handler.stream.ChunkedFile;
-
-import java.io.FileDescriptor;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-
-public class FadvisedChunkedFile extends ChunkedFile {
-
-  private static final Log LOG = LogFactory.getLog(FadvisedChunkedFile.class);
-
-  private final boolean manageOsCache;
-  private final int readaheadLength;
-  private final ReadaheadPool readaheadPool;
-  private final FileDescriptor fd;
-  private final String identifier;
-
-  private ReadaheadRequest readaheadRequest;
-
-  public FadvisedChunkedFile(RandomAccessFile file, long position, long count,
-      int chunkSize, boolean manageOsCache, int readaheadLength,
-      ReadaheadPool readaheadPool, String identifier) throws IOException {
-    super(file, position, count, chunkSize);
-    this.manageOsCache = manageOsCache;
-    this.readaheadLength = readaheadLength;
-    this.readaheadPool = readaheadPool;
-    this.fd = file.getFD();
-    this.identifier = identifier;
-  }
-
-  @Override
-  public Object nextChunk() throws Exception {
-    if (manageOsCache && readaheadPool != null) {
-      readaheadRequest = readaheadPool
-          .readaheadStream(identifier, fd, getCurrentOffset(), readaheadLength,
-              getEndOffset(), readaheadRequest);
-    }
-    return super.nextChunk();
-  }
-
-  @Override
-  public void close() throws Exception {
-    if (readaheadRequest != null) {
-      readaheadRequest.cancel();
-    }
-    if (manageOsCache && getEndOffset() - getStartOffset() > 0) {
-      try {
-        POSIX.posixFadviseIfPossible(identifier, fd, getStartOffset(), getEndOffset()
-            - getStartOffset(), POSIX.POSIX_FADV_DONTNEED);
-      } catch (Throwable t) {
-        LOG.warn("Failed to manage OS cache for " + identifier, t);
-      }
-    }
-    super.close();
-  }
-}

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/FadvisedFileRegion.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/FadvisedFileRegion.java b/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/FadvisedFileRegion.java
deleted file mode 100644
index 81e0bf1..0000000
--- a/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/FadvisedFileRegion.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.pullserver;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.io.ReadaheadPool;
-import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
-import org.apache.hadoop.io.nativeio.NativeIO.POSIX;
-import org.jboss.netty.channel.DefaultFileRegion;
-
-import java.io.FileDescriptor;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.nio.channels.WritableByteChannel;
-
-public class FadvisedFileRegion extends DefaultFileRegion {
-
-  private static final Log LOG = LogFactory.getLog(FadvisedFileRegion.class);
-
-  private final boolean manageOsCache;
-  private final int readaheadLength;
-  private final ReadaheadPool readaheadPool;
-  private final FileDescriptor fd;
-  private final String identifier;
-
-  private ReadaheadRequest readaheadRequest;
-
-  public FadvisedFileRegion(RandomAccessFile file, long position, long count,
-      boolean manageOsCache, int readaheadLength, ReadaheadPool readaheadPool,
-      String identifier) throws IOException {
-    super(file.getChannel(), position, count);
-    this.manageOsCache = manageOsCache;
-    this.readaheadLength = readaheadLength;
-    this.readaheadPool = readaheadPool;
-    this.fd = file.getFD();
-    this.identifier = identifier;
-  }
-
-  @Override
-  public long transferTo(WritableByteChannel target, long position)
-      throws IOException {
-    if (manageOsCache && readaheadPool != null) {
-      readaheadRequest = readaheadPool.readaheadStream(identifier, fd,
-          getPosition() + position, readaheadLength,
-          getPosition() + getCount(), readaheadRequest);
-    }
-    return super.transferTo(target, position);
-  }
-
-  @Override
-  public void releaseExternalResources() {
-    if (readaheadRequest != null) {
-      readaheadRequest.cancel();
-    }
-    if (manageOsCache && getCount() > 0) {
-      try {
-        POSIX.posixFadviseIfPossible(identifier, fd, getPosition(), getCount(),
-            POSIX.POSIX_FADV_DONTNEED);
-      } catch (Throwable t) {
-        LOG.warn("Failed to manage OS cache for " + identifier, t);
-      }
-    }
-    super.releaseExternalResources();
-  }
-}

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/PullServerAuxService.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/PullServerAuxService.java b/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/PullServerAuxService.java
index d098797..afdae23 100644
--- a/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/PullServerAuxService.java
+++ b/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/PullServerAuxService.java
@@ -31,6 +31,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.DataInputByteBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.ReadaheadPool;
+import org.apache.hadoop.mapred.FadvisedChunkedFile;
+import org.apache.hadoop.mapred.FadvisedFileRegion;
 import org.apache.hadoop.metrics2.MetricsSystem;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;
@@ -47,6 +49,7 @@ import org.apache.tajo.QueryId;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.conf.TajoConf.ConfVars;
+import org.apache.tajo.pullserver.listener.FileCloseListener;
 import org.apache.tajo.pullserver.retriever.FileChunk;
 import org.apache.tajo.storage.RowStoreUtil.RowStoreDecoder;
 import org.apache.tajo.storage.Tuple;
@@ -490,14 +493,7 @@ public class PullServerAuxService extends AuxiliaryService {
             file.startOffset, file.length(), manageOsCache, readaheadLength,
             readaheadPool, file.getFile().getAbsolutePath());
         writeFuture = ch.write(partition);
-        writeFuture.addListener(new ChannelFutureListener() {
-          // TODO error handling; distinguish IO/connection failures,
-          //      attribute to appropriate spill output
-          @Override
-          public void operationComplete(ChannelFuture future) {
-            partition.releaseExternalResources();
-          }
-        });
+        writeFuture.addListener(new FileCloseListener(partition));
       } else {
         // HTTPS cannot be done with zero copy.
         final FadvisedChunkedFile chunk = new FadvisedChunkedFile(spill,

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/TajoPullServerService.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/TajoPullServerService.java b/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/TajoPullServerService.java
index c1fcef1..c416b66 100644
--- a/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/TajoPullServerService.java
+++ b/tajo-core/tajo-core-pullserver/src/main/java/org/apache/tajo/pullserver/TajoPullServerService.java
@@ -30,6 +30,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.DataInputByteBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.ReadaheadPool;
+import org.apache.hadoop.mapred.FadvisedChunkedFile;
+import org.apache.hadoop.mapred.FadvisedFileRegion;
 import org.apache.hadoop.metrics2.MetricsSystem;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;
@@ -43,6 +45,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.conf.TajoConf.ConfVars;
+import org.apache.tajo.pullserver.listener.FileCloseListener;
 import org.apache.tajo.pullserver.retriever.FileChunk;
 import org.apache.tajo.rpc.RpcChannelFactory;
 import org.apache.tajo.storage.RowStoreUtil.RowStoreDecoder;
@@ -487,14 +490,7 @@ public class TajoPullServerService extends AbstractService {
             file.startOffset, file.length(), manageOsCache, readaheadLength,
             readaheadPool, file.getFile().getAbsolutePath());
         writeFuture = ch.write(filePart);
-        writeFuture.addListener(new ChannelFutureListener() {
-          // TODO error handling; distinguish IO/connection failures,
-          //      attribute to appropriate spill output
-          @Override
-          public void operationComplete(ChannelFuture future) {
-            filePart.releaseExternalResources();
-          }
-        });
+        writeFuture.addListener(new FileCloseListener(filePart));
       } else {
         // HTTPS cannot be done with zero copy.
         final FadvisedChunkedFile chunk = new FadvisedChunkedFile(spill,

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-dist/src/main/bin/tajo
----------------------------------------------------------------------
diff --git a/tajo-dist/src/main/bin/tajo b/tajo-dist/src/main/bin/tajo
index 6b93203..77d4a02 100755
--- a/tajo-dist/src/main/bin/tajo
+++ b/tajo-dist/src/main/bin/tajo
@@ -183,51 +183,30 @@ HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/etc/hadoop}"
 # Hadoop Version Checking Section End
 ##############################################################################
 
-
 ##############################################################################
-# Find and Set Tajo CLASSPATH
+# Find and Set Hadoop CLASSPATH
 ##############################################################################
 
-# Add java common jars to TAJO_BASE_CLASSPATH
-TAJO_BASE_CLASSPATH="${JAVA_HOME}/lib/tools.jar"
-
-# add user-specified CLASSPATH firstly
-if [ "$TAJO_USER_CLASSPATH_FIRST" != "" ] && [ "$TAJO_CLASSPATH" != "" ] ; then
-  TAJO_BASE_CLASSPATH=${TAJO_BASE_CLASSPATH}:${TAJO_CLASSPATH}
-fi
-
-# for developers, add Tajo classes to TAJO_BASE_CLASSPATH
-if [ -d "$TAJO_HOME/target/classes" ]; then
-  TAJO_BASE_CLASSPATH=${TAJO_BASE_CLASSPATH}:$TAJO_HOME/target/classes
-fi
-if [ -d "$TAJO_HOME/target/test/classes" ]; then
-  TAJO_BASE_CLASSPATH=${TAJO_BASE_CLASSPATH}:$TAJO_HOME/target/test/classes
-fi
-
-# so that filenames w/ spaces are handled correctly in loops below
-IFS=$'\n'
-
-# TAJO_BASE_CLASSPATH contains $TAJO_CONF_DIR before containing jars.
-TAJO_BASE_CLASSPATH="${TAJO_CONF_DIR}"
+# HADOOP JAR DIRS
+HADOOP_MODULE_DIRS="$HADOOP_HOME/share/hadoop/common/lib
+$HADOOP_HOME/share/hadoop/common
+$HADOOP_HOME/share/hadoop/hdfs
+$HADOOP_HOME/share/hadoop/hdfs/lib
+$HADOOP_HOME/share/hadoop/yarn
+$HADOOP_HOME/share/hadoop/mapreduce"
 
-# Tajo Jar Directory
-TAJO_JAR_DIRS="$TAJO_HOME
-${TAJO_HOME}/lib"
+HADOOP_EXT_CLASSPATH="${HADOOP_CONF_DIR}"
 
-for d in $TAJO_JAR_DIRS; do
-  for j in `find $d/*.jar ! -name "*test*"`; do
-    TAJO_BASE_CLASSPATH=$TAJO_BASE_CLASSPATH:$j
-  done
-done
+for d in $HADOOP_MODULE_DIRS; do
+  CLASSPATH=${CLASSPATH}:$d/*
+done;
 
-# add user-specified CLASSPATH last
-if [ "$TAJO_USER_CLASSPATH_FIRST" = "" ] && [ "$TAJO_CLASSPATH" != "" ]; then
-  ${TAJO_BASE_CLASSPATH}=${CLASSPATH}:${TAJO_CLASSPATH}
-fi
+export HADOOP_EXT_CLASSPATH
 
-export TAJO_BASE_CLASSPATH
+# Append $HADOOP_JAR_CLASSPATH to $CLASSPATH
+CLASSPATH="${CLASSPATH}:${HADOOP_EXT_CLASSPATH}"
 
-CLASSPATH="${CLASSPATH}:${TAJO_BASE_CLASSPATH}"
+HDFS_LIBRARY_PATH="${HADOOP_HOME}/lib/native/"
 
 ##############################################################################
 # Find and Set Hive CLASSPATH
@@ -247,23 +226,23 @@ if [ -d ${HIVE_LIB} ]; then
     CLASSPATH=${CLASSPATH}:$f;
   done
 
-  for f in ${HIVE_LIB}/libfb303-*.jar; do
+  for f in ${HIVE_LIB}/libthrift-*.jar; do
     CLASSPATH=${CLASSPATH}:$f;
   done
 
-  for f in ${HIVE_LIB}/jdo-api-*.jar; do
-    CLASSPATH=${CLASSPATH}:$f;
+  for f in ${HIVE_LIB}/libfb*.jar; do
+      CLASSPATH=${CLASSPATH}:$f;
   done
 
-  for f in ${HIVE_LIB}/datanucleus-*.jar; do
+  for f in ${HIVE_LIB}/jdo*-api-*.jar; do
     CLASSPATH=${CLASSPATH}:$f;
   done
 
-  for f in $HIVE_HOME/hcatalog/share/hcatalog/hcatalog-core-*.jar; do
+  for f in ${HIVE_LIB}/datanucleus-*.jar; do
     CLASSPATH=${CLASSPATH}:$f;
   done
 
-  for f in $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-client-core-*.jar; do
+  for f in $HIVE_HOME/hcatalog/share/hcatalog/hcatalog-core-*.jar; do
     CLASSPATH=${CLASSPATH}:$f;
   done
 fi
@@ -273,29 +252,50 @@ if [ "${HIVE_JDBC_DRIVER_DIR}" != "" ]; then
 fi
 
 ##############################################################################
-# Find and Set Hadoop CLASSPATH
+# Find and Set Tajo CLASSPATH
 ##############################################################################
 
-# HADOOP JAR DIRS
-HADOOP_MODULE_DIRS="$HADOOP_HOME/share/hadoop/common/lib
-$HADOOP_HOME/share/hadoop/common
-$HADOOP_HOME/share/hadoop/hdfs
-$HADOOP_HOME/share/hadoop/hdfs/lib
-$HADOOP_HOME/share/hadoop/yarn/lib
-$HADOOP_HOME/share/hadoop/yarn"
+# Add java common jars to TAJO_BASE_CLASSPATH
+TAJO_BASE_CLASSPATH="${JAVA_HOME}/lib/tools.jar"
 
-HADOOP_EXT_CLASSPATH="${HADOOP_CONF_DIR}"
+# add user-specified CLASSPATH firstly
+if [ "$TAJO_USER_CLASSPATH_FIRST" != "" ] && [ "$TAJO_CLASSPATH" != "" ] ; then
+  TAJO_BASE_CLASSPATH=${TAJO_BASE_CLASSPATH}:${TAJO_CLASSPATH}
+fi
 
-for d in $HADOOP_MODULE_DIRS; do
-  CLASSPATH=${CLASSPATH}:$d/*
-done;
+# for developers, add Tajo classes to TAJO_BASE_CLASSPATH
+if [ -d "$TAJO_HOME/target/classes" ]; then
+  TAJO_BASE_CLASSPATH=${TAJO_BASE_CLASSPATH}:$TAJO_HOME/target/classes
+fi
+if [ -d "$TAJO_HOME/target/test/classes" ]; then
+  TAJO_BASE_CLASSPATH=${TAJO_BASE_CLASSPATH}:$TAJO_HOME/target/test/classes
+fi
 
-export HADOOP_EXT_CLASSPATH
+# so that filenames w/ spaces are handled correctly in loops below
+IFS=$'\n'
 
-# Append $HADOOP_JAR_CLASSPATH to $CLASSPATH
-CLASSPATH="${CLASSPATH}:${HADOOP_EXT_CLASSPATH}"
+# TAJO_BASE_CLASSPATH contains $TAJO_CONF_DIR before containing jars.
+TAJO_BASE_CLASSPATH="${TAJO_CONF_DIR}"
+
+# Tajo Jar Directory
+TAJO_JAR_DIRS="$TAJO_HOME
+${TAJO_HOME}/lib"
+
+for d in $TAJO_JAR_DIRS; do
+  for j in `find $d/*.jar ! -name "*test*"`; do
+    TAJO_BASE_CLASSPATH=$TAJO_BASE_CLASSPATH:$j
+  done
+done
+
+# add user-specified CLASSPATH last
+if [ "$TAJO_USER_CLASSPATH_FIRST" = "" ] && [ "$TAJO_CLASSPATH" != "" ]; then
+  ${TAJO_BASE_CLASSPATH}=${CLASSPATH}:${TAJO_CLASSPATH}
+fi
+
+export TAJO_BASE_CLASSPATH
+
+CLASSPATH="${CLASSPATH}:${TAJO_BASE_CLASSPATH}"
 
-HDFS_LIBRARY_PATH="${HADOOP_HOME}/lib/native/"
 ##############################################################################
 # Hadoop Home Configuration End
 ##############################################################################
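
The bin/tajo change swaps the order of the classpath sections: the Hadoop module directories (now including
share/hadoop/mapreduce, needed for the shuffle classes above) and HADOOP_CONF_DIR go onto the classpath
first, with the Hive and Tajo entries appended afterwards. Roughly, as a sketch of the resulting order
(assuming HADOOP_HOME and HADOOP_CONF_DIR are set):

  # Hadoop module jars first ...
  for d in "$HADOOP_HOME"/share/hadoop/common/lib "$HADOOP_HOME"/share/hadoop/common \
           "$HADOOP_HOME"/share/hadoop/hdfs "$HADOOP_HOME"/share/hadoop/hdfs/lib \
           "$HADOOP_HOME"/share/hadoop/yarn "$HADOOP_HOME"/share/hadoop/mapreduce; do
    CLASSPATH="${CLASSPATH}:$d/*"
  done
  # ... then the Hadoop configuration directory ...
  CLASSPATH="${CLASSPATH}:${HADOOP_CONF_DIR}"
  # ... and the Hive jars (if any), the Tajo conf dir, and the Tajo jars are appended after this point.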

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-docs/src/main/sphinx/configuration/cluster_setup.rst
----------------------------------------------------------------------
diff --git a/tajo-docs/src/main/sphinx/configuration/cluster_setup.rst b/tajo-docs/src/main/sphinx/configuration/cluster_setup.rst
index 89db12c..c95ee8a 100644
--- a/tajo-docs/src/main/sphinx/configuration/cluster_setup.rst
+++ b/tajo-docs/src/main/sphinx/configuration/cluster_setup.rst
@@ -28,7 +28,7 @@ Please add the following configs to tajo-site.xml file:
   </property>
 
   <property>
-    <name>tajo.master.client-rpc.addres</name>
+    <name>tajo.master.client-rpc.address</name>
     <value>hostname:26002</value>
   </property>
 

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-docs/src/main/sphinx/getting_started/prerequisites.rst
----------------------------------------------------------------------
diff --git a/tajo-docs/src/main/sphinx/getting_started/prerequisites.rst b/tajo-docs/src/main/sphinx/getting_started/prerequisites.rst
index abfd04e..d4e20da 100644
--- a/tajo-docs/src/main/sphinx/getting_started/prerequisites.rst
+++ b/tajo-docs/src/main/sphinx/getting_started/prerequisites.rst
@@ -2,6 +2,6 @@
 Prerequisites
 **********************
 
- * Hadoop 2.2.0 and higher
+ * Hadoop 2.2.0 or higher
  * Java 1.6 or higher
  * Protocol buffer 2.5.0
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-docs/src/main/sphinx/jdbc_driver.rst
----------------------------------------------------------------------
diff --git a/tajo-docs/src/main/sphinx/jdbc_driver.rst b/tajo-docs/src/main/sphinx/jdbc_driver.rst
index 306cdc1..515e509 100644
--- a/tajo-docs/src/main/sphinx/jdbc_driver.rst
+++ b/tajo-docs/src/main/sphinx/jdbc_driver.rst
@@ -117,7 +117,7 @@ In addition to the following JAR files, please don't forgot including
   * commons-logging-1.1.1.jar
   * guava-11.0.2.jar
   * protobuf-java-2.5.0.jar
-  * netty-3.6.2.Final.jar
+  * netty-3.6.6.Final.jar
   * commons-lang-2.5.jar
   * commons-configuration-1.6.jar
   * slf4j-api-1.7.5.jar

http://git-wip-us.apache.org/repos/asf/tajo/blob/fc92ed5c/tajo-project/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-project/pom.xml b/tajo-project/pom.xml
index cae8602..aade02e 100644
--- a/tajo-project/pom.xml
+++ b/tajo-project/pom.xml
@@ -35,9 +35,9 @@
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-    <tajo.version>0.8.0-SNAPSHOT</tajo.version>
-    <hadoop.version>2.2.0</hadoop.version>
+    <hadoop.version>2.3.0</hadoop.version>
     <protobuf.version>2.5.0</protobuf.version>
+    <tajo.version>0.8.0-SNAPSHOT</tajo.version>
     <tajo.root>${project.parent.relativePath}/..</tajo.root>
   </properties>
 
@@ -575,6 +575,38 @@
         <artifactId>maven-surefire-report-plugin</artifactId>
         <version>2.15</version>
       </plugin>
+
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <version>1.8</version>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>src/main/hadoop-${hadoop.version}</source>
+              </sources>
+            </configuration>
+          </execution>
+          <execution>
+            <id>add-source-2.3</id>
+            <phase>prepare-package</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>src/main/hadoop-${hadoop.version}</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 
@@ -747,6 +779,21 @@
         <type>test-jar</type>
         <scope>test</scope>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
+        <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <artifactId>hadoop-mapreduce-client-core</artifactId>
+            <groupId>org.apache.hadoop</groupId>
+          </exclusion>
+          <exclusion>
+            <artifactId>hadoop-yarn-server-nodemanager</artifactId>
+            <groupId>org.apache.hadoop</groupId>
+          </exclusion>
+        </exclusions>
+      </dependency>
 
       <dependency>
         <groupId>com.google.protobuf</groupId>