Posted to commits@tajo.apache.org by hy...@apache.org on 2013/07/02 16:15:59 UTC

[05/51] [partial] TAJO-22: The package prefix should be org.apache.tajo. (DaeMyung Kang via hyunsik)

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/bc6359b8/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TajoTestingCluster.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TajoTestingCluster.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TajoTestingCluster.java
new file mode 100644
index 0000000..9f620e8
--- /dev/null
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TajoTestingCluster.java
@@ -0,0 +1,570 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.Closeables;
+import com.google.common.io.Files;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.tajo.catalog.*;
+import org.apache.tajo.catalog.proto.CatalogProtos;
+import org.apache.tajo.client.TajoClient;
+import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.conf.TajoConf.ConfVars;
+import org.apache.tajo.master.TajoMaster;
+import org.apache.tajo.util.NetUtils;
+
+import java.io.*;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.sql.ResultSet;
+import java.util.UUID;
+
+public class TajoTestingCluster {
+	private static Log LOG = LogFactory.getLog(TajoTestingCluster.class);
+	private TajoConf conf;
+
+  protected MiniTajoYarnCluster yarnCluster;
+  private FileSystem defaultFS;
+  private MiniDFSCluster dfsCluster;
+	private MiniCatalogServer catalogServer;
+
+
+  private TajoMaster tajoMaster;
+
+	// If non-null, then already a cluster running.
+	private File clusterTestBuildDir = null;
+
+	/**
+	 * System property key to get test directory value.
+	 * The name is kept as-is because the mini DFS cluster hard-codes where it puts test data.
+	 */
+	public static final String TEST_DIRECTORY_KEY =
+      MiniDFSCluster.PROP_TEST_BUILD_DATA;
+
+	/**
+	 * Default parent directory for test output.
+	 */
+	public static final String DEFAULT_TEST_DIRECTORY = "target/test-data";
+
+	public TajoTestingCluster() {
+		this.conf = new TajoConf();
+	}
+
+	public TajoConf getConfiguration() {
+		return this.conf;
+	}
+
+	public void initTestDir() {
+		if (System.getProperty(TEST_DIRECTORY_KEY) == null) {
+			clusterTestBuildDir = setupClusterTestBuildDir();
+			System.setProperty(TEST_DIRECTORY_KEY,
+          clusterTestBuildDir.getAbsolutePath());
+		}
+	}
+
+	/**
+	 * @return Where to write test data on local filesystem; usually
+	 * {@link #DEFAULT_TEST_DIRECTORY}
+	 * @see #setupClusterTestBuildDir()
+	 */
+	public static File getTestDir() {
+		return new File(System.getProperty(TEST_DIRECTORY_KEY,
+			DEFAULT_TEST_DIRECTORY));
+	}
+
+	/**
+	 * @param subdirName
+	 * @return Path to a subdirectory named <code>subdirName</code> under
+	 * {@link #getTestDir()}.
+	 * @see #setupClusterTestBuildDir()
+	 */
+	public static File getTestDir(final String subdirName) {
+		return new File(getTestDir(), subdirName);
+  }
+
+	public File setupClusterTestBuildDir() {
+		String randomStr = UUID.randomUUID().toString();
+		String dirStr = getTestDir(randomStr).toString();
+		File dir = new File(dirStr).getAbsoluteFile();
+		// Have it cleaned up on exit
+		dir.deleteOnExit();
+		return dir;
+	}
+
+  ////////////////////////////////////////////////////////
+  // HDFS Section
+  ////////////////////////////////////////////////////////
+  /**
+   * Start a minidfscluster.
+   * @param servers How many DNs to start.
+   * @throws Exception
+   * @see #shutdownMiniDFSCluster()
+   * @return The mini dfs cluster created.
+   */
+  public MiniDFSCluster startMiniDFSCluster(int servers) throws Exception {
+    return startMiniDFSCluster(servers, null, null);
+  }
+
+  /**
+   * Start a minidfscluster.
+   * Only one cluster can be created at a time.
+   * @param servers How many DNs to start.
+   * @param dir Where to home your dfs cluster.
+   * @param hosts hostnames of the DNs to run on.
+   * @see #shutdownMiniDFSCluster()
+   * @return The mini dfs cluster created.
+   * @throws java.io.IOException
+   */
+  public MiniDFSCluster startMiniDFSCluster(int servers,
+                                            final File dir,
+                                            final String hosts[])
+      throws IOException {
+    if (dir == null) {
+      this.clusterTestBuildDir = setupClusterTestBuildDir();
+    } else {
+      this.clusterTestBuildDir = dir;
+    }
+
+    System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA,
+        this.clusterTestBuildDir.toString());
+
+    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
+    builder.hosts(hosts);
+    builder.numDataNodes(servers);
+    builder.format(true);
+    builder.manageNameDfsDirs(true);
+    builder.manageDataDfsDirs(true);
+    this.dfsCluster = builder.build();
+
+    // Set this just-started cluster as our filesystem.
+    this.defaultFS = this.dfsCluster.getFileSystem();
+    this.conf.set("fs.defaultFS", defaultFS.getUri().toString());
+    // Do old style too just to be safe.
+    this.conf.set("fs.default.name", defaultFS.getUri().toString());
+
+    return this.dfsCluster;
+  }
+
+  public void shutdownMiniDFSCluster() throws Exception {
+    if (this.dfsCluster != null) {
+      // The shutdown below throws an AsynchronousCloseException per DataNode.
+      this.dfsCluster.shutdown();
+    }
+  }
+
+  public boolean isRunningDFSCluster() {
+    return this.defaultFS != null;
+  }
+
+  public MiniDFSCluster getMiniDFSCluster() {
+    return this.dfsCluster;
+  }
+
+  public FileSystem getDefaultFileSystem() {
+    return this.defaultFS;
+  }
+
+  ////////////////////////////////////////////////////////
+  // Catalog Section
+  ////////////////////////////////////////////////////////
+  public MiniCatalogServer startCatalogCluster() throws Exception {
+    TajoConf c = getConfiguration();
+
+    if(clusterTestBuildDir == null) {
+      clusterTestBuildDir = setupClusterTestBuildDir();
+    }
+
+    conf.set(CatalogConstants.STORE_CLASS, "org.apache.tajo.catalog.store.MemStore");
+    conf.set(CatalogConstants.JDBC_URI, "jdbc:derby:target/test-data/tcat/db");
+    LOG.info("Apache Derby repository is set to "+conf.get(CatalogConstants.JDBC_URI));
+    conf.setVar(ConfVars.CATALOG_ADDRESS, "localhost:0");
+
+    catalogServer = new MiniCatalogServer(conf);
+    CatalogServer catServer = catalogServer.getCatalogServer();
+    InetSocketAddress sockAddr = catServer.getBindAddress();
+    c.setVar(ConfVars.CATALOG_ADDRESS, NetUtils.getIpPortString(sockAddr));
+
+    return this.catalogServer;
+  }
+
+  public void shutdownCatalogCluster() {
+    this.catalogServer.shutdown();
+  }
+
+  public MiniCatalogServer getMiniCatalogCluster() {
+    return this.catalogServer;
+  }
+
+  ////////////////////////////////////////////////////////
+  // Tajo Cluster Section
+  ////////////////////////////////////////////////////////
+  private void startMiniTajoCluster(File testBuildDir,
+                                               final int numSlaves,
+                                               boolean local) throws Exception {
+    TajoConf c = getConfiguration();
+    c.setVar(ConfVars.TASKRUNNER_LISTENER_ADDRESS, "localhost:0");
+    c.setVar(ConfVars.CLIENT_SERVICE_ADDRESS, "localhost:0");
+    c.setVar(ConfVars.CATALOG_ADDRESS, "localhost:0");
+    c.set(CatalogConstants.STORE_CLASS, "org.apache.tajo.catalog.store.MemStore");
+    c.set(CatalogConstants.JDBC_URI, "jdbc:derby:target/test-data/tcat/db");
+    LOG.info("derby repository is set to "+conf.get(CatalogConstants.JDBC_URI));
+
+    if (!local) {
+      c.setVar(ConfVars.ROOT_DIR,
+          getMiniDFSCluster().getFileSystem().getUri() + "/tajo");
+    } else {
+      c.setVar(ConfVars.ROOT_DIR,
+          clusterTestBuildDir.getAbsolutePath() + "/tajo");
+    }
+
+    tajoMaster = new TajoMaster();
+    tajoMaster.init(c);
+    tajoMaster.start();
+
+    this.conf.setVar(ConfVars.TASKRUNNER_LISTENER_ADDRESS, c.getVar(ConfVars.TASKRUNNER_LISTENER_ADDRESS));
+    this.conf.setVar(ConfVars.CLIENT_SERVICE_ADDRESS, c.getVar(ConfVars.CLIENT_SERVICE_ADDRESS));
+    this.conf.setVar(ConfVars.CATALOG_ADDRESS, c.getVar(ConfVars.CATALOG_ADDRESS));
+
+    LOG.info("Mini Tajo cluster is up");
+  }
+
+  public void restartTajoCluster(int numSlaves) throws Exception {
+    tajoMaster.stop();
+    tajoMaster.start();
+
+    LOG.info("Minicluster has been restarted");
+  }
+
+  public TajoMaster getMaster() {
+    return this.tajoMaster;
+  }
+
+  public void shutdownMiniTajoCluster() {
+    if(this.tajoMaster != null) {
+      this.tajoMaster.stop();
+    }
+    this.tajoMaster= null;
+  }
+
+  ////////////////////////////////////////////////////////
+  // Meta Cluster Section
+  ////////////////////////////////////////////////////////
+  /**
+   * @throws java.io.IOException If a cluster (dfs or engine) is already running.
+   */
+  void isRunningCluster(String passedBuildPath) throws IOException {
+    if (this.clusterTestBuildDir == null || passedBuildPath != null) return;
+    throw new IOException("Cluster already running at " +
+        this.clusterTestBuildDir);
+  }
+
+  /**
+   * This method starts up a Tajo cluster with a given number of workers in
+   * distributed mode.
+   *
+   * @param numSlaves the number of Tajo workers to start up
+   * @throws Exception
+   */
+  public void startMiniCluster(final int numSlaves)
+      throws Exception {
+    String localHostName = InetAddress.getLocalHost().getHostName();
+    startMiniCluster(numSlaves, new String[] {localHostName});
+  }
+
+  public void startMiniCluster(final int numSlaves,
+                                          final String [] dataNodeHosts) throws Exception {
+    // the conf is set to the distributed mode.
+    this.conf.setBoolVar(ConfVars.CLUSTER_DISTRIBUTED, true);
+
+    int numDataNodes = numSlaves;
+    if(dataNodeHosts != null && dataNodeHosts.length != 0) {
+      numDataNodes = dataNodeHosts.length;
+    }
+
+    LOG.info("Starting up minicluster with 1 master(s) and " +
+        numSlaves + " worker(s) and " + numDataNodes + " datanode(s)");
+
+    // If we already put up a cluster, fail.
+    String testBuildPath = conf.get(TEST_DIRECTORY_KEY, null);
+    isRunningCluster(testBuildPath);
+    if (testBuildPath != null) {
+      LOG.info("Using passed path: " + testBuildPath);
+    }
+
+    // Make a new random dir to home everything in.  Set it as system property.
+    // minidfs reads home from system property.
+    this.clusterTestBuildDir = testBuildPath == null?
+        setupClusterTestBuildDir() : new File(testBuildPath);
+
+    System.setProperty(TEST_DIRECTORY_KEY,
+        this.clusterTestBuildDir.getAbsolutePath());
+
+    startMiniDFSCluster(numDataNodes, this.clusterTestBuildDir, dataNodeHosts);
+    this.dfsCluster.waitClusterUp();
+
+    LOG.info("Starting up YARN cluster");
+    // Scheduler properties required for YARN to work
+    conf.set("yarn.scheduler.capacity.root.queues", "default");
+    conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+
+    conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 384);
+    conf.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 3000);
+    conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES, 1);
+    conf.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_CORES, 2);
+
+    if (yarnCluster == null) {
+      yarnCluster = new MiniTajoYarnCluster(TajoTestingCluster.class.getName(), 3);
+      yarnCluster.init(conf);
+      yarnCluster.start();
+
+      conf.set(YarnConfiguration.RM_ADDRESS,
+          NetUtils.getIpPortString(yarnCluster.getResourceManager().
+              getClientRMService().getBindAddress()));
+      conf.set(YarnConfiguration.RM_SCHEDULER_ADDRESS,
+          NetUtils.getIpPortString(yarnCluster.getResourceManager().
+              getApplicationMasterService().getBindAddress()));
+
+      URL url = Thread.currentThread().getContextClassLoader().getResource("yarn-site.xml");
+      if (url == null) {
+        throw new RuntimeException("Could not find 'yarn-site.xml' dummy file in classpath");
+      }
+      yarnCluster.getConfig().set("yarn.application.classpath", new File(url.getPath()).getParent());
+      OutputStream os = new FileOutputStream(new File(url.getPath()));
+      yarnCluster.getConfig().writeXml(os);
+      os.close();
+    }
+
+    startMiniTajoCluster(this.clusterTestBuildDir, numSlaves, false);
+  }
+
+  public void startMiniClusterInLocal(final int numSlaves) throws Exception {
+    // the conf is set to the distributed mode.
+    this.conf.setBoolVar(ConfVars.CLUSTER_DISTRIBUTED, true);
+
+    // If we already put up a cluster, fail.
+    String testBuildPath = conf.get(TEST_DIRECTORY_KEY, null);
+    isRunningCluster(testBuildPath);
+    if (testBuildPath != null) {
+      LOG.info("Using passed path: " + testBuildPath);
+    }
+
+    // Make a new random dir to home everything in.  Set it as system property.
+    // minidfs reads home from system property.
+    this.clusterTestBuildDir = testBuildPath == null?
+        setupClusterTestBuildDir() : new File(testBuildPath);
+
+    System.setProperty(TEST_DIRECTORY_KEY,
+        this.clusterTestBuildDir.getAbsolutePath());
+
+    startMiniTajoCluster(this.clusterTestBuildDir, numSlaves, true);
+  }
+
+  public void shutdownMiniCluster() throws IOException {
+    LOG.info("Shutting down minicluster");
+    shutdownMiniTajoCluster();
+
+    if(this.catalogServer != null) {
+      shutdownCatalogCluster();
+    }
+
+    if(this.dfsCluster != null) {
+      this.dfsCluster.shutdown();
+    }
+
+    if(this.clusterTestBuildDir != null && this.clusterTestBuildDir.exists()) {
+      LocalFileSystem localFS = LocalFileSystem.getLocal(conf);
+      localFS.delete(
+          new Path(clusterTestBuildDir.toString()), true);
+      this.clusterTestBuildDir = null;
+    }
+
+    LOG.info("Minicluster is down");
+  }
+
+  public static ResultSet runInLocal(String[] tableNames,
+                                     Schema[] schemas,
+                                     Options option,
+                                     String[][] tables,
+                                     String query) throws Exception {
+    TajoTestingCluster util = new TajoTestingCluster();
+    util.startMiniClusterInLocal(1);
+    TajoConf conf = util.getConfiguration();
+    TajoClient client = new TajoClient(conf);
+
+    File tmpDir = util.setupClusterTestBuildDir();
+    for (int i = 0; i < tableNames.length; i++) {
+      File tableDir = new File(tmpDir,tableNames[i]);
+      tableDir.mkdirs();
+      File dataDir = new File(tableDir, "data");
+      dataDir.mkdirs();
+      File tableFile = new File(dataDir, tableNames[i]);
+      writeLines(tableFile, tables[i]);
+      TableMeta meta = CatalogUtil
+          .newTableMeta(schemas[i], CatalogProtos.StoreType.CSV, option);
+      client.createTable(tableNames[i], new Path(tableDir.getAbsolutePath()), meta);
+    }
+    Thread.sleep(1000);
+    ResultSet res = client.executeQueryAndGetResult(query);
+    util.shutdownMiniCluster();
+    return res;
+  }
+
+  public static ResultSet run(String[] names,
+                              String[] tablepaths,
+                              Schema[] schemas,
+                              Options option,
+                              String query) throws Exception {
+    TajoTestingCluster util = new TajoTestingCluster();
+    util.startMiniCluster(1);
+    TajoConf conf = util.getConfiguration();
+    TajoClient client = new TajoClient(conf);
+
+    FileSystem fs = util.getDefaultFileSystem();
+    Path rootDir = util.getMaster().
+        getStorageManager().getBaseDir();
+    fs.mkdirs(rootDir);
+    for (int i = 0; i < tablepaths.length; i++) {
+      Path localPath = new Path(tablepaths[i]);
+      Path tablePath = new Path(rootDir, names[i]);
+      fs.mkdirs(tablePath);
+      Path dataPath = new Path(tablePath, "data");
+      fs.mkdirs(dataPath);
+      Path dfsPath = new Path(dataPath, localPath.getName());
+      fs.copyFromLocalFile(localPath, dfsPath);
+      TableMeta meta = CatalogUtil.newTableMeta(schemas[i],
+          CatalogProtos.StoreType.CSV, option);
+      client.createTable(names[i], tablePath, meta);
+    }
+    Thread.sleep(1000);
+    ResultSet res = client.executeQueryAndGetResult(query);
+    util.shutdownMiniCluster();
+    return res;
+  }
+
+  public static ResultSet run(String[] names,
+                              Schema[] schemas,
+                              Options option,
+                              String[][] tables,
+                              String query) throws Exception {
+    TpchTestBase instance = TpchTestBase.getInstance();
+    TajoTestingCluster util = instance.getTestingCluster();
+    TajoConf conf = util.getConfiguration();
+    TajoClient client = new TajoClient(conf);
+
+    FileSystem fs = util.getDefaultFileSystem();
+    Path rootDir = util.getMaster().
+        getStorageManager().getBaseDir();
+    fs.mkdirs(rootDir);
+    for (int i = 0; i < names.length; i++) {
+      Path tablePath = new Path(rootDir, names[i]);
+      fs.mkdirs(tablePath);
+      Path dataPath = new Path(tablePath, "data");
+      fs.mkdirs(dataPath);
+      Path dfsPath = new Path(dataPath, names[i] + ".tbl");
+      FSDataOutputStream out = fs.create(dfsPath);
+      for (int j = 0; j < tables[i].length; j++) {
+        out.write((tables[i][j]+"\n").getBytes());
+      }
+      out.close();
+      TableMeta meta = CatalogUtil.newTableMeta(schemas[i],
+          CatalogProtos.StoreType.CSV, option);
+      client.createTable(names[i], tablePath, meta);
+    }
+    Thread.sleep(1000);
+    ResultSet res = client.executeQueryAndGetResult(query);
+    return res;
+  }
+
+  /**
+   * Write lines to a file.
+   *
+   * @param file File to write lines to
+   * @param lines Strings written to the file
+   * @throws java.io.IOException
+   */
+  private static void writeLines(File file, String... lines)
+      throws IOException {
+    Writer writer = Files.newWriter(file, Charsets.UTF_8);
+    try {
+      for (String line : lines) {
+        writer.write(line);
+        writer.write('\n');
+      }
+    } finally {
+      Closeables.closeQuietly(writer);
+    }
+  }
+
+
+	/**
+	 * @param args
+	 * @throws Exception
+	 */
+	public static void main(String[] args) throws Exception {
+		TajoTestingCluster cluster = new TajoTestingCluster();
+    File f = cluster.setupClusterTestBuildDir();
+    System.out.println("first setupClusterTestBuildDir: " + f);
+    f = cluster.setupClusterTestBuildDir();
+    System.out.println("second setupClusterTestBuildDir: " + f);
+    f = cluster.getTestDir();
+    System.out.println("getTestDir() after second: " + f);
+    f = cluster.getTestDir("abc");
+    System.out.println("getTestDir(\"abc\") after second: " + f);
+
+    cluster.initTestDir();
+    f = cluster.getTestDir();
+    System.out.println("getTestDir() after initTestDir: " + f);
+    f = cluster.getTestDir("abc");
+    System.out.println("getTestDir(\"abc\") after initTestDir: " + f);
+    f = cluster.setupClusterTestBuildDir();
+    System.out.println("setupClusterTestBuildDir() after initTestDir: " + f);
+
+    TajoTestingCluster cluster2 = new TajoTestingCluster();
+    File f2 = cluster2.setupClusterTestBuildDir();
+    System.out.println("first setupClusterTestBuildDir of cluster2: " + f2);
+    /*
+    String [] names = {"table1"};
+    String [][] tables = new String[1][];
+    tables[0] = new String[] {"a,b,c", "b,c,d"};
+
+    Schema [] schemas = new Schema[1];
+    schemas[0] = new Schema()
+          .addColumn("f1", CatalogProtos.DataType.STRING)
+          .addColumn("f2", CatalogProtos.DataType.STRING)
+          .addColumn("f3", CatalogProtos.DataType.STRING);
+
+    ResultSet res = runInLocal(names, schemas, tables, "select f1 from table1");
+    res.next();
+    System.out.println(res.getString(0));
+    res.next();
+    System.out.println(res.getString(0));
+    System.exit(0);
+    */
+	}
+}
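
For reference, a minimal sketch (not part of the patch) of how a test might drive TajoTestingCluster through the public methods defined above; the table and query are placeholders:

import java.sql.ResultSet;

import org.apache.tajo.TajoTestingCluster;
import org.apache.tajo.client.TajoClient;
import org.apache.tajo.conf.TajoConf;

public class MiniClusterUsageSketch {
  public static void main(String[] args) throws Exception {
    TajoTestingCluster util = new TajoTestingCluster();
    util.startMiniClusterInLocal(1);              // single master, local root dir
    TajoConf conf = util.getConfiguration();      // service addresses now point at the bound ports
    TajoClient client = new TajoClient(conf);
    try {
      // Placeholder query against a table created elsewhere.
      ResultSet res = client.executeQueryAndGetResult("select * from table1");
      while (res.next()) {
        System.out.println(res.getString(1));
      }
    } finally {
      client.close();
      util.shutdownMiniCluster();                 // stops the master and removes the test build dir
    }
  }
}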

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/bc6359b8/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TestQueryIdFactory.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TestQueryIdFactory.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TestQueryIdFactory.java
new file mode 100644
index 0000000..b4d920f
--- /dev/null
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TestQueryIdFactory.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+
+public class TestQueryIdFactory {
+  
+  @Before
+  public void setup() {
+    QueryIdFactory.reset();
+  }
+
+  @Test
+  public void testNewQueryId() {
+    QueryId qid1 = QueryIdFactory.newQueryId();
+    QueryId qid2 = QueryIdFactory.newQueryId();
+    assertTrue(qid1.compareTo(qid2) < 0);
+  }
+  
+  @Test
+  public void testNewSubQueryId() {
+    QueryId qid = QueryIdFactory.newQueryId();
+    SubQueryId subqid1 = QueryIdFactory.newSubQueryId(qid);
+    SubQueryId subqid2 = QueryIdFactory.newSubQueryId(qid);
+    assertTrue(subqid1.compareTo(subqid2) < 0);
+  }
+  
+  @Test
+  public void testNewQueryUnitId() {
+    QueryId qid = QueryIdFactory.newQueryId();
+    SubQueryId subid = QueryIdFactory.newSubQueryId(qid);
+    QueryUnitId quid1 = QueryIdFactory.newQueryUnitId(subid);
+    QueryUnitId quid2 = QueryIdFactory.newQueryUnitId(subid);
+    assertTrue(quid1.compareTo(quid2) < 0);
+  }
+}
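
The factory exercised above produces a three-level ID hierarchy (query, sub-query, query unit), each level monotonically increasing. A compact sketch, assuming the ID classes live in org.apache.tajo as in this test:

import org.apache.tajo.QueryId;
import org.apache.tajo.QueryIdFactory;
import org.apache.tajo.QueryUnitId;
import org.apache.tajo.SubQueryId;

public class QueryIdHierarchySketch {
  public static void main(String[] args) {
    QueryIdFactory.reset();                                         // start a fresh sequence
    QueryId queryId = QueryIdFactory.newQueryId();                  // one per query
    SubQueryId subQueryId = QueryIdFactory.newSubQueryId(queryId);  // one per sub-query (stage)
    QueryUnitId unitId = QueryIdFactory.newQueryUnitId(subQueryId); // one per query unit (task)
    System.out.println(queryId + " > " + subQueryId + " > " + unitId);
  }
}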

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/bc6359b8/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TestQueryUnitId.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TestQueryUnitId.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TestQueryUnitId.java
new file mode 100644
index 0000000..837fbba
--- /dev/null
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TestQueryUnitId.java
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo;
+
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.util.BuilderUtils;
+import org.junit.Test;
+import org.apache.tajo.util.TajoIdUtils;
+
+import static org.junit.Assert.*;
+
+public class TestQueryUnitId {
+  @Test
+  public void testQueryId() {
+    long ts1 = 1315890136000l;
+    long ts2 = 1315890136001l;
+
+    QueryId j1 = createQueryId(ts1, 2, 1);
+    QueryId j2 = createQueryId(ts1, 1, 2);
+    QueryId j3 = createQueryId(ts2, 1, 2);
+    QueryId j4 = createQueryId(ts1, 2, 1);
+
+    assertTrue(j1.equals(j4));
+    assertFalse(j1.equals(j2));
+    assertFalse(j1.equals(j3));
+
+    assertTrue(j1.compareTo(j4) == 0);
+    assertTrue(j1.compareTo(j2) > 0);
+    assertTrue(j1.compareTo(j3) < 0);
+
+    assertTrue(j1.hashCode() == j4.hashCode());
+    assertFalse(j1.hashCode() == j2.hashCode());
+    assertFalse(j1.hashCode() == j3.hashCode());
+
+    QueryId j5 = createQueryId(ts1, 231415, 2);
+    assertEquals("q_" + ts1 + "_0002_000001", j1.toString());
+    assertEquals("q_" + ts1 + "_231415_000002", j5.toString());
+  }
+
+  @Test
+  public void testQueryIds() {
+    long timeId = 1315890136000l;
+    
+    QueryId queryId = createQueryId(timeId, 1, 1);
+    assertEquals("q_" + timeId + "_0001_000001", queryId.toString());
+    
+    SubQueryId subId = TajoIdUtils.newSubQueryId(queryId, 2);
+    assertEquals("sq_" + timeId +"_0001_000001_02", subId.toString());
+    
+    QueryUnitId qId = new QueryUnitId(subId, 5);
+    assertEquals("t_" + timeId + "_0001_000001_02_000005", qId.toString());
+
+    QueryUnitAttemptId attemptId = new QueryUnitAttemptId(qId, 4);
+    assertEquals("ta_" + timeId + "_0001_000001_02_000005_04", attemptId.toString());
+  }
+
+  @Test
+  public void testEqualsObject() {
+    long timeId = System.currentTimeMillis();
+    
+    QueryId queryId1 = createQueryId(timeId, 1, 1);
+    QueryId queryId2 = createQueryId(timeId, 2, 2);
+    assertNotSame(queryId1, queryId2);    
+    QueryId queryId3 = createQueryId(timeId, 1, 1);
+    assertEquals(queryId1, queryId3);
+    
+    SubQueryId sid1 = TajoIdUtils.newSubQueryId(queryId1, 1);
+    SubQueryId sid2 = TajoIdUtils.newSubQueryId(queryId1, 2);    
+    assertNotSame(sid1, sid2);
+    SubQueryId sid3 = TajoIdUtils.newSubQueryId(queryId1, 1);
+    assertEquals(sid1, sid3);
+    
+    QueryUnitId qid1 = new QueryUnitId(sid1, 9);
+    QueryUnitId qid2 = new QueryUnitId(sid1, 10);
+    assertNotSame(qid1, qid2);
+    QueryUnitId qid3 = new QueryUnitId(sid1, 9);
+    assertEquals(qid1, qid3);
+  }
+
+  @Test
+  public void testCompareTo() {
+    long time = System.currentTimeMillis();
+    
+    QueryId queryId1 = createQueryId(time, 1, 1);
+    QueryId queryId2 = createQueryId(time, 2, 2);
+    QueryId queryId3 = createQueryId(time, 1, 1);
+    assertEquals(-1, queryId1.compareTo(queryId2));
+    assertEquals(1, queryId2.compareTo(queryId1));
+    assertEquals(0, queryId3.compareTo(queryId1));
+    
+    SubQueryId sid1 = TajoIdUtils.newSubQueryId(queryId1, 1);
+    SubQueryId sid2 = TajoIdUtils.newSubQueryId(queryId1, 2);    
+    SubQueryId sid3 = TajoIdUtils.newSubQueryId(queryId1, 1);
+    assertEquals(-1, sid1.compareTo(sid2));
+    assertEquals(1, sid2.compareTo(sid1));
+    assertEquals(0, sid3.compareTo(sid1));
+    
+    QueryUnitId qid1 = new QueryUnitId(sid1, 9);
+    QueryUnitId qid2 = new QueryUnitId(sid1, 10);
+    QueryUnitId qid3 = new QueryUnitId(sid1, 9);
+    assertEquals(-1, qid1.compareTo(qid2));
+    assertEquals(1, qid2.compareTo(qid1));
+    assertEquals(0, qid3.compareTo(qid1));
+  }
+  
+  @Test
+  public void testConstructFromString() {
+    QueryIdFactory.reset();
+    QueryId qid1 = QueryIdFactory.newQueryId();
+    QueryId qid2 = TajoIdUtils.createQueryId(qid1.toString());
+    assertEquals(qid1, qid2);
+    
+    SubQueryId sub1 = QueryIdFactory.newSubQueryId(qid1);
+    SubQueryId sub2 = TajoIdUtils.newSubQueryId(sub1.toString());
+    assertEquals(sub1, sub2);
+    
+    QueryUnitId u1 = QueryIdFactory.newQueryUnitId(sub1);
+    QueryUnitId u2 = new QueryUnitId(u1.toString());
+    assertEquals(u1, u2);
+
+    QueryUnitAttemptId attempt1 = new QueryUnitAttemptId(u1, 1);
+    QueryUnitAttemptId attempt2 = new QueryUnitAttemptId(attempt1.toString());
+    assertEquals(attempt1, attempt2);
+  }
+
+  @Test
+  public void testConstructFromPB() {
+    QueryIdFactory.reset();
+    QueryId qid1 = QueryIdFactory.newQueryId();
+    QueryId qid2 = new QueryId(qid1.getProto());
+    assertEquals(qid1, qid2);
+
+    SubQueryId sub1 = QueryIdFactory.newSubQueryId(qid1);
+    SubQueryId sub2 = new SubQueryId(sub1.getProto());
+    assertEquals(sub1, sub2);
+
+    QueryUnitId u1 = QueryIdFactory.newQueryUnitId(sub1);
+    QueryUnitId u2 = new QueryUnitId(u1.getProto());
+    assertEquals(u1, u2);
+
+    QueryUnitAttemptId attempt1 = new QueryUnitAttemptId(u1, 1);
+    QueryUnitAttemptId attempt2 = new QueryUnitAttemptId(attempt1.getProto());
+    assertEquals(attempt1, attempt2);
+  }
+
+  public static QueryId createQueryId(long timestamp, int id, int attemptId) {
+    ApplicationId appId = BuilderUtils.newApplicationId(timestamp, id);
+
+    return TajoIdUtils.createQueryId(appId, attemptId);
+  }
+}
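
The assertions above pin down the printable ID formats. A small sketch (not part of the patch) that builds one ID at each level for a fixed timestamp, using the same helpers the test uses:

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.tajo.QueryId;
import org.apache.tajo.QueryUnitAttemptId;
import org.apache.tajo.QueryUnitId;
import org.apache.tajo.SubQueryId;
import org.apache.tajo.util.TajoIdUtils;

public class IdFormatSketch {
  public static void main(String[] args) {
    long ts = 1315890136000L;
    ApplicationId appId = BuilderUtils.newApplicationId(ts, 1);
    QueryId queryId = TajoIdUtils.createQueryId(appId, 1);          // q_<ts>_0001_000001
    SubQueryId subId = TajoIdUtils.newSubQueryId(queryId, 2);       // sq_<ts>_0001_000001_02
    QueryUnitId unitId = new QueryUnitId(subId, 5);                 // t_<ts>_0001_000001_02_000005
    QueryUnitAttemptId attempt = new QueryUnitAttemptId(unitId, 4); // ta_<ts>_0001_000001_02_000005_04
    System.out.println(attempt);
  }
}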

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/bc6359b8/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TpchTestBase.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TpchTestBase.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TpchTestBase.java
new file mode 100644
index 0000000..bc3b91d
--- /dev/null
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/TpchTestBase.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo;
+
+import com.google.common.collect.Maps;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.tajo.benchmark.TPCH;
+import org.apache.tajo.catalog.Options;
+import org.apache.tajo.catalog.Schema;
+import org.apache.tajo.storage.CSVFile;
+import org.apache.tajo.util.FileUtil;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.util.Map;
+
+public class TpchTestBase {
+  private static final Log LOG = LogFactory.getLog(TpchTestBase.class);
+
+  String [] names;
+  String [] paths;
+  String [][] tables;
+  Schema[] schemas;
+  Map<String, Integer> nameMap = Maps.newHashMap();
+  protected TPCH tpch;
+  protected LocalTajoTestingUtility util;
+
+  private static TpchTestBase testBase;
+
+  static {
+    try {
+      testBase = new TpchTestBase();
+      testBase.setUp();
+      Runtime.getRuntime().addShutdownHook(new ShutdownHook());
+    } catch (Exception e) {
+      LOG.error(e);
+    }
+  }
+
+  private TpchTestBase() throws IOException {
+    names = new String[] {"customer", "lineitem", "nation", "orders", "part", "partsupp", "region", "supplier"};
+    paths = new String[names.length];
+    for (int i = 0; i < names.length; i++) {
+      nameMap.put(names[i], i);
+    }
+
+    tpch = new TPCH();
+    tpch.loadSchemas();
+    tpch.loadQueries();
+
+    schemas = new Schema[names.length];
+    for (int i = 0; i < names.length; i++) {
+      schemas[i] = tpch.getSchema(names[i]);
+    }
+
+    tables = new String[names.length][];
+    File file;
+    for (int i = 0; i < names.length; i++) {
+      file = new File("src/test/tpch/" + names[i] + ".tbl");
+      tables[i] = FileUtil.readTextFile(file).split("\n");
+      paths[i] = file.getAbsolutePath();
+    }
+    try {
+      Thread.sleep(1000);
+    } catch (InterruptedException e) {
+      e.printStackTrace();
+    }
+  }
+
+  private void setUp() throws Exception {
+    util = new LocalTajoTestingUtility();
+    Options opt = new Options();
+    opt.put(CSVFile.DELIMITER, "|");
+    util.setup(names, paths, schemas, opt);
+  }
+
+  public static TpchTestBase getInstance() {
+    return testBase;
+  }
+
+  public ResultSet execute(String query) throws Exception {
+    return util.execute(query);
+  }
+
+  public TajoTestingCluster getTestingCluster() {
+    return util.getTestingCluster();
+  }
+
+  public static class ShutdownHook extends Thread {
+
+    @Override
+    public void run() {
+      try {
+        testBase.tearDown();
+      } catch (IOException e) {
+        LOG.error(e);
+      }
+    }
+  }
+
+  private void tearDown() throws IOException {
+    util.shutdown();
+  }
+}
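
A typical consumer of this base class only touches the singleton and execute(); a short sketch (the query is a placeholder):

import java.sql.ResultSet;

import org.apache.tajo.TpchTestBase;

public class TpchUsageSketch {
  public static void main(String[] args) throws Exception {
    TpchTestBase tpch = TpchTestBase.getInstance(); // cluster and TPC-H tables set up in the static block
    ResultSet res = tpch.execute("select count(*) from lineitem");  // placeholder query
    if (res.next()) {
      System.out.println("rows: " + res.getLong(1));
    }
    // No teardown here: the registered ShutdownHook stops the cluster at JVM exit.
  }
}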

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/bc6359b8/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/benchmark/TestTPCH.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/benchmark/TestTPCH.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/benchmark/TestTPCH.java
new file mode 100644
index 0000000..9e6ad8d
--- /dev/null
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/benchmark/TestTPCH.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.benchmark;
+
+import com.google.common.collect.Maps;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.apache.tajo.IntegrationTest;
+import org.apache.tajo.TpchTestBase;
+
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+@Category(IntegrationTest.class)
+public class TestTPCH {
+  private static TpchTestBase tpch;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    tpch = TpchTestBase.getInstance();
+  }
+
+  @AfterClass
+  public static void tearDown() throws IOException {
+  }
+
+  /**
+   * It verifies NTA-788.
+   */
+  @Test
+  public void testQ1OrderBy() throws Exception {
+    ResultSet res = tpch.execute("select l_returnflag, l_linestatus, count(*) as count_order from lineitem " +
+        "group by l_returnflag, l_linestatus order by l_returnflag, l_linestatus");
+
+    Map<String,Integer> result = Maps.newHashMap();
+    result.put("NO", 3);
+    result.put("RF", 2);
+
+    res.next();
+    assertTrue(result.get(res.getString(1) + res.getString(2)) == res.getInt(3));
+    res.next();
+    assertTrue(result.get(res.getString(1) + res.getString(2)) == res.getInt(3));
+    assertFalse(res.next());
+  }
+
+  @Test
+  public void testQ2FiveWayJoin() throws Exception {
+    ResultSet res = tpch.execute(
+        "select s_acctbal, s_name, n_name, p_partkey, p_mfgr, s_address, s_phone, s_comment, ps_supplycost, " +
+            "r_name, p_type, p_size " +
+            "from region join nation on n_regionkey = r_regionkey and r_name = 'AMERICA' " +
+            "join supplier on s_nationkey = n_nationkey " +
+            "join partsupp on s_suppkey = ps_suppkey " +
+            "join part on p_partkey = ps_partkey and p_type like '%BRASS' and p_size = 15");
+
+    assertTrue(res.next());
+    assertEquals("AMERICA", res.getString(10));
+    String [] pType = res.getString(11).split(" ");
+    assertEquals("BRASS", pType[pType.length - 1]);
+    assertEquals(15, res.getInt(12));
+    assertFalse(res.next());
+  }
+
+  @Test
+  public void testTPCH14Expr() throws Exception {
+    ResultSet res = tpch.execute("select 100 * sum(" +
+        "case when p_type like 'PROMO%' then l_extendedprice else 0 end) / sum(l_extendedprice * (1 - l_discount)) "
+        + "as promo_revenue from lineitem, part where l_partkey = p_partkey");
+
+    res.next();
+    assertEquals(33, res.getInt(1));
+  }
+}
\ No newline at end of file
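
The @Category(IntegrationTest.class) annotation above only needs a marker type plus a category-aware runner; an illustrative JUnit 4 suite (the suite class itself is not part of this patch):

import org.apache.tajo.IntegrationTest;   // marker type, typically just an empty interface
import org.apache.tajo.benchmark.TestTPCH;
import org.junit.experimental.categories.Categories;
import org.junit.experimental.categories.Categories.IncludeCategory;
import org.junit.runner.RunWith;
import org.junit.runners.Suite.SuiteClasses;

// Runs only the tests annotated with @Category(IntegrationTest.class).
@RunWith(Categories.class)
@IncludeCategory(IntegrationTest.class)
@SuiteClasses({TestTPCH.class})
public class IntegrationTestSuiteSketch {
}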

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/bc6359b8/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/client/TestTajoClient.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/client/TestTajoClient.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/client/TestTajoClient.java
new file mode 100644
index 0000000..1bdce52
--- /dev/null
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/client/TestTajoClient.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.client;
+
+import com.google.common.collect.Sets;
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.apache.tajo.BackendTestingUtil;
+import org.apache.tajo.IntegrationTest;
+import org.apache.tajo.TajoTestingCluster;
+import org.apache.tajo.catalog.TableDesc;
+import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.storage.StorageUtil;
+import org.apache.tajo.util.CommonTestingUtil;
+
+import java.io.IOException;
+import java.util.Set;
+
+import static org.junit.Assert.*;
+
+@Category(IntegrationTest.class)
+public class TestTajoClient {
+  private static TajoTestingCluster util;
+  private static TajoConf conf;
+  private static TajoClient tajo;
+  private static String TEST_PATH = "target/test-data/"
+      + TestTajoClient.class.getName();
+  private static Path testDir;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    util = new TajoTestingCluster();
+    util.startMiniCluster(1);
+    conf = util.getConfiguration();
+    Thread.sleep(3000);
+    tajo = new TajoClient(conf);
+
+    testDir = CommonTestingUtil.getTestDir(TEST_PATH);
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    util.shutdownMiniCluster();
+    tajo.close();
+  }
+
+  private static Path writeTmpTable(String tableName) throws IOException {
+    Path tablePath = StorageUtil.concatPath(testDir, tableName);
+    BackendTestingUtil.writeTmpTable(conf, testDir, tableName, true);
+    return tablePath;
+  }
+
+  @Test
+  public final void testAttachTable() throws IOException, ServiceException {
+    final String tableName = "attach";
+    Path tablePath = writeTmpTable(tableName);
+    assertFalse(tajo.existTable(tableName));
+    tajo.attachTable(tableName, tablePath);
+    assertTrue(tajo.existTable(tableName));
+    tajo.detachTable(tableName);
+    assertFalse(tajo.existTable(tableName));
+  }
+
+  @Test
+  public final void testUpdateQuery() throws IOException, ServiceException {
+    final String tableName = "testUpdateQuery";
+    Path tablePath = writeTmpTable(tableName);
+
+    assertFalse(tajo.existTable(tableName));
+    String tql =
+        "create external table " + tableName + " (deptname text, score integer) "
+            + "using csv location '" + tablePath + "'";
+    tajo.updateQuery(tql);
+    assertTrue(tajo.existTable(tableName));
+  }
+
+  @Test
+  public final void testCreateAndDropTable()
+      throws IOException, ServiceException {
+    final String tableName = "testCreateAndDropTable";
+    Path tablePath = writeTmpTable(tableName);
+
+    assertFalse(tajo.existTable(tableName));
+    tajo.createTable(tableName, tablePath, BackendTestingUtil.mockupMeta);
+    assertTrue(tajo.existTable(tableName));
+    tajo.dropTable(tableName);
+    assertFalse(tajo.existTable(tableName));
+    FileSystem fs = tablePath.getFileSystem(conf);
+    assertFalse(fs.exists(tablePath));
+  }
+
+  @Test
+  public final void testDDLByExecuteQuery() throws IOException, ServiceException {
+    TajoConf conf = util.getConfiguration();
+    final String tableName = "testDDLByExecuteQuery";
+    BackendTestingUtil.writeTmpTable(conf, "file:///tmp", tableName, false);
+
+    assertFalse(tajo.existTable(tableName));
+    String tql =
+        "create external table " + tableName + " (deptname text, score int4) "
+            + "using csv location 'file:///tmp/" + tableName + "'";
+    tajo.executeQueryAndGetResult(tql);
+    assertTrue(tajo.existTable(tableName));
+  }
+
+  // disabled
+  public final void testGetClusterInfo() throws IOException, InterruptedException {
+    assertEquals(1,tajo.getClusterInfo().size());
+  }
+
+  @Test
+  public final void testGetTableList() throws IOException, ServiceException {
+    final String tableName1 = "table1";
+    final String tableName2 = "table2";
+    Path table1Path = writeTmpTable(tableName1);
+    Path table2Path = writeTmpTable(tableName2);
+
+    assertFalse(tajo.existTable(tableName1));
+    assertFalse(tajo.existTable(tableName2));
+    tajo.attachTable(tableName1, table1Path);
+    assertTrue(tajo.existTable(tableName1));
+    Set<String> tables = Sets.newHashSet(tajo.getTableList());
+    assertTrue(tables.contains(tableName1));
+    tajo.attachTable(tableName2, table2Path);
+    assertTrue(tajo.existTable(tableName2));
+    tables = Sets.newHashSet(tajo.getTableList());
+    assertTrue(tables.contains(tableName1));
+    assertTrue(tables.contains(tableName2));
+  }
+
+  @Test
+  public final void testGetTableDesc() throws IOException, ServiceException {
+    final String tableName1 = "table3";
+    Path tablePath = writeTmpTable(tableName1);
+    assertFalse(tajo.existTable(tableName1));
+    tajo.attachTable(tableName1, tablePath);
+    assertTrue(tajo.existTable(tableName1));
+    TableDesc desc = tajo.getTableDesc(tableName1);
+    assertNotNull(desc);
+    assertEquals(tableName1, desc.getId());
+    assertTrue(desc.getMeta().getStat().getNumBytes() > 0);
+  }
+}
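
Outside of tests the same client API applies; a minimal sketch using only calls exercised above (the DDL, path, and table name are placeholders):

import org.apache.tajo.client.TajoClient;
import org.apache.tajo.conf.TajoConf;

public class TajoClientUsageSketch {
  public static void main(String[] args) throws Exception {
    TajoConf conf = new TajoConf();           // assumes ConfVars.CLIENT_SERVICE_ADDRESS points at a running master
    TajoClient client = new TajoClient(conf);
    try {
      client.updateQuery("create external table dept (deptname text, score int4) "
          + "using csv location 'file:///tmp/dept'");    // DDL goes through updateQuery
      if (client.existTable("dept")) {
        for (String table : client.getTableList()) {     // list registered tables
          System.out.println(table);
        }
        client.dropTable("dept");                        // drops the table and its data
      }
    } finally {
      client.close();
    }
  }
}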

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/bc6359b8/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/cluster/TestServerName.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/cluster/TestServerName.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/cluster/TestServerName.java
new file mode 100644
index 0000000..513187d
--- /dev/null
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/cluster/TestServerName.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.cluster;
+
+import org.junit.Test;
+import org.apache.tajo.master.cluster.ServerName;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestServerName {
+
+	@Test
+	public void testServerNameStringInt() {
+		ServerName server = new ServerName("ex1.com",50030);
+		assertEquals("ex1.com", server.getHostname());
+		assertEquals(50030, server.getPort());
+	}
+
+	@Test
+	public void testServerNameString() {
+		ServerName server = new ServerName("ex1.com:50030");
+		assertEquals("ex1.com", server.getHostname());
+		assertEquals(50030, server.getPort());
+	}
+
+	@Test
+	public void testParseHostname() {
+		assertEquals("ex1.com",ServerName.parseHostname("ex1.com:50030"));
+	}
+
+	@Test
+	public void testParsePort() {
+		assertEquals(50030,ServerName.parsePort("ex1.com:50030"));
+	}
+
+	@Test
+	public void testToString() {
+		ServerName server = new ServerName("ex1.com",50030);
+		assertEquals("ex1.com:50030", server.toString());
+	}
+
+	@Test
+	public void testGetServerName() {
+		ServerName server = new ServerName("ex1.com",50030);
+		assertEquals("ex1.com:50030", server.getServerName());
+	}
+
+	@Test
+	public void testGetHostname() {
+		ServerName server = new ServerName("ex1.com",50030);
+		assertEquals("ex1.com", server.getHostname());
+	}
+
+	@Test
+	public void testGetPort() {
+		ServerName server = new ServerName("ex1.com",50030);
+		assertEquals(50030, server.getPort());
+	}
+
+	@Test
+	public void testGetServerNameStringInt() {
+		assertEquals("ex2.com:50030",ServerName.getServerName("ex2.com", 50030));
+	}
+
+	@Test
+	public void testCompareTo() {
+		ServerName s1 = new ServerName("ex1.com:50030");
+		ServerName s2 = new ServerName("ex1.com:60030");
+		
+		assertTrue(s1.compareTo(s2) < 0);
+		assertTrue(s2.compareTo(s1) > 0);
+		
+		ServerName s3 = new ServerName("ex1.com:50030");
+		assertTrue(s1.compareTo(s3) == 0);
+		
+		ServerName s4 = new ServerName("ex2.com:50030");
+		assertTrue(s1.compareTo(s4) < 0);
+		assertTrue(s4.compareTo(s1) > 0);
+	}
+
+  @Test (expected = IllegalArgumentException.class)
+  public void testException() {
+    new ServerName("ex1.com");
+  }
+}
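
For reference, the behavior covered by this test in one place; a small sketch using only the constructors and helpers asserted above:

import org.apache.tajo.master.cluster.ServerName;

public class ServerNameSketch {
  public static void main(String[] args) {
    // Both constructors denote the same host:port pair.
    ServerName fromParts = new ServerName("ex1.com", 50030);
    ServerName fromString = new ServerName("ex1.com:50030");
    System.out.println(fromParts.compareTo(fromString) == 0);        // true

    // Static helpers work directly on the "host:port" string form.
    System.out.println(ServerName.parseHostname("ex1.com:50030"));   // ex1.com
    System.out.println(ServerName.parsePort("ex1.com:50030"));       // 50030
    System.out.println(ServerName.getServerName("ex2.com", 50030));  // ex2.com:50030

    // A bare hostname without a port is rejected.
    try {
      new ServerName("ex1.com");
    } catch (IllegalArgumentException expected) {
      System.out.println("port is required");
    }
  }
}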

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/bc6359b8/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/eval/TestEvalTree.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/eval/TestEvalTree.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/eval/TestEvalTree.java
new file mode 100644
index 0000000..f1402d0
--- /dev/null
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/eval/TestEvalTree.java
@@ -0,0 +1,691 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.eval;
+
+import org.apache.hadoop.fs.Path;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.apache.tajo.TajoTestingCluster;
+import org.apache.tajo.catalog.*;
+import org.apache.tajo.catalog.function.GeneralFunction;
+import org.apache.tajo.catalog.proto.CatalogProtos.FunctionType;
+import org.apache.tajo.catalog.proto.CatalogProtos.StoreType;
+import org.apache.tajo.common.TajoDataTypes.DataType;
+import org.apache.tajo.datum.Datum;
+import org.apache.tajo.datum.DatumFactory;
+import org.apache.tajo.engine.eval.EvalNode.Type;
+import org.apache.tajo.engine.json.GsonCreator;
+import org.apache.tajo.engine.parser.QueryAnalyzer;
+import org.apache.tajo.engine.parser.QueryBlock;
+import org.apache.tajo.master.TajoMaster;
+import org.apache.tajo.storage.Tuple;
+import org.apache.tajo.storage.VTuple;
+
+import static org.junit.Assert.*;
+import static org.apache.tajo.common.TajoDataTypes.Type.*;
+
+public class TestEvalTree {
+  private static TajoTestingCluster util;
+  private static CatalogService cat;
+  private static QueryAnalyzer analyzer;
+  private static Tuple [] tuples = new Tuple[3];
+  
+  @BeforeClass
+  public static void setUp() throws Exception {
+    util = new TajoTestingCluster();
+    util.startCatalogCluster();
+    cat = util.getMiniCatalogCluster().getCatalog();
+    for (FunctionDesc funcDesc : TajoMaster.initBuiltinFunctions()) {
+      cat.registerFunction(funcDesc);
+    }
+
+    Schema schema = new Schema();
+    schema.addColumn("name", TEXT);
+    schema.addColumn("score", INT4);
+    schema.addColumn("age", INT4);
+
+    TableMeta meta = CatalogUtil.newTableMeta(schema, StoreType.CSV);
+    TableDesc desc = new TableDescImpl("people", meta, new Path("file:///"));
+    cat.addTable(desc);
+
+    FunctionDesc funcMeta = new FunctionDesc("sum", TestSum.class, FunctionType.GENERAL,
+        CatalogUtil.newDataTypesWithoutLen(INT4),
+        CatalogUtil.newDataTypesWithoutLen(INT4, INT4));
+    cat.registerFunction(funcMeta);
+
+    analyzer = new QueryAnalyzer(cat);
+    
+    tuples[0] = new VTuple(3);
+    tuples[0].put(new Datum[] {
+        DatumFactory.createText("aabc"),
+        DatumFactory.createInt4(100),
+        DatumFactory.createInt4(10)});
+    tuples[1] = new VTuple(3);
+    tuples[1].put(new Datum[] {
+        DatumFactory.createText("aaba"),
+        DatumFactory.createInt4(200),
+        DatumFactory.createInt4(20)});
+    tuples[2] = new VTuple(3);
+    tuples[2].put(new Datum[] {
+        DatumFactory.createText("kabc"),
+        DatumFactory.createInt4(300),
+        DatumFactory.createInt4(30)});
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    util.shutdownCatalogCluster();
+  }
+
+  public static class TestSum extends GeneralFunction {
+    private Integer x;
+    private Integer y;
+
+    public TestSum() {
+      super(new Column[] { new Column("arg1", INT4),
+          new Column("arg2", INT4) });
+    }
+
+    @Override
+    public Datum eval(Tuple params) {
+      x =  params.get(0).asInt4();
+      y =  params.get(1).asInt4();
+      return DatumFactory.createInt4(x + y);
+    }
+    
+    public String toJSON() {
+    	return GsonCreator.getInstance().toJson(this, GeneralFunction.class);
+    }
+  }
+
+  static String[] QUERIES = {
+      "select name, score, age from people where score > 30", // 0
+      "select name, score, age from people where score * age", // 1
+      "select name, score, age from people where sum(score * age, 50)", // 2
+      "select 2+3", // 3
+      "select sum(score) from people", // 4
+      "select name from people where NOT (20 > 30)", // 5
+  };
+
+  @Test
+  public final void testFunctionEval() throws Exception {    
+    Tuple tuple = new VTuple(3);
+    tuple.put(
+        new Datum[] {
+          DatumFactory.createText("hyunsik"),
+          DatumFactory.createInt4(500),
+          DatumFactory.createInt4(30)});
+
+    QueryBlock block;
+    EvalNode expr;
+
+    Schema peopleSchema = cat.getTableDesc("people").getMeta().getSchema();
+    block = (QueryBlock) analyzer.parse(QUERIES[0]).getParseTree();
+    EvalContext evalCtx;
+    expr = block.getWhereCondition();
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, peopleSchema, tuple);
+    assertEquals(true, expr.terminate(evalCtx).asBool());
+
+    block = (QueryBlock) analyzer.parse(QUERIES[1]).getParseTree();
+    expr = block.getWhereCondition();
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, peopleSchema, tuple);
+    assertEquals(15000, expr.terminate(evalCtx).asInt4());
+    assertCloneEqual(expr);
+
+    block = (QueryBlock) analyzer.parse(QUERIES[2]).getParseTree();
+    expr = block.getWhereCondition();
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, peopleSchema, tuple);
+    assertEquals(15050, expr.terminate(evalCtx).asInt4());
+    assertCloneEqual(expr);
+    
+    block = (QueryBlock) analyzer.parse(QUERIES[2]).getParseTree();
+    expr = block.getWhereCondition();
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, peopleSchema, tuple);
+    assertEquals(15050, expr.terminate(evalCtx).asInt4());
+    assertCloneEqual(expr);
+    
+    // Aggregation function test
+    block = (QueryBlock) analyzer.parse(QUERIES[4]).getParseTree();
+    expr = block.getTargetList()[0].getEvalTree();
+    evalCtx = expr.newContext();
+    
+    final int tuplenum = 10;
+    Tuple [] tuples = new Tuple[tuplenum];
+    for (int i=0; i < tuplenum; i++) {
+      tuples[i] = new VTuple(3);
+      tuples[i].put(0, DatumFactory.createText("hyunsik"));
+      tuples[i].put(1, DatumFactory.createInt4(i + 1));
+      tuples[i].put(2, DatumFactory.createInt4(30));
+    }
+    
+    int sum = 0;
+    for (int i=0; i < tuplenum; i++) {
+      expr.eval(evalCtx, peopleSchema, tuples[i]);
+      sum = sum + (i+1);
+      assertEquals(sum, expr.terminate(evalCtx).asInt4());
+    }
+  }
+  
+  
+  @Test
+  public void testTupleEval() throws CloneNotSupportedException {
+    ConstEval e1 = new ConstEval(DatumFactory.createInt4(1));
+    assertCloneEqual(e1);
+    FieldEval e2 = new FieldEval("table1.score", CatalogUtil.newDataTypeWithoutLen(INT4)); // it indicates
+    assertCloneEqual(e2);
+
+    Schema schema1 = new Schema();
+    schema1.addColumn("table1.id", INT4);
+    schema1.addColumn("table1.score", INT4);
+    
+    BinaryEval expr = new BinaryEval(Type.PLUS, e1, e2);
+    EvalContext evalCtx = expr.newContext();
+    assertCloneEqual(expr);
+    VTuple tuple = new VTuple(2);
+    tuple.put(0, DatumFactory.createInt4(1)); // put 0th field
+    tuple.put(1, DatumFactory.createInt4(99)); // put 1st field
+
+    // the result of evaluation must be 100.
+    expr.eval(evalCtx, schema1, tuple);
+    assertEquals(expr.terminate(evalCtx).asInt4(), 100);
+  }
+
+  public static class MockTrueEval extends EvalNode {
+
+    public MockTrueEval() {
+      super(Type.CONST);
+    }
+
+    @Override
+    public String getName() {
+      return this.getClass().getName();
+    }
+
+    @Override
+    public Datum terminate(EvalContext ctx) {
+      return DatumFactory.createBool(true);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      return true;
+    }
+
+    @Override
+    public EvalContext newContext() {
+      return null;
+    }
+
+    @Override
+    public DataType [] getValueType() {
+      return CatalogUtil.newDataTypesWithoutLen(BOOLEAN);
+    }
+
+  }
+
+  public static class MockFalseExpr extends EvalNode {
+
+    public MockFalseExpr() {
+      super(Type.CONST);
+    }
+
+    @Override
+    public EvalContext newContext() {
+      return null;
+    }
+
+    @Override
+    public Datum terminate(EvalContext ctx) {
+      return DatumFactory.createBool(false);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      return true;
+    }
+
+    @Override
+    public String getName() {
+      return this.getClass().getName();
+    }
+
+    @Override
+    public DataType [] getValueType() {
+      return CatalogUtil.newDataTypesWithoutLen(BOOLEAN);
+    }
+  }
+
+  @Test
+  public void testAndTest() {
+    MockTrueEval trueExpr = new MockTrueEval();
+    MockFalseExpr falseExpr = new MockFalseExpr();
+
+    BinaryEval andExpr = new BinaryEval(Type.AND, trueExpr, trueExpr);
+    EvalContext evalCtx = andExpr.newContext();
+    andExpr.eval(evalCtx, null, null);
+    assertTrue(andExpr.terminate(evalCtx).asBool());
+
+    andExpr = new BinaryEval(Type.AND, falseExpr, trueExpr);
+    evalCtx = andExpr.newContext();
+    andExpr.eval(evalCtx, null, null);
+    assertFalse(andExpr.terminate(evalCtx).asBool());
+
+    andExpr = new BinaryEval(Type.AND, trueExpr, falseExpr);
+    evalCtx = andExpr.newContext();
+    andExpr.eval(evalCtx, null, null);
+    assertFalse(andExpr.terminate(evalCtx).asBool());
+
+    andExpr = new BinaryEval(Type.AND, falseExpr, falseExpr);
+    evalCtx = andExpr.newContext();
+    andExpr.eval(evalCtx, null, null);
+    assertFalse(andExpr.terminate(evalCtx).asBool());
+  }
+
+  @Test
+  public void testOrTest() {
+    MockTrueEval trueExpr = new MockTrueEval();
+    MockFalseExpr falseExpr = new MockFalseExpr();
+
+    BinaryEval orExpr = new BinaryEval(Type.OR, trueExpr, trueExpr);
+    EvalContext evalCtx = orExpr.newContext();
+    orExpr.eval(evalCtx, null, null);
+    assertTrue(orExpr.terminate(evalCtx).asBool());
+
+    orExpr = new BinaryEval(Type.OR, falseExpr, trueExpr);
+    evalCtx = orExpr.newContext();
+    orExpr.eval(evalCtx, null, null);
+    assertTrue(orExpr.terminate(evalCtx).asBool());
+
+    orExpr = new BinaryEval(Type.OR, trueExpr, falseExpr);
+    evalCtx = orExpr.newContext();
+    orExpr.eval(evalCtx, null, null);
+    assertTrue(orExpr.terminate(evalCtx).asBool());
+
+    orExpr = new BinaryEval(Type.OR, falseExpr, falseExpr);
+    evalCtx = orExpr.newContext();
+    orExpr.eval(evalCtx, null, null);
+    assertFalse(orExpr.terminate(evalCtx).asBool());
+  }
+
+  @Test
+  public final void testCompOperator() {
+    ConstEval e1;
+    ConstEval e2;
+    BinaryEval expr;
+
+    // Constant
+    e1 = new ConstEval(DatumFactory.createInt4(9));
+    e2 = new ConstEval(DatumFactory.createInt4(34));
+    expr = new BinaryEval(Type.LTH, e1, e2);
+    EvalContext evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.LEQ, e1, e2);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.LTH, e2, e1);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertFalse(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.LEQ, e2, e1);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertFalse(expr.terminate(evalCtx).asBool());
+
+    expr = new BinaryEval(Type.GTH, e2, e1);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.GEQ, e2, e1);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.GTH, e1, e2);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertFalse(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.GEQ, e1, e2);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertFalse(expr.terminate(evalCtx).asBool());
+
+    BinaryEval plus = new BinaryEval(Type.PLUS, e1, e2);
+    expr = new BinaryEval(Type.LTH, e1, plus);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.LEQ, e1, plus);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.LTH, plus, e1);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertFalse(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.LEQ, plus, e1);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertFalse(expr.terminate(evalCtx).asBool());
+
+    expr = new BinaryEval(Type.GTH, plus, e1);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.GEQ, plus, e1);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.GTH, e1, plus);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertFalse(expr.terminate(evalCtx).asBool());
+    expr = new BinaryEval(Type.GEQ, e1, plus);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertFalse(expr.terminate(evalCtx).asBool());
+  }
+
+  @Test
+  public final void testArithmaticsOperator() 
+      throws CloneNotSupportedException {
+    ConstEval e1;
+    ConstEval e2;
+
+    // PLUS
+    e1 = new ConstEval(DatumFactory.createInt4(9));
+    e2 = new ConstEval(DatumFactory.createInt4(34));
+    BinaryEval expr = new BinaryEval(Type.PLUS, e1, e2);
+    EvalContext evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertEquals(expr.terminate(evalCtx).asInt4(), 43);
+    assertCloneEqual(expr);
+    
+    // MINUS
+    e1 = new ConstEval(DatumFactory.createInt4(5));
+    e2 = new ConstEval(DatumFactory.createInt4(2));
+    expr = new BinaryEval(Type.MINUS, e1, e2);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertEquals(expr.terminate(evalCtx).asInt4(), 3);
+    assertCloneEqual(expr);
+    
+    // MULTIPLY
+    e1 = new ConstEval(DatumFactory.createInt4(5));
+    e2 = new ConstEval(DatumFactory.createInt4(2));
+    expr = new BinaryEval(Type.MULTIPLY, e1, e2);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertEquals(expr.terminate(evalCtx).asInt4(), 10);
+    assertCloneEqual(expr);
+    
+    // DIVIDE
+    e1 = new ConstEval(DatumFactory.createInt4(10));
+    e2 = new ConstEval(DatumFactory.createInt4(5));
+    expr = new BinaryEval(Type.DIVIDE, e1, e2);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertEquals(expr.terminate(evalCtx).asInt4(), 2);
+    assertCloneEqual(expr);
+  }
+
+  @Test
+  public final void testGetReturnType() {
+    ConstEval e1;
+    ConstEval e2;
+
+    // PLUS
+    e1 = new ConstEval(DatumFactory.createInt4(9));
+    e2 = new ConstEval(DatumFactory.createInt4(34));
+    BinaryEval expr = new BinaryEval(Type.PLUS, e1, e2);
+    assertEquals(CatalogUtil.newDataTypeWithoutLen(INT4), expr.getValueType()[0]);
+
+    expr = new BinaryEval(Type.LTH, e1, e2);
+    EvalContext evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    assertEquals(CatalogUtil.newDataTypeWithoutLen(BOOLEAN), expr.getValueType()[0]);
+
+    e1 = new ConstEval(DatumFactory.createFloat8(9.3));
+    e2 = new ConstEval(DatumFactory.createFloat8(34.2));
+    expr = new BinaryEval(Type.PLUS, e1, e2);
+    assertEquals(CatalogUtil.newDataTypeWithoutLen(FLOAT8), expr.getValueType()[0]);
+  }
+  
+  @Test
+  public final void testEquals() throws CloneNotSupportedException {
+    ConstEval e1;
+    ConstEval e2;
+
+    // PLUS
+    e1 = new ConstEval(DatumFactory.createInt4(34));
+    e2 = new ConstEval(DatumFactory.createInt4(34));
+    assertEquals(e1, e2);
+    
+    BinaryEval plus1 = new BinaryEval(Type.PLUS, e1, e2);
+    BinaryEval plus2 = new BinaryEval(Type.PLUS, e2, e1);
+    assertEquals(plus1, plus2);
+    
+    ConstEval e3 = new ConstEval(DatumFactory.createInt4(29));
+    BinaryEval plus3 = new BinaryEval(Type.PLUS, e1, e3);
+    assertFalse(plus1.equals(plus3));
+    
+    // LTH
+    ConstEval e4 = new ConstEval(DatumFactory.createInt4(9));
+    ConstEval e5 = new ConstEval(DatumFactory.createInt4(34));
+    BinaryEval compExpr1 = new BinaryEval(Type.LTH, e4, e5);
+    assertCloneEqual(compExpr1);
+    
+    ConstEval e6 = new ConstEval(DatumFactory.createInt4(9));
+    ConstEval e7 = new ConstEval(DatumFactory.createInt4(34));
+    BinaryEval compExpr2 = new BinaryEval(Type.LTH, e6, e7);
+    assertCloneEqual(compExpr2);
+    
+    assertTrue(compExpr1.equals(compExpr2));
+  }
+  
+  @Test
+  public final void testJson() throws CloneNotSupportedException {
+    ConstEval e1;
+    ConstEval e2;
+
+    // 29 > (5 + 34) + (34 + 5)
+    e1 = new ConstEval(DatumFactory.createInt4(34));
+    e2 = new ConstEval(DatumFactory.createInt4(5));
+    assertCloneEqual(e1); 
+    
+    BinaryEval plus1 = new BinaryEval(Type.PLUS, e1, e2);
+    assertCloneEqual(plus1);
+    BinaryEval plus2 = new BinaryEval(Type.PLUS, e2, e1);
+    assertCloneEqual(plus2);
+    BinaryEval plus3 = new BinaryEval(Type.PLUS, plus2, plus1);
+    assertCloneEqual(plus3);
+    
+    ConstEval e3 = new ConstEval(DatumFactory.createInt4(29));
+    BinaryEval gth = new BinaryEval(Type.GTH, e3, plus3);
+    assertCloneEqual(gth);
+    
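+    // Serialize to JSON and deserialize; the round trip must reproduce the full
+    // expression tree, which is verified node by node below.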
+    String json = gth.toJSON();
+    EvalNode eval = GsonCreator.getInstance().fromJson(json, EvalNode.class);
+    assertCloneEqual(eval);
+    
+    assertEquals(gth.getType(), eval.getType());
+    assertEquals(e3.getType(), eval.getLeftExpr().getType());
+    assertEquals(plus3.getType(), eval.getRightExpr().getType());
+    assertEquals(plus3.getLeftExpr(), eval.getRightExpr().getLeftExpr());
+    assertEquals(plus3.getRightExpr(), eval.getRightExpr().getRightExpr());
+    assertEquals(plus2.getLeftExpr(), eval.getRightExpr().getLeftExpr().getLeftExpr());
+    assertEquals(plus2.getRightExpr(), eval.getRightExpr().getLeftExpr().getRightExpr());
+    assertEquals(plus1.getLeftExpr(), eval.getRightExpr().getRightExpr().getLeftExpr());
+    assertEquals(plus1.getRightExpr(), eval.getRightExpr().getRightExpr().getRightExpr());
+  }
+  
+  private void assertCloneEqual(EvalNode eval) throws CloneNotSupportedException {
+    EvalNode copy = (EvalNode) eval.clone();
+    assertEquals(eval, copy);
+    assertFalse(eval == copy);
+  }
+  
+  static String[] NOT = {
+    "select name, score, age from people where not (score >= 200)", // 0"
+  };
+  
+  @Test
+  public final void testNot() throws CloneNotSupportedException {
+    ConstEval e1;
+    ConstEval e2;
+    EvalNode expr;
+
+    // Constant
+    e1 = new ConstEval(DatumFactory.createInt4(9));
+    e2 = new ConstEval(DatumFactory.createInt4(34));
+    expr = new BinaryEval(Type.LTH, e1, e2);
+    EvalContext evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    NotEval not = new NotEval(expr);
+    evalCtx = not.newContext();
+    not.eval(evalCtx, null, null);
+    assertFalse(not.terminate(evalCtx).asBool());
+    
+    expr = new BinaryEval(Type.LEQ, e1, e2);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    not = new NotEval(expr);
+    evalCtx = not.newContext();
+    not.eval(evalCtx, null, null);
+    assertFalse(not.terminate(evalCtx).asBool());
+    
+    expr = new BinaryEval(Type.LTH, e2, e1);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertFalse(expr.terminate(evalCtx).asBool());
+    not = new NotEval(expr);
+    evalCtx = not.newContext();
+    not.eval(evalCtx, null, null);
+    assertTrue(not.terminate(evalCtx).asBool());
+    
+    expr = new BinaryEval(Type.LEQ, e2, e1);
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, null, null);
+    assertFalse(expr.terminate(evalCtx).asBool());
+    not = new NotEval(expr);
+    evalCtx = not.newContext();
+    not.eval(evalCtx, null, null);
+    assertTrue(not.terminate(evalCtx).asBool());
+    
+    // Evaluation Test
+    QueryBlock block;
+    Schema peopleSchema = cat.getTableDesc("people").getMeta().getSchema();
+    block = (QueryBlock) analyzer.parse(NOT[0]).getParseTree();
+    expr = block.getWhereCondition();
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, peopleSchema, tuples[0]);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr.eval(evalCtx, peopleSchema, tuples[1]);
+    assertFalse(expr.terminate(evalCtx).asBool());
+    expr.eval(evalCtx, peopleSchema, tuples[2]);
+    assertFalse(expr.terminate(evalCtx).asBool());
+  }
+  
+  static String[] LIKE = {
+    "select name, score, age from people where name like '%bc'", // 0"
+    "select name, score, age from people where name like 'aa%'", // 1"
+    "select name, score, age from people where name not like '%bc'", // 2"
+  };
+  
+  @Test
+  public final void testLike() {
+    QueryBlock block;
+    EvalNode expr;
+
+    Schema peopleSchema = cat.getTableDesc("people").getMeta().getSchema();
+    block = (QueryBlock) analyzer.parse(LIKE[0]).getParseTree();
+    expr = block.getWhereCondition();
+    EvalContext evalCtx = expr.newContext();
+    expr.eval(evalCtx, peopleSchema, tuples[0]);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr.eval(evalCtx, peopleSchema, tuples[1]);
+    assertFalse(expr.terminate(evalCtx).asBool());
+    expr.eval(evalCtx, peopleSchema, tuples[2]);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    
+    // prefix
+    block = (QueryBlock) analyzer.parse(LIKE[1]).getParseTree();
+    expr = block.getWhereCondition();
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, peopleSchema, tuples[0]);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr.eval(evalCtx, peopleSchema, tuples[1]);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr.eval(evalCtx, peopleSchema, tuples[2]);
+    assertFalse(expr.terminate(evalCtx).asBool());
+
+    // Not Test
+    block = (QueryBlock) analyzer.parse(LIKE[2]).getParseTree();
+    expr = block.getWhereCondition();
+    evalCtx = expr.newContext();
+    expr.eval(evalCtx, peopleSchema, tuples[0]);
+    assertFalse(expr.terminate(evalCtx).asBool());
+    expr.eval(evalCtx, peopleSchema, tuples[1]);
+    assertTrue(expr.terminate(evalCtx).asBool());
+    expr.eval(evalCtx, peopleSchema, tuples[2]);
+    assertFalse(expr.terminate(evalCtx).asBool());
+  }
+
+  static String[] IS_NULL = {
+      "select name, score, age from people where name is null", // 0"
+      "select name, score, age from people where name is not null", // 1"
+  };
+
+  @Test
+  public void testIsNullEval() {
+    QueryBlock block;
+    EvalNode expr;
+
+    block = (QueryBlock) analyzer.parse(IS_NULL[0]).getParseTree();
+    expr = block.getWhereCondition();
+
+    assertIsNull(expr);
+
+    block = (QueryBlock) analyzer.parse(IS_NULL[1]).getParseTree();
+    expr = block.getWhereCondition();
+
+    IsNullEval nullEval = (IsNullEval) expr;
+    assertTrue(nullEval.isNot());
+    assertIsNull(expr);
+  }
+
+  private void assertIsNull(EvalNode isNullEval) {
+    assertEquals(Type.IS, isNullEval.getType());
+    assertEquals(Type.FIELD, isNullEval.getLeftExpr().getType());
+    FieldEval left = (FieldEval) isNullEval.getLeftExpr();
+    assertEquals("name", left.getColumnName());
+    assertEquals(Type.CONST, isNullEval.getRightExpr().getType());
+    ConstEval constEval = (ConstEval) isNullEval.getRightExpr();
+    assertEquals(DatumFactory.createNullDatum(), constEval.getValue());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/bc6359b8/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/eval/TestEvalTreeUtil.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/eval/TestEvalTreeUtil.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/eval/TestEvalTreeUtil.java
new file mode 100644
index 0000000..033578b
--- /dev/null
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/eval/TestEvalTreeUtil.java
@@ -0,0 +1,308 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.eval;
+
+import com.google.common.collect.Sets;
+import org.apache.hadoop.fs.Path;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.apache.tajo.TajoTestingCluster;
+import org.apache.tajo.catalog.*;
+import org.apache.tajo.catalog.proto.CatalogProtos.FunctionType;
+import org.apache.tajo.catalog.proto.CatalogProtos.StoreType;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.datum.DatumFactory;
+import org.apache.tajo.engine.eval.EvalNode.Type;
+import org.apache.tajo.engine.eval.TestEvalTree.TestSum;
+import org.apache.tajo.engine.parser.QueryAnalyzer;
+import org.apache.tajo.engine.parser.QueryBlock;
+import org.apache.tajo.engine.parser.QueryBlock.Target;
+import org.apache.tajo.engine.planner.LogicalPlanner;
+import org.apache.tajo.exception.InternalException;
+import org.apache.tajo.master.TajoMaster;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestEvalTreeUtil {
+  static TajoTestingCluster util;
+  static CatalogService catalog = null;
+  static EvalNode expr1;
+  static EvalNode expr2;
+  static EvalNode expr3;
+  static QueryAnalyzer analyzer;
+  static LogicalPlanner planner;
+
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    util = new TajoTestingCluster();
+    util.startCatalogCluster();
+    catalog = util.getMiniCatalogCluster().getCatalog();
+    for (FunctionDesc funcDesc : TajoMaster.initBuiltinFunctions()) {
+      catalog.registerFunction(funcDesc);
+    }
+
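+    // Register a "people" table with (name TEXT, score INT4, age INT4) and a
+    // general two-argument sum(INT4, INT4) function backed by TestSum.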
+    Schema schema = new Schema();
+    schema.addColumn("name", TajoDataTypes.Type.TEXT);
+    schema.addColumn("score", TajoDataTypes.Type.INT4);
+    schema.addColumn("age", TajoDataTypes.Type.INT4);
+
+    TableMeta meta = CatalogUtil.newTableMeta(schema, StoreType.CSV);
+    TableDesc desc = new TableDescImpl("people", meta, new Path("file:///"));
+    catalog.addTable(desc);
+
+    FunctionDesc funcMeta = new FunctionDesc("sum", TestSum.class,
+        FunctionType.GENERAL,
+        CatalogUtil.newDataTypesWithoutLen(TajoDataTypes.Type.INT4),
+        CatalogUtil.newDataTypesWithoutLen(TajoDataTypes.Type.INT4, TajoDataTypes.Type.INT4));
+    catalog.registerFunction(funcMeta);
+
+    analyzer = new QueryAnalyzer(catalog);
+    planner = new LogicalPlanner(catalog);
+    
+    QueryBlock block;
+
+    block = (QueryBlock) analyzer.parse(TestEvalTree.QUERIES[0]).getParseTree();
+    expr1 = block.getWhereCondition();
+
+    block = (QueryBlock) analyzer.parse(TestEvalTree.QUERIES[1]).getParseTree();
+    expr2 = block.getWhereCondition();
+    
+    block = (QueryBlock) analyzer.parse(TestEvalTree.QUERIES[2]).getParseTree();
+    expr3 = block.getWhereCondition();
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    util.shutdownCatalogCluster();
+  }
+
+  @Test
+  public final void testChangeColumnRef() throws CloneNotSupportedException {
+    EvalNode copy = (EvalNode)expr1.clone();
+    EvalTreeUtil.changeColumnRef(copy, "people.score", "newscore");
+    Set<Column> set = EvalTreeUtil.findDistinctRefColumns(copy);
+    assertEquals(1, set.size());
+    assertTrue(set.contains(new Column("newscore", TajoDataTypes.Type.INT4)));
+    
+    copy = (EvalNode)expr2.clone();
+    EvalTreeUtil.changeColumnRef(copy, "people.age", "sum_age");
+    set = EvalTreeUtil.findDistinctRefColumns(copy);
+    assertEquals(2, set.size());
+    assertTrue(set.contains(new Column("people.score", TajoDataTypes.Type.INT4)));
+    assertTrue(set.contains(new Column("sum_age", TajoDataTypes.Type.INT4)));
+    
+    copy = (EvalNode)expr3.clone();
+    EvalTreeUtil.changeColumnRef(copy, "people.age", "sum_age");
+    set = EvalTreeUtil.findDistinctRefColumns(copy);
+    assertEquals(2, set.size());
+    assertTrue(set.contains(new Column("people.score", TajoDataTypes.Type.INT4)));
+    assertTrue(set.contains(new Column("sum_age", TajoDataTypes.Type.INT4)));
+  }
+
+  @Test
+  public final void testFindAllRefColumns() {    
+    Set<Column> set = EvalTreeUtil.findDistinctRefColumns(expr1);
+    assertEquals(1, set.size());
+    assertTrue(set.contains(new Column("people.score", TajoDataTypes.Type.INT4)));
+    
+    set = EvalTreeUtil.findDistinctRefColumns(expr2);
+    assertEquals(2, set.size());
+    assertTrue(set.contains(new Column("people.score", TajoDataTypes.Type.INT4)));
+    assertTrue(set.contains(new Column("people.age", TajoDataTypes.Type.INT4)));
+    
+    set = EvalTreeUtil.findDistinctRefColumns(expr3);
+    assertEquals(2, set.size());
+    assertTrue(set.contains(new Column("people.score", TajoDataTypes.Type.INT4)));
+    assertTrue(set.contains(new Column("people.age", TajoDataTypes.Type.INT4)));
+  }
+  
+  public static final String [] QUERIES = {
+    "select 3 + 4 as plus, (3.5 * 2) as mul", // 0
+    "select (score + 3) < 4, age > 5 from people", // 1
+    "select score from people where score > 7", // 2
+    "select score from people where (10 * 2) * (score + 2) > 20 + 30 + 10", // 3
+    "select score from people where 10 * 2 > score * 10", // 4
+    "select score from people where score < 10 and 4 < score", // 5
+    "select score from people where score < 10 and 4 < score and age > 5", // 6
+  };
+  
+  @Test
+  public final void testGetSchemaFromTargets() throws InternalException {
+    QueryBlock block = (QueryBlock) analyzer.parse(QUERIES[0]).getParseTree();
+    Schema schema = 
+        EvalTreeUtil.getSchemaByTargets(null, block.getTargetList());
+    Column col1 = schema.getColumn(0);
+    Column col2 = schema.getColumn(1);
+    assertEquals("plus", col1.getColumnName());
+    assertEquals(TajoDataTypes.Type.INT4, col1.getDataType().getType());
+    assertEquals("mul", col2.getColumnName());
+    assertEquals(TajoDataTypes.Type.FLOAT8, col2.getDataType().getType());
+  }
+  
+  @Test
+  public final void testGetContainExprs() throws CloneNotSupportedException {
+    QueryBlock block = (QueryBlock) analyzer.parse(QUERIES[1]).getParseTree();
+    Target [] targets = block.getTargetList();
+    
+    Column col1 = new Column("people.score", TajoDataTypes.Type.INT4);
+    Collection<EvalNode> exprs =
+        EvalTreeUtil.getContainExpr(targets[0].getEvalTree(), col1);
+    EvalNode node = exprs.iterator().next();
+    assertEquals(Type.LTH, node.getType());
+    assertEquals(Type.PLUS, node.getLeftExpr().getType());
+    assertEquals(new ConstEval(DatumFactory.createInt4(4)), node.getRightExpr());
+    
+    Column col2 = new Column("people.age", TajoDataTypes.Type.INT4);
+    exprs = EvalTreeUtil.getContainExpr(targets[1].getEvalTree(), col2);
+    node = exprs.iterator().next();
+    assertEquals(Type.GTH, node.getType());
+    assertEquals("people.age", node.getLeftExpr().getName());
+    assertEquals(new ConstEval(DatumFactory.createInt4(5)), node.getRightExpr());
+  }
+  
+  @Test
+  public final void testGetCNF() {
+    // "select score from people where score < 10 and 4 < score "
+    QueryBlock block = (QueryBlock) analyzer.parse(QUERIES[5]).getParseTree();
+    EvalNode node = block.getWhereCondition();
+    EvalNode [] cnf = EvalTreeUtil.getConjNormalForm(node);
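+    // The conjunction splits into its two conjuncts, score < 10 and 4 < score,
+    // which are checked individually below.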
+    
+    Column col1 = new Column("people.score", TajoDataTypes.Type.INT4);
+    
+    assertEquals(2, cnf.length);
+    EvalNode first = cnf[0];
+    EvalNode second = cnf[1];
+    
+    FieldEval field = (FieldEval) first.getLeftExpr();
+    assertEquals(col1, field.getColumnRef());
+    assertEquals(Type.LTH, first.getType());
+    EvalContext firstRCtx = first.getRightExpr().newContext();
+    first.getRightExpr().eval(firstRCtx, null,  null);
+    assertEquals(10, first.getRightExpr().terminate(firstRCtx).asInt4());
+    
+    field = (FieldEval) second.getRightExpr();
+    assertEquals(col1, field.getColumnRef());
+    assertEquals(Type.LTH, second.getType());
+    EvalContext secondLCtx = second.getLeftExpr().newContext();
+    second.getLeftExpr().eval(secondLCtx, null,  null);
+    assertEquals(4, second.getLeftExpr().terminate(secondLCtx).asInt4());
+  }
+  
+  @Test
+  public final void testTransformCNF2Singleton() {
+    // "select score from people where score < 10 and 4 < score "
+    QueryBlock block = (QueryBlock) analyzer.parse(QUERIES[6]).getParseTree();
+    EvalNode node = block.getWhereCondition();
+    EvalNode [] cnf1 = EvalTreeUtil.getConjNormalForm(node);
+    assertEquals(3, cnf1.length);
+    
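+    // Folding the three conjuncts back into a single AND expression and re-splitting
+    // it must yield the same set of predicates.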
+    EvalNode conj = EvalTreeUtil.transformCNF2Singleton(cnf1);
+    EvalNode [] cnf2 = EvalTreeUtil.getConjNormalForm(conj);
+    
+    Set<EvalNode> set1 = Sets.newHashSet(cnf1);
+    Set<EvalNode> set2 = Sets.newHashSet(cnf2);
+    assertEquals(set1, set2);
+  }
+  
+  @Test
+  public final void testSimplify() {
+    QueryBlock block = (QueryBlock) analyzer.parse(QUERIES[0]).getParseTree();
+    Target [] targets = block.getTargetList();
+    EvalNode node = AlgebraicUtil.simplify(targets[0].getEvalTree());
+    EvalContext nodeCtx = node.newContext();
+    assertEquals(Type.CONST, node.getType());
+    node.eval(nodeCtx, null, null);
+    assertEquals(7, node.terminate(nodeCtx).asInt4());
+    node = AlgebraicUtil.simplify(targets[1].getEvalTree());
+    assertEquals(Type.CONST, node.getType());
+    nodeCtx = node.newContext();
+    node.eval(nodeCtx, null, null);
+    assertTrue(7.0d == node.terminate(nodeCtx).asFloat8());
+
+    block = (QueryBlock) analyzer.parse(QUERIES[1]).getParseTree();
+    targets = block.getTargetList();
+    Column col1 = new Column("people.score", TajoDataTypes.Type.INT4);
+    Collection<EvalNode> exprs =
+        EvalTreeUtil.getContainExpr(targets[0].getEvalTree(), col1);
+    node = exprs.iterator().next();
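+    // No assertion follows; the simplified form of (score + 3) < 4 is only printed for manual inspection.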
+    System.out.println(AlgebraicUtil.simplify(node));
+  }
+  
+  @Test
+  public final void testConatainSingleVar() {
+    QueryBlock block = (QueryBlock) analyzer.parse(QUERIES[2]).getParseTree();
+    EvalNode node = block.getWhereCondition();
+    assertTrue(AlgebraicUtil.containSingleVar(node));
+    
+    block = (QueryBlock) analyzer.parse(QUERIES[3]).getParseTree();
+    node = block.getWhereCondition();
+    assertTrue(AlgebraicUtil.containSingleVar(node));
+  }
+  
+  @Test
+  public final void testTranspose() {
+    QueryBlock block = (QueryBlock) analyzer.parse(QUERIES[2]).getParseTree();
+    EvalNode node = block.getWhereCondition();
+    assertTrue(AlgebraicUtil.containSingleVar(node));
+    
+    Column col1 = new Column("people.score", TajoDataTypes.Type.INT4);
+    block = (QueryBlock) analyzer.parse(QUERIES[3]).getParseTree();
+    node = block.getWhereCondition();    
+    // we expect the predicate to be transposed into score > 1
+    EvalNode transposed = AlgebraicUtil.transpose(node, col1);
+    assertEquals(Type.GTH, transposed.getType());
+    FieldEval field = (FieldEval) transposed.getLeftExpr(); 
+    assertEquals(col1, field.getColumnRef());
+    EvalContext evalCtx = transposed.getRightExpr().newContext();
+    transposed.getRightExpr().eval(evalCtx, null, null);
+    assertEquals(1, transposed.getRightExpr().terminate(evalCtx).asInt4());
+
+    block = (QueryBlock) analyzer.parse(QUERIES[4]).getParseTree();
+    node = block.getWhereCondition();    
+    // we expect the predicate to be transposed into score < 2
+    transposed = AlgebraicUtil.transpose(node, col1);
+    assertEquals(Type.LTH, transposed.getType());
+    field = (FieldEval) transposed.getLeftExpr(); 
+    assertEquals(col1, field.getColumnRef());
+    evalCtx = transposed.getRightExpr().newContext();
+    transposed.getRightExpr().eval(evalCtx, null, null);
+    assertEquals(2, transposed.getRightExpr().terminate(evalCtx).asInt4());
+  }
+
+  @Test
+  public final void testFindDistinctAggFunctions() {
+
+    QueryBlock block = (QueryBlock) analyzer.parse(
+        "select sum(score) + max(age) from people").getParseTree();
+    List<AggFuncCallEval> list = EvalTreeUtil.
+        findDistinctAggFunction(block.getTargetList()[0].getEvalTree());
+    assertEquals(2, list.size());
+    Set<String> result = Sets.newHashSet("max", "sum");
+    for (AggFuncCallEval eval : list) {
+      assertTrue(result.contains(eval.getName()));
+    }
+  }
+}
\ No newline at end of file