Posted to commits@tajo.apache.org by hy...@apache.org on 2014/01/07 03:23:22 UTC

[5/5] git commit: TAJO-476: Add a test development kit for unit tests based on executions of queries.

TAJO-476: Add a test development kit for unit tests based on executions of queries.


Project: http://git-wip-us.apache.org/repos/asf/incubator-tajo/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-tajo/commit/eaf0a585
Tree: http://git-wip-us.apache.org/repos/asf/incubator-tajo/tree/eaf0a585
Diff: http://git-wip-us.apache.org/repos/asf/incubator-tajo/diff/eaf0a585

Branch: refs/heads/master
Commit: eaf0a5854bf13c3b6a767af5394d4ad100e5dbe5
Parents: f825dce
Author: Hyunsik Choi <hy...@apache.org>
Authored: Tue Jan 7 11:18:59 2014 +0900
Committer: Hyunsik Choi <hy...@apache.org>
Committed: Tue Jan 7 11:18:59 2014 +0900

----------------------------------------------------------------------
 CHANGES.txt                                     |   5 +-
 .../tajo/catalog/statistics/StatisticsUtil.java |  29 --
 .../engine/planner/global/GlobalPlanner.java    |  57 ++-
 .../join/GreedyHeuristicJoinOrderAlgorithm.java |   6 +-
 .../tajo/master/DefaultTaskScheduler.java       |   4 +-
 .../org/apache/tajo/master/GlobalEngine.java    |   3 +-
 .../apache/tajo/master/querymaster/Query.java   |   3 +-
 .../tajo/master/querymaster/Repartitioner.java  |  59 +--
 .../tajo/master/querymaster/SubQuery.java       |  14 +-
 .../java/org/apache/tajo/QueryTestCaseBase.java | 393 +++++++++++++++++
 .../org/apache/tajo/benchmark/TestTPCH.java     |  79 +---
 .../org/apache/tajo/client/TestDDLBuilder.java  |   2 +-
 .../engine/function/TestBuiltinFunctions.java   | 162 ++-----
 .../tajo/engine/parser/TestHiveConverter.java   |  80 ++--
 .../tajo/engine/parser/TestSQLAnalyzer.java     | 100 ++---
 .../tajo/engine/planner/TestLogicalPlanner.java |   2 +-
 .../tajo/engine/query/TestCaseByCases.java      |  58 +--
 .../tajo/engine/query/TestCreateTable.java      |  40 ++
 .../engine/query/TestCreateTableStatement.java  |  57 ---
 .../tajo/engine/query/TestGroupByQuery.java     | 227 ++--------
 .../apache/tajo/engine/query/TestJoinQuery.java | 197 ++-------
 .../tajo/engine/query/TestSelectQuery.java      | 426 +++----------------
 .../apache/tajo/engine/query/TestSortQuery.java | 174 ++------
 .../tajo/engine/query/TestTablePartitions.java  |   2 +-
 .../tajo/engine/query/TestTableSubQuery.java    |  90 +---
 .../apache/tajo/master/TestGlobalPlanner.java   |   9 +-
 .../src/test/queries/complex_union_1.sql        |  29 --
 .../src/test/queries/complex_union_2.sql        |  35 --
 .../create_partitioned_table_as_select.sql      |   6 -
 .../src/test/queries/create_table_1.hiveql      |   1 -
 .../src/test/queries/create_table_1.sql         |   1 -
 .../src/test/queries/create_table_10.sql        |   1 -
 .../src/test/queries/create_table_11.hiveql     |   3 -
 .../src/test/queries/create_table_11.sql        |   3 -
 .../src/test/queries/create_table_12.hiveql     |   4 -
 .../src/test/queries/create_table_12.sql        |   3 -
 .../src/test/queries/create_table_2.hiveql      |   1 -
 .../src/test/queries/create_table_2.sql         |   1 -
 .../src/test/queries/create_table_3.sql         |   1 -
 .../src/test/queries/create_table_4.sql         |   1 -
 .../src/test/queries/create_table_5.sql         |   1 -
 .../src/test/queries/create_table_6.sql         |   1 -
 .../src/test/queries/create_table_7.sql         |   1 -
 .../src/test/queries/create_table_8.sql         |  48 ---
 .../src/test/queries/create_table_9.sql         |   1 -
 .../create_table_partition_by_column.sql        |   4 -
 .../create_table_partition_by_hash_1.sql        |   3 -
 .../create_table_partition_by_hash_2.sql        |   7 -
 .../queries/create_table_partition_by_list.sql  |   8 -
 .../queries/create_table_partition_by_range.sql |   9 -
 .../test/queries/create_table_various_types.sql |  48 ---
 .../src/test/queries/drop_table.sql             |   1 -
 .../src/test/queries/exists_predicate_1.sql     |   1 -
 .../src/test/queries/exists_predicate_2.sql     |   1 -
 .../src/test/queries/groupby_1.sql              |   1 -
 .../src/test/queries/in_subquery_1.sql          |   1 -
 .../src/test/queries/in_subquery_2.sql          |   1 -
 .../src/test/queries/insert_into_select_1.sql   |   1 -
 .../src/test/queries/insert_into_select_2.sql   |   1 -
 .../src/test/queries/insert_into_select_3.sql   |   1 -
 .../queries/insert_overwrite_into_select_1.sql  |   1 -
 .../insert_overwrite_into_select_2.hiveql       |   1 -
 .../queries/insert_overwrite_into_select_2.sql  |   1 -
 .../queries/insert_overwrite_into_select_3.sql  |   1 -
 .../src/test/queries/join_1.sql                 |   1 -
 .../src/test/queries/join_10.sql                |   1 -
 .../src/test/queries/join_11.sql                |   1 -
 .../src/test/queries/join_12.sql                |   1 -
 .../src/test/queries/join_13.sql                |  13 -
 .../src/test/queries/join_14.sql                |   1 -
 .../src/test/queries/join_15.hiveql             |   1 -
 .../src/test/queries/join_15.sql                |   1 -
 .../src/test/queries/join_2.sql                 |   1 -
 .../src/test/queries/join_3.sql                 |   1 -
 .../src/test/queries/join_4.sql                 |   1 -
 .../src/test/queries/join_5.sql                 |   1 -
 .../src/test/queries/join_6.sql                 |   1 -
 .../src/test/queries/join_7.sql                 |   1 -
 .../src/test/queries/join_8.sql                 |  13 -
 .../src/test/queries/join_9.sql                 |   5 -
 .../src/test/queries/select_1.sql               |   1 -
 .../src/test/queries/select_10.hiveql           |   5 -
 .../src/test/queries/select_10.sql              |   5 -
 .../src/test/queries/select_11.hiveql           |   4 -
 .../src/test/queries/select_11.sql              |   6 -
 .../src/test/queries/select_12.hiveql           |   3 -
 .../src/test/queries/select_13.hiveql           |   2 -
 .../src/test/queries/select_13.sql              |   2 -
 .../src/test/queries/select_14.sql              |   2 -
 .../src/test/queries/select_2.sql               |   1 -
 .../src/test/queries/select_3.sql               |   1 -
 .../src/test/queries/select_4.sql               |   1 -
 .../src/test/queries/select_5.sql               |   1 -
 .../src/test/queries/select_6.sql               |   1 -
 .../src/test/queries/select_7.sql               |   1 -
 .../src/test/queries/select_8.sql               |   1 -
 .../src/test/queries/select_9.hiveql            |   4 -
 .../src/test/queries/select_9.sql               |   4 -
 .../src/test/queries/set_1.sql                  |   1 -
 .../src/test/queries/set_2.sql                  |   1 -
 .../src/test/queries/set_3.sql                  |   1 -
 .../src/test/queries/set_4.sql                  |  36 --
 .../src/test/queries/table_subquery1.sql        |  19 -
 .../src/test/queries/table_subquery2.sql        |  21 -
 .../src/test/queries/tajo415_case.sql           |  33 --
 .../src/test/queries/tajo418_case.sql           |  29 --
 .../src/test/queries/tpch_q2_simplified.sql     |  20 -
 .../src/test/queries/union_1.hiveql             |  14 -
 .../dataset/TestCreateTable/table1.tbl          |   3 +
 .../TestBuiltinFunctions/testAvgDouble.sql      |   1 +
 .../queries/TestBuiltinFunctions/testAvgInt.sql |   1 +
 .../TestBuiltinFunctions/testAvgLong.sql        |   1 +
 .../queries/TestBuiltinFunctions/testCount.sql  |   1 +
 .../TestBuiltinFunctions/testMaxLong.sql        |   1 +
 .../TestBuiltinFunctions/testMinLong.sql        |   1 +
 .../queries/TestBuiltinFunctions/testRandom.sql |   1 +
 .../TestBuiltinFunctions/testSplitPart.sql      |   1 +
 .../testSplitPartByString.sql                   |   1 +
 .../testSplitPartNested.sql                     |   1 +
 .../queries/TestCaseByCases/testTAJO415Case.sql |  33 ++
 .../queries/TestCaseByCases/testTAJO418Case.sql |  29 ++
 .../create_table_various_types.sql              |  48 +++
 .../queries/TestCreateTable/table1_ddl.sql      |   1 +
 .../TestGroupByQuery/testComplexParameter.sql   |   1 +
 .../TestGroupByQuery/testComplexParameter2.sql  |   1 +
 .../testComplexParameterWithSubQuery.sql        |   6 +
 .../TestGroupByQuery/testCountDistinct.sql      |   1 +
 .../TestGroupByQuery/testCountDistinct2.sql     |   1 +
 .../queries/TestGroupByQuery/testGroupBy.sql    |   1 +
 .../queries/TestGroupByQuery/testGroupBy2.sql   |   1 +
 .../queries/TestGroupByQuery/testGroupBy3.sql   |   1 +
 .../queries/TestGroupByQuery/testGroupBy4.sql   |   1 +
 .../testHavingWithAggFunction.sql               |   1 +
 .../testHavingWithNamedTarget.sql               |   1 +
 .../queries/TestJoinQuery/testCrossJoin.sql     |   1 +
 .../testCrossJoinWithExplicitJoinQual.sql       |   1 +
 .../TestJoinQuery/testFullOuterJoin1.sql        |   1 +
 .../TestJoinQuery/testJoinAndCaseWhen.sql       |  15 +
 .../queries/TestJoinQuery/testJoinRefEval.sql   |   1 +
 .../TestJoinQuery/testLeftOuterJoin1.sql        |   1 +
 .../TestJoinQuery/testRightOuterJoin1.sql       |   1 +
 .../queries/TestJoinQuery/testTPCHQ2Join.sql    |  20 +
 .../queries/TestSelectQuery/testCaseWhen.sql    |  11 +
 .../TestSelectQuery/testCaseWhenWithoutElse.sql |  10 +
 .../TestSelectQuery/testCreateAfterSelect.sql   |   1 +
 .../queries/TestSelectQuery/testInClause.sql    |   1 +
 .../queries/TestSelectQuery/testInStrClause.sql |   1 +
 .../queries/TestSelectQuery/testLikeClause.sql  |   1 +
 .../queries/TestSelectQuery/testLimit.sql       |   1 +
 .../queries/TestSelectQuery/testNotEqual.sql    |   1 +
 .../queries/TestSelectQuery/testNotInClause.sql |   1 +
 .../TestSelectQuery/testNotInStrClause.sql      |   1 +
 .../TestSelectQuery/testRealValueCompare.sql    |   1 +
 .../queries/TestSelectQuery/testSelect.sql      |   1 +
 .../queries/TestSelectQuery/testSelect2.sql     |   1 +
 .../queries/TestSelectQuery/testSelect3.sql     |   1 +
 .../TestSelectQuery/testSelectAsterik.sql       |   1 +
 .../TestSelectQuery/testSelectDistinct.sql      |   7 +
 .../TestSelectQuery/testStringCompare.sql       |   1 +
 .../queries/TestSelectQuery/testUnion1.sql      |  19 +
 .../queries/TestSelectQuery/testUnion2.sql      |   9 +
 .../queries/TestSelectQuery/testWhereCond1.sql  |   1 +
 .../TestSelectQuery/testWhereCondWithAlias1.sql |   1 +
 .../TestSelectQuery/testWhereCondWithAlias2.sql |   1 +
 .../queries/TestSortQuery/testSort.sql          |   1 +
 .../TestSortQuery/testSortAfterGroupby.sql      |   1 +
 .../testSortAfterGroupbyWithAlias.sql           |   1 +
 .../queries/TestSortQuery/testSortDesc.sql      |   1 +
 .../testSortWithAliasButOriginalName.sql        |   1 +
 .../TestSortQuery/testSortWithAliasKey.sql      |   1 +
 .../queries/TestSortQuery/testTopK.sql          |   1 +
 .../queries/TestTPCH/testQ1OrderBy.sql          |  12 +
 .../queries/TestTPCH/testQ2FourJoins.sql        |   8 +
 .../queries/TestTPCH/testTPCH14Expr.sql         |   7 +
 .../TestTableSubQuery/testGroupBySubQuery.sql   |   1 +
 .../TestTableSubQuery/testJoinSubQuery.sql      |   5 +
 .../TestTableSubQuery/testJoinSubQuery2.sql     |   4 +
 .../TestTableSubQuery/testTableSubquery1.sql    |   1 +
 .../queries/default/complex_union_1.sql         |  29 ++
 .../queries/default/complex_union_2.sql         |  35 ++
 .../create_partitioned_table_as_select.sql      |   6 +
 .../queries/default/create_table_1.hiveql       |   1 +
 .../queries/default/create_table_1.sql          |   1 +
 .../queries/default/create_table_10.sql         |   1 +
 .../queries/default/create_table_11.hiveql      |   3 +
 .../queries/default/create_table_11.sql         |   3 +
 .../queries/default/create_table_12.hiveql      |   4 +
 .../queries/default/create_table_12.sql         |   3 +
 .../queries/default/create_table_2.hiveql       |   1 +
 .../queries/default/create_table_2.sql          |   1 +
 .../queries/default/create_table_3.sql          |   1 +
 .../queries/default/create_table_4.sql          |   1 +
 .../queries/default/create_table_5.sql          |   1 +
 .../queries/default/create_table_6.sql          |   1 +
 .../queries/default/create_table_7.sql          |   1 +
 .../queries/default/create_table_8.sql          |  48 +++
 .../queries/default/create_table_9.sql          |   1 +
 .../create_table_partition_by_column.sql        |   4 +
 .../create_table_partition_by_hash_1.sql        |   3 +
 .../create_table_partition_by_hash_2.sql        |   7 +
 .../default/create_table_partition_by_list.sql  |   8 +
 .../default/create_table_partition_by_range.sql |   9 +
 .../resources/queries/default/drop_table.sql    |   1 +
 .../queries/default/exists_predicate_1.sql      |   1 +
 .../queries/default/exists_predicate_2.sql      |   1 +
 .../resources/queries/default/groupby_1.sql     |   1 +
 .../resources/queries/default/in_subquery_1.sql |   1 +
 .../resources/queries/default/in_subquery_2.sql |   1 +
 .../queries/default/insert_into_select_1.sql    |   1 +
 .../queries/default/insert_into_select_2.sql    |   1 +
 .../queries/default/insert_into_select_3.sql    |   1 +
 .../default/insert_overwrite_into_select_1.sql  |   1 +
 .../insert_overwrite_into_select_2.hiveql       |   1 +
 .../default/insert_overwrite_into_select_2.sql  |   1 +
 .../default/insert_overwrite_into_select_3.sql  |   1 +
 .../test/resources/queries/default/join_1.sql   |   1 +
 .../test/resources/queries/default/join_10.sql  |   1 +
 .../test/resources/queries/default/join_11.sql  |   1 +
 .../test/resources/queries/default/join_12.sql  |   1 +
 .../test/resources/queries/default/join_13.sql  |  13 +
 .../test/resources/queries/default/join_14.sql  |   1 +
 .../resources/queries/default/join_15.hiveql    |   1 +
 .../test/resources/queries/default/join_15.sql  |   1 +
 .../test/resources/queries/default/join_2.sql   |   1 +
 .../test/resources/queries/default/join_3.sql   |   1 +
 .../test/resources/queries/default/join_4.sql   |   1 +
 .../test/resources/queries/default/join_5.sql   |   1 +
 .../test/resources/queries/default/join_6.sql   |   1 +
 .../test/resources/queries/default/join_7.sql   |   1 +
 .../test/resources/queries/default/join_8.sql   |  13 +
 .../test/resources/queries/default/join_9.sql   |   5 +
 .../test/resources/queries/default/select_1.sql |   1 +
 .../resources/queries/default/select_10.hiveql  |   5 +
 .../resources/queries/default/select_10.sql     |   5 +
 .../resources/queries/default/select_11.hiveql  |   4 +
 .../resources/queries/default/select_11.sql     |   6 +
 .../resources/queries/default/select_12.hiveql  |   3 +
 .../resources/queries/default/select_13.hiveql  |   2 +
 .../resources/queries/default/select_13.sql     |   2 +
 .../resources/queries/default/select_14.sql     |   2 +
 .../test/resources/queries/default/select_2.sql |   1 +
 .../test/resources/queries/default/select_3.sql |   1 +
 .../test/resources/queries/default/select_4.sql |   1 +
 .../test/resources/queries/default/select_5.sql |   1 +
 .../test/resources/queries/default/select_6.sql |   1 +
 .../test/resources/queries/default/select_7.sql |   1 +
 .../test/resources/queries/default/select_8.sql |   1 +
 .../resources/queries/default/select_9.hiveql   |   4 +
 .../test/resources/queries/default/select_9.sql |   4 +
 .../test/resources/queries/default/set_1.sql    |   1 +
 .../test/resources/queries/default/set_2.sql    |   1 +
 .../test/resources/queries/default/set_3.sql    |   1 +
 .../test/resources/queries/default/set_4.sql    |  36 ++
 .../queries/default/table_subquery1.sql         |  19 +
 .../queries/default/table_subquery2.sql         |  21 +
 .../resources/queries/default/union_1.hiveql    |  14 +
 .../TestBuiltinFunctions/testAvgDouble.result   |   5 +
 .../TestBuiltinFunctions/testAvgInt.result      |   3 +
 .../TestBuiltinFunctions/testAvgLong.result     |   3 +
 .../TestBuiltinFunctions/testCount.result       |   3 +
 .../TestBuiltinFunctions/testMaxLong.result     |   3 +
 .../TestBuiltinFunctions/testMinLong.result     |   3 +
 .../TestBuiltinFunctions/testRandom.result      |   1 +
 .../TestBuiltinFunctions/testSplitPart.result   |   7 +
 .../testSplitPartByString.result                |   7 +
 .../testSplitPartNested.result                  |   7 +
 .../TestCaseByCases/testTAJO415Case.result      |   7 +
 .../TestCaseByCases/testTAJO418Case.result      |   3 +
 .../testComplexParameter.result                 |   3 +
 .../testComplexParameter2.result                |   3 +
 .../testComplexParameterWithSubQuery.result     |   3 +
 .../TestGroupByQuery/testCountDistinct.result   |   5 +
 .../TestGroupByQuery/testCountDistinct2.result  |   5 +
 .../results/TestGroupByQuery/testGroupBy.result |   3 +
 .../TestGroupByQuery/testGroupBy2.result        |   4 +
 .../TestGroupByQuery/testGroupBy3.result        |   5 +
 .../TestGroupByQuery/testGroupBy4.result        |   5 +
 .../testHavingWithAggFunction.result            |   4 +
 .../testHavingWithNamedTarget.result            |   5 +
 .../results/TestJoinQuery/testCrossJoin.result  | 127 ++++++
 .../testCrossJoinWithExplicitJoinQual.result    |  27 ++
 .../TestJoinQuery/testFullOuterJoin1.result     |   7 +
 .../TestJoinQuery/testJoinAndCaseWhen.result    |  27 ++
 .../TestJoinQuery/testJoinRefEval.result        |  27 ++
 .../TestJoinQuery/testLeftOuterJoin1.result     |   7 +
 .../TestJoinQuery/testRightOuterJoin1.result    |   7 +
 .../results/TestJoinQuery/testTPCHQ2Join.result |   5 +
 .../results/TestSelectQuery/testCaseWhen.result |   7 +
 .../testCaseWhenWithoutElse.result              |   7 +
 .../results/TestSelectQuery/testInClause.result |   5 +
 .../TestSelectQuery/testInStrClause.result      |   4 +
 .../TestSelectQuery/testLikeClause.result       |   9 +
 .../results/TestSelectQuery/testLimit.result    |   5 +
 .../results/TestSelectQuery/testNotEqual.result |   5 +
 .../TestSelectQuery/testNotInClause.result      |   4 +
 .../TestSelectQuery/testNotInStrClause.result   |   4 +
 .../TestSelectQuery/testRealValueCompare.result |   3 +
 .../results/TestSelectQuery/testSelect.result   |   7 +
 .../results/TestSelectQuery/testSelect2.result  |   7 +
 .../results/TestSelectQuery/testSelect3.result  |   7 +
 .../TestSelectQuery/testSelectAsterik.result    |   7 +
 .../TestSelectQuery/testSelectDistinct.result   |   7 +
 .../TestSelectQuery/testStringCompare.result    |   5 +
 .../results/TestSelectQuery/testUnion1.result   |  10 +
 .../results/TestSelectQuery/testUnion2.result   |  12 +
 .../TestSelectQuery/testWhereCond1.result       |   3 +
 .../testWhereCondWithAlias1.result              |   4 +
 .../testWhereCondWithAlias2.result              |   4 +
 .../results/TestSortQuery/testSort.result       |   7 +
 .../TestSortQuery/testSortAfterGroupby.result   |   5 +
 .../testSortAfterGroupbyWithAlias.result        |   5 +
 .../results/TestSortQuery/testSortDesc.result   |   7 +
 .../testSortWithAliasButOriginalName.result     |   7 +
 .../TestSortQuery/testSortWithAliasKey.result   |   7 +
 .../results/TestSortQuery/testTopK.result       |   5 +
 .../results/TestTPCH/testQ1OrderBy.result       |   4 +
 .../results/TestTPCH/testQ2FourJoins.result     |   3 +
 .../results/TestTPCH/testTPCH14Expr.result      |   3 +
 .../testGroupBySubQuery.result                  |   3 +
 .../TestTableSubQuery/testJoinSubQuery.result   |   4 +
 .../TestTableSubQuery/testJoinSubQuery2.result  |   4 +
 .../TestTableSubQuery/testTableSubquery1.result |   7 +
 .../test/resources/results/testBuildDDL.result  |   5 +
 .../src/test/results/testBuildDDL.result        |   5 -
 324 files changed, 2039 insertions(+), 1942 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 169be3f..1efed2c 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -110,7 +110,10 @@ Release 0.8.0 - unreleased
 
   IMPROVEMENTS
 
-    TAJO-464: Rename the name 'partition', actually meaning shuffle to 
+    TAJO-476: Add a test development kit for unit tests based on executions
+    of queries. (hyunsik)
+
+    TAJO-464: Rename the name 'partition', actually meaning shuffle to
     'shuffle'. (hyunsik)
 
     TAJO-385: Refactoring TaskScheduler to assign multiple fragments. (jihoon)

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/StatisticsUtil.java
----------------------------------------------------------------------
diff --git a/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/StatisticsUtil.java b/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/StatisticsUtil.java
index 8e411b5..b836818 100644
--- a/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/StatisticsUtil.java
+++ b/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/StatisticsUtil.java
@@ -22,8 +22,6 @@ import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import java.util.Collection;
-import java.util.Iterator;
 import java.util.List;
 
 public class StatisticsUtil {
@@ -102,31 +100,4 @@ public class StatisticsUtil {
 
     return aggregated;
   }
-
-  public static TableStats computeStatFromUnionBlock(Collection<TableStats> stats) {
-    TableStats stat = new TableStats();
-    TableStats childStat;
-    long avgRows = 0, numBytes = 0, numRows = 0;
-    int numBlocks = 0, numPartitions = 0;
-    List<ColumnStats> columnStatses = Lists.newArrayList();
-
-    Iterator<TableStats> it = stats.iterator();
-    while (it.hasNext()) {
-      childStat = it.next();
-      avgRows += childStat.getAvgRows();
-      columnStatses.addAll(childStat.getColumnStats());
-      numBlocks += childStat.getNumBlocks();
-      numBytes += childStat.getNumBytes();
-      numPartitions += childStat.getNumShuffleOutputs();
-      numRows += childStat.getNumRows();
-    }
-
-    stat.setColumnStats(columnStatses);
-    stat.setNumBlocks(numBlocks);
-    stat.setNumBytes(numBytes);
-    stat.setNumShuffleOutputs(numPartitions);
-    stat.setNumRows(numRows);
-    stat.setAvgRows(avgRows);
-    return stat;
-  }
 }
\ No newline at end of file
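
A note on the deletion above: the only caller of computeStatFromUnionBlock (Repartitioner, later in this patch) now goes through a new computeChildBlocksStats helper, which delegates to StatisticsUtil.aggregateTableStat. Below is a minimal sketch (not part of this patch) of that call shape, assuming TableStats lives alongside StatisticsUtil in org.apache.tajo.catalog.statistics and that aggregateTableStat sums per-block counters much as the removed method did.

    import org.apache.tajo.catalog.statistics.StatisticsUtil;  // assumed package, as in this file
    import org.apache.tajo.catalog.statistics.TableStats;      // assumed to live alongside StatisticsUtil

    import java.util.ArrayList;
    import java.util.List;

    public class AggregateStatsSketch {
      public static void main(String[] args) {
        // Per-child-block statistics collected from sub queries (values are illustrative).
        TableStats left = new TableStats();
        left.setNumRows(100);
        left.setNumBytes(4096);

        TableStats right = new TableStats();
        right.setNumRows(50);
        right.setNumBytes(2048);

        List<TableStats> childStats = new ArrayList<TableStats>();
        childStats.add(left);
        childStats.add(right);

        // This is the call that replaces computeStatFromUnionBlock(...) at its former call site.
        TableStats total = StatisticsUtil.aggregateTableStat(childStats);
        System.out.println(total.getNumRows() + " rows, " + total.getNumBytes() + " bytes in total");
      }
    }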

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/engine/planner/global/GlobalPlanner.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/engine/planner/global/GlobalPlanner.java b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/engine/planner/global/GlobalPlanner.java
index 2b3ecea..d05ac46 100644
--- a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/engine/planner/global/GlobalPlanner.java
+++ b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/engine/planner/global/GlobalPlanner.java
@@ -314,24 +314,44 @@ public class GlobalPlanner {
     ExecutionBlock currentBlock;
 
     SortNode firstSortNode = PlannerUtil.clone(context.plan.getLogicalPlan(), currentNode);
-    LogicalNode childBlockPlan = childBlock.getPlan();
-    firstSortNode.setChild(childBlockPlan);
-    // sort is a non-projectable operator. So, in/out schemas are the same to its child operator.
-    firstSortNode.setInSchema(childBlockPlan.getOutSchema());
-    firstSortNode.setOutSchema(childBlockPlan.getOutSchema());
-    childBlock.setPlan(firstSortNode);
 
-    currentBlock = masterPlan.newExecutionBlock();
-    DataChannel channel = new DataChannel(childBlock, currentBlock, RANGE_SHUFFLE, 32);
-    channel.setShuffleKeys(PlannerUtil.sortSpecsToSchema(currentNode.getSortKeys()).toArray());
-    channel.setSchema(firstSortNode.getOutSchema());
-    channel.setStoreType(storeType);
+    if (firstSortNode.getChild().getType() == NodeType.TABLE_SUBQUERY &&
+        ((TableSubQueryNode)firstSortNode.getChild()).getSubQuery().getType() == NodeType.UNION) {
 
-    ScanNode secondScan = buildInputExecutor(masterPlan.getLogicalPlan(), channel);
-    currentNode.setChild(secondScan);
-    currentNode.setInSchema(secondScan.getOutSchema());
-    currentBlock.setPlan(currentNode);
-    masterPlan.addConnect(channel);
+      currentBlock = childBlock;
+      for (DataChannel channel : masterPlan.getIncomingChannels(childBlock.getId())) {
+        channel.setShuffle(RANGE_SHUFFLE, PlannerUtil.sortSpecsToSchema(currentNode.getSortKeys()).toArray(), 32);
+        channel.setSchema(firstSortNode.getOutSchema());
+
+        ExecutionBlock subBlock = masterPlan.getExecBlock(channel.getSrcId());
+        SortNode s1 = PlannerUtil.clone(context.plan.getLogicalPlan(), firstSortNode);
+        s1.setChild(subBlock.getPlan());
+        subBlock.setPlan(s1);
+
+        ScanNode secondScan = buildInputExecutor(masterPlan.getLogicalPlan(), channel);
+        currentNode.setChild(secondScan);
+        currentNode.setInSchema(secondScan.getOutSchema());
+        currentBlock.setPlan(currentNode);
+      }
+    } else {
+      LogicalNode childBlockPlan = childBlock.getPlan();
+      firstSortNode.setChild(childBlockPlan);
+      // sort is a non-projectable operator. So, in/out schemas are the same to its child operator.
+      firstSortNode.setInSchema(childBlockPlan.getOutSchema());
+      firstSortNode.setOutSchema(childBlockPlan.getOutSchema());
+      childBlock.setPlan(firstSortNode);
+
+      currentBlock = masterPlan.newExecutionBlock();
+      DataChannel channel = new DataChannel(childBlock, currentBlock, RANGE_SHUFFLE, 32);
+      channel.setShuffleKeys(PlannerUtil.sortSpecsToSchema(currentNode.getSortKeys()).toArray());
+      channel.setSchema(firstSortNode.getOutSchema());
+
+      ScanNode secondScan = buildInputExecutor(masterPlan.getLogicalPlan(), channel);
+      currentNode.setChild(secondScan);
+      currentNode.setInSchema(secondScan.getOutSchema());
+      currentBlock.setPlan(currentNode);
+      masterPlan.addConnect(channel);
+    }
 
     return currentBlock;
   }
@@ -360,7 +380,7 @@ public class GlobalPlanner {
     // 2. create a new execution block, pipeline 2 exec blocks through a DataChannel
     MasterPlan masterPlan = context.plan;
     ExecutionBlock currentBlock = masterPlan.newExecutionBlock();
-    DataChannel channel = null;
+    DataChannel channel;
     CatalogProtos.PartitionsType partitionsType = partitionDesc.getPartitionsType();
     if(partitionsType == CatalogProtos.PartitionsType.COLUMN) {
       channel = new DataChannel(childBlock, currentBlock, HASH_SHUFFLE, 32);
@@ -389,8 +409,7 @@ public class GlobalPlanner {
     @Override
     public LogicalNode visitRoot(GlobalPlanContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                  LogicalRootNode node, Stack<LogicalNode> stack) throws PlanningException {
-      LogicalNode child = super.visitRoot(context, plan, block, node, stack);
-      return child;
+      return super.visitRoot(context, plan, block, node, stack);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/engine/planner/logical/join/GreedyHeuristicJoinOrderAlgorithm.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/engine/planner/logical/join/GreedyHeuristicJoinOrderAlgorithm.java b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/engine/planner/logical/join/GreedyHeuristicJoinOrderAlgorithm.java
index cbdad1a..1b2e4d9 100644
--- a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/engine/planner/logical/join/GreedyHeuristicJoinOrderAlgorithm.java
+++ b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/engine/planner/logical/join/GreedyHeuristicJoinOrderAlgorithm.java
@@ -27,7 +27,7 @@ import org.apache.tajo.engine.planner.PlanningException;
 import org.apache.tajo.engine.planner.logical.*;
 import org.apache.tajo.engine.utils.SchemaUtil;
 
-import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.Set;
 
 /**
@@ -43,7 +43,9 @@ public class GreedyHeuristicJoinOrderAlgorithm implements JoinOrderAlgorithm {
                                       Set<String> relationsWithoutQual) throws PlanningException {
 
     // Setup a remain relation set to be joined
-    Set<LogicalNode> remainRelations = new HashSet<LogicalNode>();
+    // Why LinkedHashSet? It keeps the join enumeration order deterministic.
+    // Otherwise, join orders could differ across runs even when join costs are equal.
+    Set<LogicalNode> remainRelations = new LinkedHashSet<LogicalNode>();
     for (RelationNode relation : block.getRelations()) {
       remainRelations.add(relation);
     }
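
The comment added above explains the switch from HashSet to LinkedHashSet. Here is a short, self-contained sketch (not part of this patch) of the property being relied on: LinkedHashSet iterates in insertion order, while HashSet's order depends on hashing, which is what made tied-cost join orders non-deterministic.

    import java.util.HashSet;
    import java.util.LinkedHashSet;
    import java.util.Set;

    public class IterationOrderSketch {
      public static void main(String[] args) {
        Set<String> hashed = new HashSet<String>();
        Set<String> linked = new LinkedHashSet<String>();
        // Relation names are illustrative (TPC-H tables used elsewhere in the tests).
        for (String rel : new String[]{"lineitem", "orders", "customer", "part"}) {
          hashed.add(rel);
          linked.add(rel);
        }
        // HashSet iteration order depends on hash codes and table capacity, so the
        // candidate relations may be visited in a different order on another JVM or run.
        System.out.println("HashSet order:       " + hashed);
        // LinkedHashSet always iterates in insertion order, so ties in join cost are
        // broken the same way every time, keeping the chosen join order deterministic.
        System.out.println("LinkedHashSet order: " + linked);
      }
    }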

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/DefaultTaskScheduler.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/DefaultTaskScheduler.java b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/DefaultTaskScheduler.java
index 5c986ec..82788fd 100644
--- a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/DefaultTaskScheduler.java
+++ b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/DefaultTaskScheduler.java
@@ -126,7 +126,9 @@ public class DefaultTaskScheduler extends AbstractTaskScheduler {
   @Override
   public void stop() {
     stopEventHandling = true;
-    schedulingThread.interrupt();
+    if (schedulingThread != null) {
+      schedulingThread.interrupt();
+    }
 
     // Return all of request callbacks instantly.
     for (TaskRequestEvent req : taskRequests.taskRequestQueue) {

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/GlobalEngine.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/GlobalEngine.java b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/GlobalEngine.java
index 7336f2f..ed7828e 100644
--- a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/GlobalEngine.java
+++ b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/GlobalEngine.java
@@ -276,6 +276,7 @@ public class GlobalEngine extends AbstractService {
     }
 
     if(!fs.exists(path)) {
+      LOG.error("ERROR: " + path.toUri() + " does not exist");
       throw new IOException("ERROR: " + path.toUri() + " does not exist");
     }
 
@@ -284,7 +285,7 @@ public class GlobalEngine extends AbstractService {
     try {
       totalSize = sm.calculateSize(path);
     } catch (IOException e) {
-      LOG.error("Cannot calculate the size of the relation", e);
+      LOG.warn("Cannot calculate the size of the relation", e);
     }
 
     TableStats stats = new TableStats();

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/Query.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/Query.java b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/Query.java
index f8c335b..22c3a35 100644
--- a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/Query.java
+++ b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/Query.java
@@ -307,8 +307,7 @@ public class Query implements EventHandler<QueryEvent> {
           SubQuery nextSubQuery = new SubQuery(query.context, query.getPlan(), nextBlock, query.sm);
           nextSubQuery.setPriority(query.priority--);
           query.addSubQuery(nextSubQuery);
-          nextSubQuery.handle(new SubQueryEvent(nextSubQuery.getId(),
-              SubQueryEventType.SQ_INIT));
+          nextSubQuery.handle(new SubQueryEvent(nextSubQuery.getId(), SubQueryEventType.SQ_INIT));
           LOG.info("Scheduling SubQuery:" + nextSubQuery.getId());
           if(LOG.isDebugEnabled()) {
             LOG.debug("Scheduling SubQuery's Priority: " + nextSubQuery.getPriority());

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/Repartitioner.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/Repartitioner.java b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/Repartitioner.java
index 2eed5d8..0f4a62b 100644
--- a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/Repartitioner.java
+++ b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/Repartitioner.java
@@ -252,37 +252,45 @@ public class Repartitioner {
   }
 
   public static void scheduleFragmentsForNonLeafTasks(TaskSchedulerContext schedulerContext,
-                                                      MasterPlan masterPlan, SubQuery subQuery, SubQuery childSubQuery,
-                                                      DataChannel channel, int maxNum)
+                                                      MasterPlan masterPlan, SubQuery subQuery, int maxNum)
       throws InternalException {
+    DataChannel channel = masterPlan.getIncomingChannels(subQuery.getBlock().getId()).get(0);
     if (channel.getShuffleType() == HASH_SHUFFLE) {
       scheduleHashShuffledFetches(schedulerContext, masterPlan, subQuery, channel, maxNum);
     } else if (channel.getShuffleType() == RANGE_SHUFFLE) {
-      scheduleRangeShuffledFetches(schedulerContext, subQuery, childSubQuery, channel, maxNum);
+      scheduleRangeShuffledFetches(schedulerContext, masterPlan, subQuery, channel, maxNum);
     } else {
       throw new InternalException("Cannot support partition type");
     }
   }
 
-  public static void scheduleRangeShuffledFetches(TaskSchedulerContext schedulerContext, SubQuery subQuery,
-                                                  SubQuery childSubQuery, DataChannel channel, int maxNum)
-      throws InternalException {
-    ExecutionBlock execBlock = subQuery.getBlock();
-    TableStats stat = childSubQuery.getTableStat();
-    if (stat.getNumRows() == 0) {
-      return;
+  private static TableStats computeChildBlocksStats(QueryMasterTask.QueryMasterTaskContext context, MasterPlan masterPlan,
+                                                    ExecutionBlockId parentBlockId) {
+    List<TableStats> tableStatses = new ArrayList<TableStats>();
+    List<ExecutionBlock> childBlocks = masterPlan.getChilds(parentBlockId);
+    for (ExecutionBlock childBlock : childBlocks) {
+      SubQuery childExecSM = context.getSubQuery(childBlock.getId());
+      tableStatses.add(childExecSM.getTableStat());
     }
+    return StatisticsUtil.aggregateTableStat(tableStatses);
+  }
 
+  public static void scheduleRangeShuffledFetches(TaskSchedulerContext schedulerContext, MasterPlan masterPlan,
+                                                  SubQuery subQuery, DataChannel channel, int maxNum)
+      throws InternalException {
+    ExecutionBlock execBlock = subQuery.getBlock();
     ScanNode scan = execBlock.getScanNodes()[0];
     Path tablePath;
     tablePath = subQuery.getContext().getStorageManager().getTablePath(scan.getTableName());
 
-    SortNode sortNode = PlannerUtil.findTopNode(childSubQuery.getBlock().getPlan(), NodeType.SORT);
+    ExecutionBlock sampleChildBlock = masterPlan.getChild(subQuery.getId(), 0);
+    SortNode sortNode = PlannerUtil.findTopNode(sampleChildBlock.getPlan(), NodeType.SORT);
     SortSpec [] sortSpecs = sortNode.getSortKeys();
     Schema sortSchema = new Schema(channel.getShuffleKeys());
 
     // calculate the number of maximum query ranges
-    TupleRange mergedRange = TupleUtil.columnStatToRange(channel.getSchema(), sortSchema, stat.getColumnStats());
+    TableStats totalStat = computeChildBlocksStats(subQuery.getContext(), masterPlan, subQuery.getId());
+    TupleRange mergedRange = TupleUtil.columnStatToRange(channel.getSchema(), sortSchema, totalStat.getColumnStats());
     RangePartitionAlgorithm partitioner = new UniformRangePartition(sortSchema, mergedRange);
     BigDecimal card = partitioner.getTotalCardinality();
 
@@ -305,12 +313,14 @@ public class Repartitioner {
     SubQuery.scheduleFragment(subQuery, dummyFragment);
 
     List<String> basicFetchURIs = new ArrayList<String>();
-
-    for (QueryUnit qu : childSubQuery.getQueryUnits()) {
-      for (IntermediateEntry p : qu.getIntermediateData()) {
-        String uri = createBasicFetchUri(p.getPullHost(), p.getPullPort(),
-            childSubQuery.getId(), p.taskId, p.attemptId);
-        basicFetchURIs.add(uri);
+    List<ExecutionBlock> childBlocks = masterPlan.getChilds(subQuery.getId());
+    for (ExecutionBlock childBlock : childBlocks) {
+      SubQuery childExecSM = subQuery.getContext().getSubQuery(childBlock.getId());
+      for (QueryUnit qu : childExecSM.getQueryUnits()) {
+        for (IntermediateEntry p : qu.getIntermediateData()) {
+          String uri = createBasicFetchUri(p.getPullHost(), p.getPullPort(), childBlock.getId(), p.taskId, p.attemptId);
+          basicFetchURIs.add(uri);
+        }
       }
     }
 
@@ -345,7 +355,7 @@ public class Repartitioner {
   }
 
   public static void scheduleFetchesByRoundRobin(SubQuery subQuery, Map<?, Set<URI>> partitions,
-                                                 String tableName, int num) {
+                                                   String tableName, int num) {
     int i;
     Map<String, List<URI>>[] fetchesArray = new Map[num];
     for (i = 0; i < num; i++) {
@@ -381,14 +391,7 @@ public class Repartitioner {
                                                  SubQuery subQuery, DataChannel channel,
                                                  int maxNum) {
     ExecutionBlock execBlock = subQuery.getBlock();
-
-    List<TableStats> tableStatses = new ArrayList<TableStats>();
-    List<ExecutionBlock> childBlocks = masterPlan.getChilds(subQuery.getId());
-    for (ExecutionBlock childBlock : childBlocks) {
-      SubQuery childExecSM = subQuery.getContext().getSubQuery(childBlock.getId());
-      tableStatses.add(childExecSM.getTableStat());
-    }
-    TableStats totalStat = StatisticsUtil.computeStatFromUnionBlock(tableStatses);
+    TableStats totalStat = computeChildBlocksStats(subQuery.getContext(), masterPlan, subQuery.getId());
 
     if (totalStat.getNumRows() == 0) {
       return;
@@ -406,7 +409,7 @@ public class Repartitioner {
     Map<String, List<IntermediateEntry>> hashedByHost;
     Map<Integer, List<URI>> finalFetchURI = new HashMap<Integer, List<URI>>();
 
-    for (ExecutionBlock block : childBlocks) {
+    for (ExecutionBlock block : masterPlan.getChilds(execBlock)) {
       List<IntermediateEntry> partitions = new ArrayList<IntermediateEntry>();
       for (QueryUnit tasks : subQuery.getContext().getSubQuery(block.getId()).getQueryUnits()) {
         if (tasks.getIntermediateData() != null) {

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/SubQuery.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/SubQuery.java b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/SubQuery.java
index 3e44959..91c403d 100644
--- a/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/SubQuery.java
+++ b/tajo-core/tajo-core-backend/src/main/java/org/apache/tajo/master/querymaster/SubQuery.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.Priority;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.event.Event;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.state.*;
 import org.apache.hadoop.yarn.util.Records;
@@ -77,7 +78,7 @@ public class SubQuery implements EventHandler<SubQueryEvent> {
   private Schema schema;
   private TableMeta meta;
   private TableStats statistics;
-  private EventHandler eventHandler;
+  private EventHandler<Event> eventHandler;
   private final AbstractStorageManager sm;
   private AbstractTaskScheduler taskScheduler;
   private QueryMasterTask.QueryMasterTaskContext context;
@@ -233,7 +234,7 @@ public class SubQuery implements EventHandler<SubQueryEvent> {
     return masterPlan.getOutgoingChannels(getId()).iterator().next();
   }
 
-  public EventHandler getEventHandler() {
+  public EventHandler<Event> getEventHandler() {
     return eventHandler;
   }
 
@@ -666,11 +667,10 @@ public class SubQuery implements EventHandler<SubQueryEvent> {
         Repartitioner.scheduleFragmentsForJoinQuery(subQuery.schedulerContext, subQuery);
       } else { // Case 3: Others (Sort or Aggregation)
         int numTasks = getNonLeafTaskNum(subQuery);
-        ExecutionBlockId childId = masterPlan.getChilds(subQuery.getBlock()).get(0).getId();
-        SubQuery child = subQuery.context.getSubQuery(childId);
-        DataChannel channel = masterPlan.getChannel(child.getId(), subQuery.getId());
-        Repartitioner.scheduleFragmentsForNonLeafTasks(subQuery.schedulerContext, masterPlan, subQuery, child,
-            channel, numTasks);
+//        ExecutionBlockId childId = masterPlan.getChilds(subQuery.getBlock()).get(0).getId();
+//        SubQuery child = subQuery.context.getSubQuery(childId);
+//        DataChannel channel = masterPlan.getChannel(child.getId(), subQuery.getId());
+        Repartitioner.scheduleFragmentsForNonLeafTasks(subQuery.schedulerContext, masterPlan, subQuery, numTasks);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/QueryTestCaseBase.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/QueryTestCaseBase.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/QueryTestCaseBase.java
new file mode 100644
index 0000000..fd280c7
--- /dev/null
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/QueryTestCaseBase.java
@@ -0,0 +1,393 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo;
+
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.tajo.algebra.CreateTable;
+import org.apache.tajo.algebra.DropTable;
+import org.apache.tajo.algebra.Expr;
+import org.apache.tajo.algebra.OpType;
+import org.apache.tajo.client.TajoClient;
+import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.engine.parser.SQLAnalyzer;
+import org.apache.tajo.storage.StorageUtil;
+import org.apache.tajo.util.FileUtil;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.rules.TestName;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.junit.Assert.*;
+
+/**
+ * (Note that this class is not thread-safe. Do not run Maven tests in any parallel mode.)
+ * <br />
+ * <code>QueryTestCaseBase</code> provides useful methods to easily execute queries and verify their results.
+ *
+ * This class basically uses the following resource directories:
+ * <ul>
+ *   <li>src/test/resources/dataset - the dataset directory. It contains sub directories, each of which
+ *   corresponds to a test class. All data files in a sub directory can be used by the corresponding test class.</li>
+ *
+ *   <li>src/test/resources/queries - the query directory. It contains sub directories, each of which
+ *   corresponds to a test class. All query files in a sub directory can be used by the corresponding test
+ *   class.</li>
+ *
+ *   <li>src/test/resources/results - the result directory. It contains sub directories, each of which
+ *   corresponds to a test class. All result files in a sub directory can be used by the corresponding test class.
+ *   </li>
+ * </ul>
+ *
+ * For example, if you create a test class named <code>TestJoinQuery</code>, you should create the corresponding
+ * dataset, query, and result directories as follows:
+ *
+ * <pre>
+ *   src-|
+ *       |- resources
+ *             |- dataset
+ *             |     |- TestJoinQuery
+ *             |              |- table1.tbl
+ *             |              |- table2.tbl
+ *             |
+ *             |- queries
+ *             |     |- TestJoinQuery
+ *             |              |- TestInnerJoin.sql
+ *             |              |- table1_ddl.sql
+ *             |              |- table2_ddl.sql
+ *             |
+ *             |- results
+ *                   |- TestJoinQuery
+ *                            |- TestInnerJoin.result
+ * </pre>
+ *
+ * <code>QueryTestCaseBase</code> basically provides the following methods:
+ * <ul>
+ *  <li><code>{@link #executeQuery()}</code> - executes the corresponding query and returns a ResultSet instance</li>
+ *  <li><code>{@link #executeQuery(String)}</code> - executes the query contained in the given file, resolved against
+ *  the current class's query directory</li>
+ *  <li><code>assertResultSet()</code> - checks whether the query result is equivalent to the expected result contained
+ *  in the corresponding result file in the current class's result directory.</li>
+ *  <li><code>cleanupQuery()</code> - cleans up all resources</li>
+ *  <li><code>executeDDL()</code> - executes a DDL query such as create or drop table.</li>
+ * </ul>
+ *
+ * In order to make use of the above methods, query and result files must be organized as follows:
+ * <ul>
+ *  <li>Each query file must be located in the subdirectory src/test/resources/queries/${ClassName},
+ *  where ${ClassName} is the simple name of the actual test class.</li>
+ *  <li>Each result file must be located in the subdirectory src/test/resources/results/${ClassName},
+ *  where ${ClassName} is the simple name of the actual test class.</li>
+ * </ul>
+ *
+ * In particular, the {@link #executeQuery()} and {@link #assertResultSet(java.sql.ResultSet)} methods automatically find
+ * the query file to be executed and the result file to be compared, corresponding to the running test class and method.
+ * For this to work, query and result files must additionally follow these conventions:
+ * <ul>
+ *  <li>Each result file must have the file extension '.result'</li>
+ *  <li>Each query file must have the file extension '.sql'.</li>
+ * </ul>
+ */
+public class QueryTestCaseBase {
+
+  protected static final TpchTestBase testingCluster;
+  protected static TajoConf conf;
+  protected static TajoClient client;
+  protected static SQLAnalyzer sqlParser = new SQLAnalyzer();
+
+  /** the base path of dataset directories */
+  protected static final Path datasetBasePath;
+  /** the base path of query directories */
+  protected static final Path queryBasePath;
+  /** the base path of result directories */
+  protected static final Path resultBasePath;
+
+  static {
+    testingCluster = TpchTestBase.getInstance();
+    conf = testingCluster.getTestingCluster().getConfiguration();
+    URL datasetBaseURL = ClassLoader.getSystemResource("dataset");
+    datasetBasePath = new Path(datasetBaseURL.toString());
+    URL queryBaseURL = ClassLoader.getSystemResource("queries");
+    queryBasePath = new Path(queryBaseURL.toString());
+    URL resultBaseURL = ClassLoader.getSystemResource("results");
+    resultBasePath = new Path(resultBaseURL.toString());
+  }
+
+  /** It transiently contains created tables for the running test class. */
+  private static Set<String> createdTableSet = new HashSet<String>();
+  // queries and results directory corresponding to subclass class.
+  private Path currentQueryPath;
+  private Path currentResultPath;
+  private Path currentDatasetPath;
+
+  // for getting a method name
+  @Rule public TestName name= new TestName();
+
+  @BeforeClass
+  public static void setUpClass() throws IOException {
+    conf = testingCluster.getTestingCluster().getConfiguration();
+    client = new TajoClient(conf);
+  }
+
+  @AfterClass
+  public static void tearDownClass() throws ServiceException {
+    for (String tableName : createdTableSet) {
+      client.dropTable(tableName, false);
+    }
+    createdTableSet.clear();
+    client.close();
+  }
+
+  @Before
+  public void setUp() {
+    String className = getClass().getSimpleName();
+    currentQueryPath = new Path(queryBasePath, className);
+    currentResultPath = new Path(resultBasePath, className);
+    currentDatasetPath = new Path(datasetBasePath, className);
+  }
+
+  /**
+   * Executes the query contained in the file located at src/test/resources/queries/<i>ClassName</i>/<i>MethodName</i>.sql.
+   * <i>ClassName</i> and <i>MethodName</i> are the names of the actually executed test class and method.
+   *
+   * @return ResultSet of query execution.
+   */
+  public ResultSet executeQuery() throws Exception {
+    return executeQuery(name.getMethodName() + ".sql");
+  }
+
+  /**
+   * Executes the query contained in the given file. This method looks for the file within the directory
+   * src/test/resources/queries/<i>ClassName</i>.
+   *
+   * @param queryFileName The file name to be used to execute a query.
+   * @return ResultSet of query execution.
+   */
+  public ResultSet executeQuery(String queryFileName) throws Exception {
+    Path queryFilePath = getQueryFilePath(queryFileName);
+    FileSystem fs = currentQueryPath.getFileSystem(testingCluster.getTestingCluster().getConfiguration());
+    assertTrue(queryFilePath.toString() + " existence check", fs.exists(queryFilePath));
+    ResultSet result = testingCluster.execute(FileUtil.readTextFile(new File(queryFilePath.toUri())));
+    assertNotNull("Query succeeded test", result);
+    return result;
+  }
+
+  /**
+   * Asserts that the actual query result is equivalent to the expected result.
+   * If it is not, an AssertionError is thrown.
+   *
+   * @param result Query result to be compared.
+   */
+  public final void assertResultSet(ResultSet result) throws IOException {
+    assertResultSet("Result Verification", result, name.getMethodName() + ".result");
+  }
+
+  /**
+   * Asserts that the actual query result is equivalent to the expected result.
+   * If it is not, an AssertionError is thrown.
+   *
+   * @param result Query result to be compared.
+   * @param resultFileName The file name containing the result to be compared
+   */
+  public final void assertResultSet(ResultSet result, String resultFileName) throws IOException {
+    assertResultSet("Result Verification", result, resultFileName);
+  }
+
+  /**
+   * Asserts that the actual query result is equivalent to the expected result.
+   * If it is not, an AssertionError with the given message is thrown.
+   *
+   * @param message The message to be printed if the assertion fails.
+   * @param result Query result to be compared.
+   */
+  public final void assertResultSet(String message, ResultSet result, String resultFileName) throws IOException {
+    FileSystem fs = currentQueryPath.getFileSystem(testingCluster.getTestingCluster().getConfiguration());
+    Path resultFile = getResultFile(resultFileName);
+    assertTrue(resultFile.toString() + " existence check", fs.exists(resultFile));
+    try {
+      verifyResult(message, result, resultFile);
+    } catch (SQLException e) {
+      throw new IOException(e);
+    }
+  }
+
+  /**
+   * Release all resources
+   *
+   * @param resultSet ResultSet
+   */
+  public final void cleanupQuery(ResultSet resultSet) throws IOException {
+    try {
+      resultSet.close();
+    } catch (SQLException e) {
+      throw new IOException(e);
+    }
+  }
+
+  public void assertTableExists(String tableName) throws ServiceException {
+    assertTrue(client.existTable(tableName));
+  }
+
+  /**
+   * It transforms a ResultSet instance to rows represented as strings.
+   *
+   * @param resultSet ResultSet that contains a query result
+   * @return String
+   * @throws SQLException
+   */
+  public String resultSetToString(ResultSet resultSet) throws SQLException {
+    StringBuilder sb = new StringBuilder();
+    ResultSetMetaData rsmd = resultSet.getMetaData();
+    int numOfColumns = rsmd.getColumnCount();
+
+    for (int i = 1; i <= numOfColumns; i++) {
+      if (i > 1) sb.append(",");
+      String columnName = rsmd.getColumnName(i);
+      sb.append(columnName);
+    }
+    sb.append("\n-------------------------------\n");
+
+    while (resultSet.next()) {
+      for (int i = 1; i <= numOfColumns; i++) {
+        if (i > 1) sb.append(",");
+        String columnValue = resultSet.getObject(i).toString();
+        sb.append(columnValue);
+      }
+      sb.append("\n");
+    }
+    return sb.toString();
+  }
+
+  private void verifyResult(String message, ResultSet res, Path resultFile) throws SQLException, IOException {
+    String actualResult = resultSetToString(res);
+    String expectedResult = FileUtil.readTextFile(new File(resultFile.toUri()));
+    assertEquals(message, expectedResult.trim(), actualResult.trim());
+  }
+
+  private Path getQueryFilePath(String fileName) {
+    return StorageUtil.concatPath(currentQueryPath, fileName);
+  }
+
+  private Path getResultFile(String fileName) {
+    return StorageUtil.concatPath(currentResultPath, fileName);
+  }
+
+  private Path getDataSetFile(String fileName) {
+    return StorageUtil.concatPath(currentDatasetPath, fileName);
+  }
+
+  /**
+   *
+   * Executes a data definition language (DDL) template. A general SQL DDL statement can be included in this file, but
+   * for a user-specified table name or an exact external table path, you must use format strings to indicate them.
+   * Each format string is replaced by the corresponding argument.
+   *
+   * The predefined format strings are as follows:
+   * <ul>
+   *   <li>${table.path} - replaced by the absolute file path that <code>dataFileName</code> points to.</li>
+   *   <li>${i} - replaced by the corresponding element of <code>args</code>. For example, ${0} and ${1} are
+   *   replaced by the first and second elements of <code>args</code> respectively. The index is zero-based.</li>
+   * </ul>
+   *
+   * @param ddlFileName A file name, containing a data definition statement.
+   * @param dataFileName A file name containing data rows, whose columns must be separated by a vertical bar '|'.
+   *                     This file name is used to replace the format string indicating an external table location.
+   * @param args A list of arguments, each of which replaces the corresponding variable of the form ${i}.
+   * @return The table name created
+   */
+  public String executeDDL(String ddlFileName, String dataFileName, String ... args) throws Exception {
+    return executeDDL(ddlFileName, dataFileName, true, args);
+  }
+
+  private String executeDDL(String ddlFileName, String dataFileName, boolean isLocalTable, String ... args)
+      throws Exception {
+
+    Path ddlFilePath = new Path(currentQueryPath, ddlFileName);
+    FileSystem fs = ddlFilePath.getFileSystem(conf);
+    assertTrue(ddlFilePath + " existence check", fs.exists(ddlFilePath));
+
+    String template = FileUtil.readTextFile(new File(ddlFilePath.toUri()));
+    String dataFilePath = null;
+    if (dataFileName != null) {
+      dataFilePath = getDataSetFile(dataFileName).toString();
+    }
+    String compiled = compileTemplate(template, dataFilePath, args);
+
+    // parse a statement
+    Expr expr = sqlParser.parse(compiled);
+    assertNotNull(ddlFilePath + " cannot be parsed", expr);
+
+    String tableName = null;
+    if (expr.getType() == OpType.CreateTable) {
+      CreateTable createTable = (CreateTable) expr;
+      tableName = createTable.getTableName();
+      client.updateQuery(compiled);
+      assertTrue("table '" + tableName  + "' creation check", client.existTable(tableName));
+      if (isLocalTable) {
+        createdTableSet.add(tableName);
+      }
+    } else if (expr.getType() == OpType.DropTable) {
+      DropTable dropTable = (DropTable) expr;
+      tableName = dropTable.getTableName();
+      assertTrue("table '" + tableName + "' existence check", client.existTable(tableName));
+      client.updateQuery(compiled);
+      assertFalse("table '" + tableName + "' dropped check", client.existTable(tableName));
+      if (isLocalTable) {
+        createdTableSet.remove(tableName);
+      }
+    } else {
+      assertTrue(ddlFilePath + " is not a Create or Drop Table statement", false);
+    }
+
+    return tableName;
+  }
+
+  /**
+   * Replaces format strings with the given parameters.
+   *
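+   * For instance (hypothetical values), compiling the template
+   * <code>select * from ${0} where l_orderkey > ${1}</code> with args {"lineitem", "2"} yields
+   * <code>select * from lineitem where l_orderkey > 2</code>.
+   *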
+   * @param template The template string containing format strings
+   * @param dataFileName The data file name that replaces <code>${table.path}</code>
+   * @param args The arguments that replace each corresponding format string ${i}, using a zero-based index.
+   * @return The compiled string
+   */
+  private String compileTemplate(String template, String dataFileName, String... args) {
+    String result;
+    if (dataFileName != null) {
+      result = template.replace("${table.path}", "'" + dataFileName + "'");
+    } else {
+      result = template;
+    }
+
+    for (int i = 0; i < args.length; i++) {
+      result = result.replace("${" + i + "}", args[i]);
+    }
+    return result;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/benchmark/TestTPCH.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/benchmark/TestTPCH.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/benchmark/TestTPCH.java
index b61dc46..f0a0812 100644
--- a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/benchmark/TestTPCH.java
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/benchmark/TestTPCH.java
@@ -18,90 +18,33 @@
 
 package org.apache.tajo.benchmark;
 
-import com.google.common.collect.Maps;
 import org.apache.tajo.IntegrationTest;
-import org.apache.tajo.TpchTestBase;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
+import org.apache.tajo.QueryTestCaseBase;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.io.IOException;
 import java.sql.ResultSet;
-import java.util.Map;
-
-import static org.junit.Assert.*;
 
 @Category(IntegrationTest.class)
-public class TestTPCH {
-  private static TpchTestBase tpch;
-
-  @BeforeClass
-  public static void setUp() throws Exception {
-    tpch = TpchTestBase.getInstance();
-  }
-
-  @AfterClass
-  public static void tearDown() throws IOException {
-  }
-
-  /**
-   * it verifies NTA-788.
-   */
+public class TestTPCH extends QueryTestCaseBase {
   @Test
   public void testQ1OrderBy() throws Exception {
-    ResultSet res = tpch.execute("select l_returnflag, l_linestatus, count(*) as count_order from lineitem " +
-        "group by l_returnflag, l_linestatus order by l_returnflag, l_linestatus");
-
-    try {
-      Map<String,Integer> result = Maps.newHashMap();
-      result.put("NO", 3);
-      result.put("RF", 2);
-
-      assertNotNull(res);
-      assertTrue(res.next());
-      assertTrue(result.get(res.getString(1) + res.getString(2)) == res.getInt(3));
-      assertTrue(res.next());
-      assertTrue(result.get(res.getString(1) + res.getString(2)) == res.getInt(3));
-      assertFalse(res.next());
-    } finally {
-      res.close();
-    }
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 
   @Test
   public void testQ2FourJoins() throws Exception {
-    ResultSet res = tpch.execute(
-        "select s_acctbal, s_name, n_name, p_partkey, p_mfgr, s_address, s_phone, s_comment, ps_supplycost, " +
-            "r_name, p_type, p_size " +
-            "from region join nation on n_regionkey = r_regionkey and r_name = 'AMERICA' " +
-            "join supplier on s_nationkey = n_nationkey " +
-            "join partsupp on s_suppkey = ps_suppkey " +
-            "join part on p_partkey = ps_partkey and p_type like '%BRASS' and p_size = 15");
-
-    try {
-      assertTrue(res.next());
-      assertEquals("AMERICA", res.getString(10));
-      String [] pType = res.getString(11).split(" ");
-      assertEquals("BRASS", pType[pType.length - 1]);
-      assertEquals(15, res.getInt(12));
-      assertFalse(res.next());
-    } finally {
-      res.close();
-    }
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 
   @Test
   public void testTPCH14Expr() throws Exception {
-    ResultSet res = tpch.execute("select 100 * sum(" +
-        "case when p_type like 'PROMO%' then l_extendedprice else 0.0 end) / sum(l_extendedprice * (1 - l_discount)) "
-        + "as promo_revenue from lineitem, part where l_partkey = p_partkey");
-
-    try {
-      assertTrue(res.next());
-      assertEquals(33, res.getInt(1));
-    } finally {
-      res.close();
-    }
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/client/TestDDLBuilder.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/client/TestDDLBuilder.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/client/TestDDLBuilder.java
index f91e8ec..7f9d15c 100644
--- a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/client/TestDDLBuilder.java
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/client/TestDDLBuilder.java
@@ -43,6 +43,6 @@ public class TestDDLBuilder {
 
     TableDesc desc = new TableDesc("table1", schema, meta, new Path("/table1"));
 
-    assertEquals(FileUtil.readTextFile(new File("src/test/results/testBuildDDL.result")), DDLBuilder.buildDDL(desc));
+    assertEquals(FileUtil.readTextFile(new File("src/test/resources/results/testBuildDDL.result")), DDLBuilder.buildDDL(desc));
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/function/TestBuiltinFunctions.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/function/TestBuiltinFunctions.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/function/TestBuiltinFunctions.java
index 1b354ba..a688323 100644
--- a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/function/TestBuiltinFunctions.java
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/function/TestBuiltinFunctions.java
@@ -18,180 +18,86 @@
 
 package org.apache.tajo.engine.function;
 
-import com.google.common.collect.Maps;
 import org.apache.tajo.IntegrationTest;
-import org.apache.tajo.TpchTestBase;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
+import org.apache.tajo.QueryTestCaseBase;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.io.IOException;
 import java.sql.ResultSet;
-import java.util.Map;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
 
 @Category(IntegrationTest.class)
-public class TestBuiltinFunctions {
-  static TpchTestBase tpch;
-
-  @BeforeClass
-  public static void setUp() throws Exception {
-    tpch = TpchTestBase.getInstance();
-  }
-
-  @AfterClass
-  public static void tearDown() throws IOException {
-  }
-
+public class TestBuiltinFunctions extends QueryTestCaseBase {
   @Test
   public void testMaxLong() throws Exception {
-    ResultSet res = tpch.execute("select max(l_orderkey) as total_max from lineitem");
-    try {
-      res.next();
-      assertEquals(3, res.getInt(1));
-    } finally {
-      res.close();
-    }
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 
   @Test
   public void testMinLong() throws Exception {
-    ResultSet res = tpch.execute("select min(l_orderkey) as total_min from lineitem");
-    try {
-      res.next();
-      assertEquals(1, res.getInt(1));
-    } finally {
-      res.close();
-    }
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 
   @Test
   public void testCount() throws Exception {
-    ResultSet res = tpch.execute("select count(*) as rownum from lineitem");
-    try {
-      res.next();
-      assertEquals(5, res.getInt(1));
-    } finally {
-      res.close();
-    }
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 
   @Test
   public void testAvgDouble() throws Exception {
-    Map<Long, Float> result = Maps.newHashMap();
-    result.put(1l, 0.065f);
-    result.put(2l, 0.0f);
-    result.put(3l, 0.08f);
-
-    ResultSet res = tpch.execute("select l_orderkey, avg(l_discount) as revenue from lineitem group by l_orderkey");
-
-    try {
-      while(res.next()) {
-        assertTrue(result.get(res.getLong(1)) == res.getFloat(2));
-      }
-    } finally {
-      res.close();
-    }
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 
   @Test
   public void testAvgLong() throws Exception {
-    ResultSet res = tpch.execute("select avg(l_orderkey) as total_avg from lineitem");
-    try {
-      res.next();
-      assertEquals(2, res.getLong(1));
-    } finally {
-      res.close();
-    }
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 
   @Test
   public void testAvgInt() throws Exception {
-    ResultSet res = tpch.execute("select avg(l_partkey) as total_avg from lineitem");
-    try {
-      res.next();
-      System.out.println(res.getFloat(1));
-      assertTrue(1.8f == res.getFloat(1));
-    } finally {
-      res.close();
-    }
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 
   @Test
   public void testRandom() throws Exception {
-    ResultSet res = tpch.execute("select l_orderkey, random(3) as rndnum from lineitem group by l_orderkey, rndnum");
-
-    try {
-      while(res.next()) {
-        assertTrue(res.getInt(2) >= 0 && res.getInt(2) < 3);
-      }
-    } finally {
-      res.close();
+    ResultSet res = executeQuery();
+    while(res.next()) {
+      assertTrue(res.getInt(2) >= 0 && res.getInt(2) < 3);
     }
+    cleanupQuery(res);
   }
 
   @Test
   public void testSplitPart() throws Exception {
-    ResultSet res = tpch.execute("select split_part(l_shipinstruct, ' ', 1) from lineitem");
-
-    String [] result ={
-      "DELIVER",
-      "TAKE",
-      "TAKE",
-      "NONE",
-      "TAKE"
-    };
-
-    for (int i = 0; i < result.length; i++) {
-      assertTrue(res.next());
-      assertEquals(result[i], res.getString(1));
-    }
-    assertFalse(res.next());
-
-    res.close();
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 
   @Test
   public void testSplitPartByString() throws Exception {
-    ResultSet res = tpch.execute("select split_part(l_shipinstruct, 'KE', 1) from lineitem");
-
-    String [] result ={
-        "DELIVER IN PERSON",
-        "TA",
-        "TA",
-        "NONE",
-        "TA"
-    };
-
-    for (int i = 0; i < result.length; i++) {
-      assertTrue(res.next());
-      assertEquals(result[i], res.getString(1));
-    }
-    assertFalse(res.next());
-
-    res.close();
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 
   @Test
   public void testSplitPartNested() throws Exception {
-    ResultSet res = tpch.execute("select split_part(split_part(l_shipinstruct, ' ', 1), 'A', 2) from lineitem");
-
-    String [] result ={
-        "",
-        "KE",
-        "KE",
-        "",
-        "KE"
-    };
-
-    for (int i = 0; i < result.length; i++) {
-      assertTrue(res.next());
-      assertEquals(result[i], res.getString(1));
-    }
-    assertFalse(res.next());
-
-    res.close();
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/eaf0a585/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/parser/TestHiveConverter.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/parser/TestHiveConverter.java b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/parser/TestHiveConverter.java
index 2e44701..2573391 100644
--- a/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/parser/TestHiveConverter.java
+++ b/tajo-core/tajo-core-backend/src/test/java/org/apache/tajo/engine/parser/TestHiveConverter.java
@@ -85,7 +85,7 @@ public class TestHiveConverter {
 
   @Test
   public void testSelect1() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_1.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_1.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -93,7 +93,7 @@ public class TestHiveConverter {
 
   @Test
   public void testSelect3() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_3.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_3.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -101,7 +101,7 @@ public class TestHiveConverter {
 
   @Test
   public void testSelect4() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_4.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_4.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -109,7 +109,7 @@ public class TestHiveConverter {
 
   @Test
   public void testSelect5() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_5.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_5.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -117,7 +117,7 @@ public class TestHiveConverter {
 
   @Test
   public void testSelect7() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_7.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_7.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -125,7 +125,7 @@ public class TestHiveConverter {
 
   @Test
   public void testSelect8() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_8.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_8.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -133,34 +133,34 @@ public class TestHiveConverter {
 
   @Test
   public void testSelect9() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_9.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_9.sql"));
     Expr expr = parseQuery(sql);
-    sql = FileUtil.readTextFile(new File("src/test/queries/select_9.hiveql"));
+    sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_9.hiveql"));
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
   }
 
   @Test
   public void testSelect10() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_10.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_10.sql"));
     Expr expr = parseQuery(sql);
-    sql = FileUtil.readTextFile(new File("src/test/queries/select_10.hiveql"));
+    sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_10.hiveql"));
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
   }
 
   //@Test
   public void testSelect11() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_11.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_11.sql"));
     Expr expr = parseQuery(sql);
-    sql = FileUtil.readTextFile(new File("src/test/queries/select_11.hiveql"));
+    sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_11.hiveql"));
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
   }
 
   @Test
   public void testSelect12() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_12.hiveql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_12.hiveql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -168,16 +168,16 @@ public class TestHiveConverter {
 
   @Test
   public void testSelect13() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_13.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_13.sql"));
     Expr expr = parseQuery(sql);
-    sql = FileUtil.readTextFile(new File("src/test/queries/select_13.hiveql"));
+    sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_13.hiveql"));
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
   }
 
   @Test
   public void testSelect14() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/select_14.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/select_14.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -185,7 +185,7 @@ public class TestHiveConverter {
 
   @Test
   public void testGroupby1() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/groupby_1.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/groupby_1.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -193,7 +193,7 @@ public class TestHiveConverter {
 
   @Test
   public void testJoin2() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/join_2.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/join_2.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -201,7 +201,7 @@ public class TestHiveConverter {
 
   @Test
   public void testJoin5() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/join_5.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/join_5.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -209,7 +209,7 @@ public class TestHiveConverter {
 
   @Test
   public void testJoin6() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/join_6.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/join_6.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -217,7 +217,7 @@ public class TestHiveConverter {
 
   @Test
   public void testJoin7() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/join_7.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/join_7.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -225,7 +225,7 @@ public class TestHiveConverter {
 
   //@Test
   public void testJoin9() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/join_9.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/join_9.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     assertEquals(expr, hiveExpr);
@@ -233,7 +233,7 @@ public class TestHiveConverter {
 
   @Test
   public void testJoin12() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/join_12.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/join_12.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     assertEquals(expr, hiveExpr);
@@ -241,7 +241,7 @@ public class TestHiveConverter {
 
   @Test
   public void testJoin13() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/join_13.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/join_13.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -249,7 +249,7 @@ public class TestHiveConverter {
 
   @Test
   public void testJoin14() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/join_14.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/join_14.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -257,16 +257,16 @@ public class TestHiveConverter {
 
   @Test
   public void testJoin15() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/join_15.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/join_15.sql"));
     Expr expr = parseQuery(sql);
-    sql = FileUtil.readTextFile(new File("src/test/queries/join_15.hiveql"));
+    sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/join_15.hiveql"));
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
   }
 
   @Test
   public void testUnion1() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/union_1.hiveql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/union_1.hiveql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -274,7 +274,7 @@ public class TestHiveConverter {
 
   @Test
   public void testInsert1() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/insert_into_select_1.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/insert_into_select_1.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
@@ -283,52 +283,52 @@ public class TestHiveConverter {
 
   @Test
   public void testInsert2() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/insert_overwrite_into_select_2.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/insert_overwrite_into_select_2.sql"));
     Expr expr = parseQuery(sql);
-    sql = FileUtil.readTextFile(new File("src/test/queries/insert_overwrite_into_select_2.hiveql"));
+    sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/insert_overwrite_into_select_2.hiveql"));
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
   }
 
   @Test
   public void testCreate1() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/create_table_1.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/create_table_1.sql"));
     Expr expr = parseQuery(sql);
-    sql = FileUtil.readTextFile(new File("src/test/queries/create_table_1.hiveql"));
+    sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/create_table_1.hiveql"));
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
   }
 
   @Test
   public void testCreate2() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/create_table_2.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/create_table_2.sql"));
     Expr expr = parseQuery(sql);
-    sql = FileUtil.readTextFile(new File("src/test/queries/create_table_2.hiveql"));
+    sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/create_table_2.hiveql"));
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
   }
 
   @Test
   public void testCreate11() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/create_table_11.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/create_table_11.sql"));
     Expr expr = parseQuery(sql);
-    sql = FileUtil.readTextFile(new File("src/test/queries/create_table_11.hiveql"));
+    sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/create_table_11.hiveql"));
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
   }
 
   @Test
   public void testCreate12() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/create_table_12.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/create_table_12.sql"));
     Expr expr = parseQuery(sql);
-    sql = FileUtil.readTextFile(new File("src/test/queries/create_table_12.hiveql"));
+    sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/create_table_12.hiveql"));
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);
   }
 
   @Test
   public void testDrop() throws IOException {
-    String sql = FileUtil.readTextFile(new File("src/test/queries/drop_table.sql"));
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/drop_table.sql"));
     Expr expr = parseQuery(sql);
     Expr hiveExpr = parseHiveQL(sql);
     compareJsonResult(expr, hiveExpr);