You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@tajo.apache.org by hy...@apache.org on 2013/05/09 07:38:37 UTC

[05/11] TAJO-57: Recognize Parser and Catalog Standard SQL data types. (hyunsik)

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestBNLJoinExec.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestBNLJoinExec.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestBNLJoinExec.java
index ea74a8c..8ade9cf 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestBNLJoinExec.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestBNLJoinExec.java
@@ -25,8 +25,8 @@ import org.junit.Test;
 import tajo.TajoTestingCluster;
 import tajo.TaskAttemptContext;
 import tajo.catalog.*;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
+import tajo.common.TajoDataTypes.Type;
 import tajo.conf.TajoConf;
 import tajo.datum.Datum;
 import tajo.datum.DatumFactory;
@@ -71,48 +71,48 @@ public class TestBNLJoinExec {
     sm = StorageManager.get(conf, testDir);
 
     Schema schema = new Schema();
-    schema.addColumn("managerId", DataType.INT);
-    schema.addColumn("empId", DataType.INT);
-    schema.addColumn("memId", DataType.INT);
-    schema.addColumn("deptName", DataType.STRING);
+    schema.addColumn("managerId", Type.INT4);
+    schema.addColumn("empId", Type.INT4);
+    schema.addColumn("memId", Type.INT4);
+    schema.addColumn("deptName", Type.TEXT);
 
-    TableMeta employeeMeta = TCatUtil.newTableMeta(schema, StoreType.CSV);
+    TableMeta employeeMeta = CatalogUtil.newTableMeta(schema, StoreType.CSV);
     Path employeePath = new Path(testDir, "employee.csv");
     Appender appender = StorageManager.getAppender(conf, employeeMeta, employeePath);
     appender.init();
     Tuple tuple = new VTuple(employeeMeta.getSchema().getColumnNum());
     for (int i = 0; i < OUTER_TUPLE_NUM; i++) {
-      tuple.put(new Datum[] { DatumFactory.createInt(i),
-          DatumFactory.createInt(i), DatumFactory.createInt(10 + i),
-          DatumFactory.createString("dept_" + i) });
+      tuple.put(new Datum[] { DatumFactory.createInt4(i),
+          DatumFactory.createInt4(i), DatumFactory.createInt4(10 + i),
+          DatumFactory.createText("dept_" + i) });
       appender.addTuple(tuple);
     }
     appender.flush();
     appender.close();
-    employee = TCatUtil.newTableDesc("employee", employeeMeta, employeePath);
+    employee = CatalogUtil.newTableDesc("employee", employeeMeta, employeePath);
     catalog.addTable(employee);
 
     Schema peopleSchema = new Schema();
-    peopleSchema.addColumn("empId", DataType.INT);
-    peopleSchema.addColumn("fk_memId", DataType.INT);
-    peopleSchema.addColumn("name", DataType.STRING);
-    peopleSchema.addColumn("age", DataType.INT);
-    TableMeta peopleMeta = TCatUtil.newTableMeta(peopleSchema, StoreType.CSV);
+    peopleSchema.addColumn("empId", Type.INT4);
+    peopleSchema.addColumn("fk_memId", Type.INT4);
+    peopleSchema.addColumn("name", Type.TEXT);
+    peopleSchema.addColumn("age", Type.INT4);
+    TableMeta peopleMeta = CatalogUtil.newTableMeta(peopleSchema, StoreType.CSV);
     Path peoplePath = new Path(testDir, "people.csv");
     appender = StorageManager.getAppender(conf, peopleMeta, peoplePath);
     appender.init();
     tuple = new VTuple(peopleMeta.getSchema().getColumnNum());
     for (int i = 1; i < INNER_TUPLE_NUM; i += 2) {
-      tuple.put(new Datum[] { DatumFactory.createInt(i),
-          DatumFactory.createInt(10 + i),
-          DatumFactory.createString("name_" + i),
-          DatumFactory.createInt(30 + i) });
+      tuple.put(new Datum[] { DatumFactory.createInt4(i),
+          DatumFactory.createInt4(10 + i),
+          DatumFactory.createText("name_" + i),
+          DatumFactory.createInt4(30 + i) });
       appender.addTuple(tuple);
     }
     appender.flush();
     appender.close();
 
-    people = TCatUtil.newTableDesc("people", peopleMeta, peoplePath);
+    people = CatalogUtil.newTableDesc("people", peopleMeta, peoplePath);
     catalog.addTable(people);
     analyzer = new QueryAnalyzer(catalog);
     planner = new LogicalPlanner(catalog);
@@ -130,9 +130,9 @@ public class TestBNLJoinExec {
 
   @Test
   public final void testCrossJoin() throws IOException {
-    Fragment[] empFrags = sm.splitNG(conf, "employee", employee.getMeta(), employee.getPath(),
+    Fragment[] empFrags = StorageManager.splitNG(conf, "employee", employee.getMeta(), employee.getPath(),
         Integer.MAX_VALUE);
-    Fragment[] peopleFrags = sm.splitNG(conf, "people", people.getMeta(), people.getPath(),
+    Fragment[] peopleFrags = StorageManager.splitNG(conf, "people", people.getMeta(), people.getPath(),
         Integer.MAX_VALUE);
 
     Fragment[] merged = TUtil.concat(empFrags, peopleFrags);
@@ -167,9 +167,9 @@ public class TestBNLJoinExec {
 
   @Test
   public final void testInnerJoin() throws IOException {
-    Fragment[] empFrags = sm.splitNG(conf, "employee", employee.getMeta(), employee.getPath(),
+    Fragment[] empFrags = StorageManager.splitNG(conf, "employee", employee.getMeta(), employee.getPath(),
         Integer.MAX_VALUE);
-    Fragment[] peopleFrags = sm.splitNG(conf, "people", people.getMeta(), people.getPath(),
+    Fragment[] peopleFrags = StorageManager.splitNG(conf, "people", people.getMeta(), people.getPath(),
         Integer.MAX_VALUE);
 
     Fragment[] merged = TUtil.concat(empFrags, peopleFrags);
@@ -213,10 +213,10 @@ public class TestBNLJoinExec {
     exec.init();
     while ((tuple = exec.next()) != null) {
       count++;
-      assertTrue(i == tuple.getInt(0).asInt());
-      assertTrue(i == tuple.getInt(1).asInt());
+      assertTrue(i == tuple.getInt(0).asInt4());
+      assertTrue(i == tuple.getInt(1).asInt4());
       assertTrue(("dept_" + i).equals(tuple.getString(2).asChars()));
-      assertTrue(10 + i == tuple.getInt(3).asInt());
+      assertTrue(10 + i == tuple.getInt(3).asInt4());
       i += 2;
     }
     exec.close();

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestBSTIndexExec.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestBSTIndexExec.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestBSTIndexExec.java
index cff7f7f..96009f3 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestBSTIndexExec.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestBSTIndexExec.java
@@ -27,8 +27,8 @@ import org.junit.Test;
 import tajo.TajoTestingCluster;
 import tajo.TaskAttemptContext;
 import tajo.catalog.*;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
+import tajo.common.TajoDataTypes.Type;
 import tajo.conf.TajoConf;
 import tajo.datum.Datum;
 import tajo.datum.DatumFactory;
@@ -85,12 +85,12 @@ public class TestBSTIndexExec {
     idxPath = new Path(workDir, "test.idx");
 
     Schema schema = new Schema();
-    schema.addColumn("managerId", DataType.INT);
-    schema.addColumn("empId", DataType.INT);
-    schema.addColumn("deptName", DataType.STRING);
+    schema.addColumn("managerId", Type.INT4);
+    schema.addColumn("empId", Type.INT4);
+    schema.addColumn("deptName", Type.TEXT);
 
     this.idxSchema = new Schema();
-    idxSchema.addColumn("managerId", DataType.INT);
+    idxSchema.addColumn("managerId", Type.INT4);
     SortSpec[] sortKeys = new SortSpec[1];
     sortKeys[0] = new SortSpec(idxSchema.getColumn("managerId"), true, false);
     this.comp = new TupleComparator(idxSchema, sortKeys);
@@ -101,7 +101,7 @@ public class TestBSTIndexExec {
     writer.open();
     long offset;
 
-    meta = TCatUtil.newTableMeta(schema, StoreType.CSV);
+    meta = CatalogUtil.newTableMeta(schema, StoreType.CSV);
     tablePath = StorageUtil.concatPath(workDir, "employee", "table.csv");
     fs = tablePath.getFileSystem(conf);
     fs.mkdirs(tablePath.getParent());
@@ -120,10 +120,10 @@ public class TestBSTIndexExec {
         this.randomValues.put(rndKey, 1);
       }
       
-      key.put(new Datum[] { DatumFactory.createInt(rndKey) });
-      tuple.put(new Datum[] { DatumFactory.createInt(rndKey),
-          DatumFactory.createInt(rnd.nextInt(10)),
-          DatumFactory.createString("dept_" + rnd.nextInt(10)) });
+      key.put(new Datum[] { DatumFactory.createInt4(rndKey) });
+      tuple.put(new Datum[] { DatumFactory.createInt4(rndKey),
+          DatumFactory.createInt4(rnd.nextInt(10)),
+          DatumFactory.createText("dept_" + rnd.nextInt(10)) });
       offset = appender.getOffset();
       appender.addTuple(tuple);
       writer.write(key, offset);
@@ -151,7 +151,7 @@ public class TestBSTIndexExec {
     this.rndKey = rnd.nextInt(250);
     final String QUERY = "select * from employee where managerId = " + rndKey;
     
-    Fragment[] frags = sm.splitNG(conf, "employee", meta, tablePath, Integer.MAX_VALUE);
+    Fragment[] frags = StorageManager.splitNG(conf, "employee", meta, tablePath, Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testEqual");
     TaskAttemptContext ctx = new TaskAttemptContext(conf,
         TUtil.newQueryUnitAttemptId(), new Fragment[] { frags[0] }, workDir);
@@ -190,7 +190,7 @@ public class TestBSTIndexExec {
 
       Fragment[] fragments = ctx.getTables(scanNode.getTableId());
       
-      Datum[] datum = new Datum[]{DatumFactory.createInt(rndKey)};
+      Datum[] datum = new Datum[]{DatumFactory.createInt4(rndKey)};
 
       return new BSTIndexScanExec(ctx, sm, scanNode, fragments[0], idxPath,
           idxSchema, comp , datum);

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestExternalSortExec.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestExternalSortExec.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestExternalSortExec.java
index b43c80a..1e5eca1 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestExternalSortExec.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestExternalSortExec.java
@@ -25,8 +25,8 @@ import org.junit.Test;
 import tajo.TajoTestingCluster;
 import tajo.TaskAttemptContext;
 import tajo.catalog.*;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
+import tajo.common.TajoDataTypes.Type;
 import tajo.conf.TajoConf;
 import tajo.datum.Datum;
 import tajo.datum.DatumFactory;
@@ -72,20 +72,20 @@ public class TestExternalSortExec {
     sm = StorageManager.get(conf, testDir);
 
     Schema schema = new Schema();
-    schema.addColumn("managerId", DataType.INT);
-    schema.addColumn("empId", DataType.INT);
-    schema.addColumn("deptName", DataType.STRING);
+    schema.addColumn("managerId", Type.INT4);
+    schema.addColumn("empId", Type.INT4);
+    schema.addColumn("deptName", Type.TEXT);
 
-    TableMeta employeeMeta = TCatUtil.newTableMeta(schema, StoreType.CSV);
+    TableMeta employeeMeta = CatalogUtil.newTableMeta(schema, StoreType.CSV);
     Path employeePath = new Path(testDir, "employee.csv");
     Appender appender = StorageManager.getAppender(conf, employeeMeta, employeePath);
     appender.enableStats();
     appender.init();
     Tuple tuple = new VTuple(employeeMeta.getSchema().getColumnNum());
     for (int i = 0; i < numTuple; i++) {
-      tuple.put(new Datum[] { DatumFactory.createInt(rnd.nextInt(50)),
-          DatumFactory.createInt(rnd.nextInt(100)),
-          DatumFactory.createString("dept_" + 123) });
+      tuple.put(new Datum[] { DatumFactory.createInt4(rnd.nextInt(50)),
+          DatumFactory.createInt4(rnd.nextInt(100)),
+          DatumFactory.createText("dept_" + 123) });
       appender.addTuple(tuple);
     }
     appender.flush();

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestHashJoinExec.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestHashJoinExec.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestHashJoinExec.java
index 47f743c..0b8612e 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestHashJoinExec.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestHashJoinExec.java
@@ -25,8 +25,8 @@ import org.junit.Test;
 import tajo.TajoTestingCluster;
 import tajo.TaskAttemptContext;
 import tajo.catalog.*;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
+import tajo.common.TajoDataTypes.Type;
 import tajo.conf.TajoConf;
 import tajo.datum.Datum;
 import tajo.datum.DatumFactory;
@@ -68,51 +68,51 @@ public class TestHashJoinExec {
     sm = StorageManager.get(conf, testDir);
 
     Schema employeeSchema = new Schema();
-    employeeSchema.addColumn("managerId", DataType.INT);
-    employeeSchema.addColumn("empId", DataType.INT);
-    employeeSchema.addColumn("memId", DataType.INT);
-    employeeSchema.addColumn("deptName", DataType.STRING);
+    employeeSchema.addColumn("managerId", Type.INT4);
+    employeeSchema.addColumn("empId", Type.INT4);
+    employeeSchema.addColumn("memId", Type.INT4);
+    employeeSchema.addColumn("deptName", Type.TEXT);
 
-    TableMeta employeeMeta = TCatUtil.newTableMeta(employeeSchema,
+    TableMeta employeeMeta = CatalogUtil.newTableMeta(employeeSchema,
         StoreType.CSV);
     Path employeePath = new Path(testDir, "employee.csv");
     Appender appender = StorageManager.getAppender(conf, employeeMeta, employeePath);
     appender.init();
     Tuple tuple = new VTuple(employeeMeta.getSchema().getColumnNum());
     for (int i = 0; i < 10; i++) {
-      tuple.put(new Datum[] { DatumFactory.createInt(i),
-          DatumFactory.createInt(i), DatumFactory.createInt(10 + i),
-          DatumFactory.createString("dept_" + i) });
+      tuple.put(new Datum[] { DatumFactory.createInt4(i),
+          DatumFactory.createInt4(i), DatumFactory.createInt4(10 + i),
+          DatumFactory.createText("dept_" + i) });
       appender.addTuple(tuple);
     }
 
     appender.flush();
     appender.close();
-    employee = TCatUtil.newTableDesc("employee", employeeMeta, employeePath);
+    employee = CatalogUtil.newTableDesc("employee", employeeMeta, employeePath);
     catalog.addTable(employee);
 
     Schema peopleSchema = new Schema();
-    peopleSchema.addColumn("empId", DataType.INT);
-    peopleSchema.addColumn("fk_memId", DataType.INT);
-    peopleSchema.addColumn("name", DataType.STRING);
-    peopleSchema.addColumn("age", DataType.INT);
-    TableMeta peopleMeta = TCatUtil.newTableMeta(peopleSchema, StoreType.CSV);
+    peopleSchema.addColumn("empId", Type.INT4);
+    peopleSchema.addColumn("fk_memId", Type.INT4);
+    peopleSchema.addColumn("name", Type.TEXT);
+    peopleSchema.addColumn("age", Type.INT4);
+    TableMeta peopleMeta = CatalogUtil.newTableMeta(peopleSchema, StoreType.CSV);
     Path peoplePath = new Path(testDir, "people.csv");
     appender = StorageManager.getAppender(conf, peopleMeta, peoplePath);
     appender.init();
     tuple = new VTuple(peopleMeta.getSchema().getColumnNum());
     for (int i = 1; i < 10; i += 2) {
-      tuple.put(new Datum[] { DatumFactory.createInt(i),
-          DatumFactory.createInt(10 + i),
-          DatumFactory.createString("name_" + i),
-          DatumFactory.createInt(30 + i) });
+      tuple.put(new Datum[] { DatumFactory.createInt4(i),
+          DatumFactory.createInt4(10 + i),
+          DatumFactory.createText("name_" + i),
+          DatumFactory.createInt4(30 + i) });
       appender.addTuple(tuple);
     }
 
     appender.flush();
     appender.close();
 
-    people = TCatUtil.newTableDesc("people", peopleMeta, peoplePath);
+    people = CatalogUtil.newTableDesc("people", peopleMeta, peoplePath);
     catalog.addTable(people);
     analyzer = new QueryAnalyzer(catalog);
     planner = new LogicalPlanner(catalog);
@@ -130,9 +130,9 @@ public class TestHashJoinExec {
 
   @Test
   public final void testInnerJoin() throws IOException {
-    Fragment[] empFrags = sm.splitNG(conf, "employee", employee.getMeta(), employee.getPath(),
+    Fragment[] empFrags = StorageManager.splitNG(conf, "employee", employee.getMeta(), employee.getPath(),
         Integer.MAX_VALUE);
-    Fragment[] peopleFrags = sm.splitNG(conf, "people", people.getMeta(), people.getPath(),
+    Fragment[] peopleFrags = StorageManager.splitNG(conf, "people", people.getMeta(), people.getPath(),
         Integer.MAX_VALUE);
 
     Fragment[] merged = TUtil.concat(empFrags, peopleFrags);
@@ -166,10 +166,10 @@ public class TestHashJoinExec {
     exec.init();
     while ((tuple = exec.next()) != null) {
       count++;
-      assertTrue(i == tuple.getInt(0).asInt());
-      assertTrue(i == tuple.getInt(1).asInt());
+      assertTrue(i == tuple.getInt(0).asInt4());
+      assertTrue(i == tuple.getInt(1).asInt4());
       assertTrue(("dept_" + i).equals(tuple.getString(2).asChars()));
-      assertTrue(10 + i == tuple.getInt(3).asInt());
+      assertTrue(10 + i == tuple.getInt(3).asInt4());
 
       i += 2;
     }

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestHashPartitioner.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestHashPartitioner.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestHashPartitioner.java
index dd2f53b..183c499 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestHashPartitioner.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestHashPartitioner.java
@@ -42,33 +42,33 @@ public class TestHashPartitioner {
   public final void testGetPartition() {   
     Tuple tuple1 = new VTuple(3);    
     tuple1.put(new Datum[] {
-        DatumFactory.createInt(1),
-        DatumFactory.createInt(2),
-        DatumFactory.createInt(3)
+        DatumFactory.createInt4(1),
+        DatumFactory.createInt4(2),
+        DatumFactory.createInt4(3)
     });
     Tuple tuple2 = new VTuple(3);    
     tuple2.put(new Datum[] {
-        DatumFactory.createInt(1),
-        DatumFactory.createInt(2),
-        DatumFactory.createInt(4)
+        DatumFactory.createInt4(1),
+        DatumFactory.createInt4(2),
+        DatumFactory.createInt4(4)
     });
     Tuple tuple3 = new VTuple(3);    
     tuple3.put(new Datum[] {
-        DatumFactory.createInt(1),
-        DatumFactory.createInt(2),
-        DatumFactory.createInt(5)
+        DatumFactory.createInt4(1),
+        DatumFactory.createInt4(2),
+        DatumFactory.createInt4(5)
     });
     Tuple tuple4 = new VTuple(3);    
     tuple4.put(new Datum[] {
-        DatumFactory.createInt(2),
-        DatumFactory.createInt(2),
-        DatumFactory.createInt(3)
+        DatumFactory.createInt4(2),
+        DatumFactory.createInt4(2),
+        DatumFactory.createInt4(3)
     });
     Tuple tuple5 = new VTuple(3);    
     tuple5.put(new Datum[] {
-        DatumFactory.createInt(2),
-        DatumFactory.createInt(2),
-        DatumFactory.createInt(4)
+        DatumFactory.createInt4(2),
+        DatumFactory.createInt4(2),
+        DatumFactory.createInt4(4)
     });
     
     int [] partKeys = {0,1};

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestMergeJoinExec.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestMergeJoinExec.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestMergeJoinExec.java
index 46d264b..afddea5 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestMergeJoinExec.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestMergeJoinExec.java
@@ -26,8 +26,8 @@ import org.junit.Test;
 import tajo.TajoTestingCluster;
 import tajo.TaskAttemptContext;
 import tajo.catalog.*;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
+import tajo.common.TajoDataTypes.Type;
 import tajo.conf.TajoConf;
 import tajo.datum.Datum;
 import tajo.datum.DatumFactory;
@@ -70,65 +70,65 @@ public class TestMergeJoinExec {
     sm = StorageManager.get(conf, testDir);
 
     Schema employeeSchema = new Schema();
-    employeeSchema.addColumn("managerId", DataType.INT);
-    employeeSchema.addColumn("empId", DataType.INT);
-    employeeSchema.addColumn("memId", DataType.INT);
-    employeeSchema.addColumn("deptName", DataType.STRING);
+    employeeSchema.addColumn("managerId", Type.INT4);
+    employeeSchema.addColumn("empId", Type.INT4);
+    employeeSchema.addColumn("memId", Type.INT4);
+    employeeSchema.addColumn("deptName", Type.TEXT);
 
-    TableMeta employeeMeta = TCatUtil.newTableMeta(employeeSchema,
+    TableMeta employeeMeta = CatalogUtil.newTableMeta(employeeSchema,
         StoreType.CSV);
     Path employeePath = new Path(testDir, "employee.csv");
     Appender appender = StorageManager.getAppender(conf, employeeMeta, employeePath);
     appender.init();
     Tuple tuple = new VTuple(employeeMeta.getSchema().getColumnNum());
     for (int i = 0; i < 10; i++) {
-      tuple.put(new Datum[] { DatumFactory.createInt(i),
-          DatumFactory.createInt(i), DatumFactory.createInt(10 + i),
-          DatumFactory.createString("dept_" + i) });
+      tuple.put(new Datum[] { DatumFactory.createInt4(i),
+          DatumFactory.createInt4(i), DatumFactory.createInt4(10 + i),
+          DatumFactory.createText("dept_" + i) });
       appender.addTuple(tuple);
     }
     for (int i = 11; i < 20; i+=2) {
-      tuple.put(new Datum[] { DatumFactory.createInt(i),
-          DatumFactory.createInt(i), DatumFactory.createInt(10 + i),
-          DatumFactory.createString("dept_" + i) });
+      tuple.put(new Datum[] { DatumFactory.createInt4(i),
+          DatumFactory.createInt4(i), DatumFactory.createInt4(10 + i),
+          DatumFactory.createText("dept_" + i) });
       appender.addTuple(tuple);
     }
 
     appender.flush();
     appender.close();
-    employee = TCatUtil.newTableDesc("employee", employeeMeta,
+    employee = CatalogUtil.newTableDesc("employee", employeeMeta,
         employeePath);
     catalog.addTable(employee);
 
     Schema peopleSchema = new Schema();
-    peopleSchema.addColumn("empId", DataType.INT);
-    peopleSchema.addColumn("fk_memId", DataType.INT);
-    peopleSchema.addColumn("name", DataType.STRING);
-    peopleSchema.addColumn("age", DataType.INT);
-    TableMeta peopleMeta = TCatUtil.newTableMeta(peopleSchema, StoreType.CSV);
+    peopleSchema.addColumn("empId", Type.INT4);
+    peopleSchema.addColumn("fk_memId", Type.INT4);
+    peopleSchema.addColumn("name", Type.TEXT);
+    peopleSchema.addColumn("age", Type.INT4);
+    TableMeta peopleMeta = CatalogUtil.newTableMeta(peopleSchema, StoreType.CSV);
     Path peoplePath = new Path(testDir, "people.csv");
     appender = StorageManager.getAppender(conf, peopleMeta, peoplePath);
     appender.init();
     tuple = new VTuple(peopleMeta.getSchema().getColumnNum());
     for (int i = 1; i < 10; i += 2) {
-      tuple.put(new Datum[] { DatumFactory.createInt(i),
-          DatumFactory.createInt(10 + i),
-          DatumFactory.createString("name_" + i),
-          DatumFactory.createInt(30 + i) });
+      tuple.put(new Datum[] { DatumFactory.createInt4(i),
+          DatumFactory.createInt4(10 + i),
+          DatumFactory.createText("name_" + i),
+          DatumFactory.createInt4(30 + i) });
       appender.addTuple(tuple);
     }
     for (int i = 10; i < 20; i++) {
-      tuple.put(new Datum[] { DatumFactory.createInt(i),
-          DatumFactory.createInt(10 + i),
-          DatumFactory.createString("name_" + i),
-          DatumFactory.createInt(30 + i) });
+      tuple.put(new Datum[] { DatumFactory.createInt4(i),
+          DatumFactory.createInt4(10 + i),
+          DatumFactory.createText("name_" + i),
+          DatumFactory.createInt4(30 + i) });
       appender.addTuple(tuple);
     }
 
     appender.flush();
     appender.close();
 
-    people = TCatUtil.newTableDesc("people", peopleMeta, peoplePath);
+    people = CatalogUtil.newTableDesc("people", peopleMeta, peoplePath);
     catalog.addTable(people);
     analyzer = new QueryAnalyzer(catalog);
     planner = new LogicalPlanner(catalog);
@@ -215,10 +215,10 @@ public class TestMergeJoinExec {
     exec.init();
     while ((tuple = exec.next()) != null) {
       count++;
-      assertTrue(i == tuple.getInt(0).asInt());
-      assertTrue(i == tuple.getInt(1).asInt());
+      assertTrue(i == tuple.getInt(0).asInt4());
+      assertTrue(i == tuple.getInt(1).asInt4());
       assertTrue(("dept_" + i).equals(tuple.getString(2).asChars()));
-      assertTrue(10 + i == tuple.getInt(3).asInt());
+      assertTrue(10 + i == tuple.getInt(3).asInt4());
 
       i += 2;
     }

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestNLJoinExec.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestNLJoinExec.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestNLJoinExec.java
index f8b8877..d7a562f 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestNLJoinExec.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestNLJoinExec.java
@@ -25,8 +25,8 @@ import org.junit.Test;
 import tajo.TajoTestingCluster;
 import tajo.TaskAttemptContext;
 import tajo.catalog.*;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
+import tajo.common.TajoDataTypes.Type;
 import tajo.conf.TajoConf;
 import tajo.datum.Datum;
 import tajo.datum.DatumFactory;
@@ -67,52 +67,52 @@ public class TestNLJoinExec {
     sm = StorageManager.get(conf, testDir);
 
     Schema schema = new Schema();
-    schema.addColumn("managerId", DataType.INT);
-    schema.addColumn("empId", DataType.INT);
-    schema.addColumn("memId", DataType.INT);
-    schema.addColumn("deptName", DataType.STRING);
+    schema.addColumn("managerId", Type.INT4);
+    schema.addColumn("empId", Type.INT4);
+    schema.addColumn("memId", Type.INT4);
+    schema.addColumn("deptName", Type.TEXT);
 
-    TableMeta employeeMeta = TCatUtil.newTableMeta(schema, StoreType.CSV);
+    TableMeta employeeMeta = CatalogUtil.newTableMeta(schema, StoreType.CSV);
     Path employeePath = new Path(testDir, "employee.csv");
     Appender appender = StorageManager.getAppender(conf, employeeMeta, employeePath);
     appender.init();
     Tuple tuple = new VTuple(employeeMeta.getSchema().getColumnNum());
     for (int i = 0; i < 50; i++) {
       tuple.put(new Datum[] {
-          DatumFactory.createInt(i),
-          DatumFactory.createInt(i),
-          DatumFactory.createInt(10+i),
-          DatumFactory.createString("dept_" + i)});
+          DatumFactory.createInt4(i),
+          DatumFactory.createInt4(i),
+          DatumFactory.createInt4(10 + i),
+          DatumFactory.createText("dept_" + i)});
       appender.addTuple(tuple);
     }
     appender.flush();
     appender.close();
-    employee = TCatUtil.newTableDesc("employee", employeeMeta,
+    employee = CatalogUtil.newTableDesc("employee", employeeMeta,
         employeePath);
     catalog.addTable(employee);
     
     Schema peopleSchema = new Schema();
-    peopleSchema.addColumn("empId", DataType.INT);
-    peopleSchema.addColumn("fk_memId", DataType.INT);
-    peopleSchema.addColumn("name", DataType.STRING);
-    peopleSchema.addColumn("age", DataType.INT);
-    TableMeta peopleMeta = TCatUtil.newTableMeta(peopleSchema, StoreType.CSV);
+    peopleSchema.addColumn("empId", Type.INT4);
+    peopleSchema.addColumn("fk_memId", Type.INT4);
+    peopleSchema.addColumn("name", Type.TEXT);
+    peopleSchema.addColumn("age", Type.INT4);
+    TableMeta peopleMeta = CatalogUtil.newTableMeta(peopleSchema, StoreType.CSV);
     Path peoplePath = new Path(testDir, "people.csv");
     appender = StorageManager.getAppender(conf, peopleMeta, peoplePath);
     appender.init();
     tuple = new VTuple(peopleMeta.getSchema().getColumnNum());
     for (int i = 1; i < 50; i += 2) {
       tuple.put(new Datum[] {
-          DatumFactory.createInt(i),
-          DatumFactory.createInt(10+i),
-          DatumFactory.createString("name_" + i),
-          DatumFactory.createInt(30 + i)});
+          DatumFactory.createInt4(i),
+          DatumFactory.createInt4(10 + i),
+          DatumFactory.createText("name_" + i),
+          DatumFactory.createInt4(30 + i)});
       appender.addTuple(tuple);
     }
     appender.flush();
     appender.close();
     
-    people = TCatUtil.newTableDesc("people", peopleMeta,
+    people = CatalogUtil.newTableDesc("people", peopleMeta,
         peoplePath);
     catalog.addTable(people);
     analyzer = new QueryAnalyzer(catalog);
@@ -132,9 +132,9 @@ public class TestNLJoinExec {
   
   @Test
   public final void testCrossJoin() throws IOException {
-    Fragment[] empFrags = sm.splitNG(conf, "employee", employee.getMeta(), employee.getPath(),
+    Fragment[] empFrags = StorageManager.splitNG(conf, "employee", employee.getMeta(), employee.getPath(),
         Integer.MAX_VALUE);
-    Fragment[] peopleFrags = sm.splitNG(conf, "people", people.getMeta(), people.getPath(),
+    Fragment[] peopleFrags = StorageManager.splitNG(conf, "people", people.getMeta(), people.getPath(),
         Integer.MAX_VALUE);
     
     Fragment [] merged = TUtil.concat(empFrags, peopleFrags);
@@ -160,9 +160,9 @@ public class TestNLJoinExec {
 
   @Test
   public final void testInnerJoin() throws IOException {
-    Fragment[] empFrags = sm.splitNG(conf, "employee", employee.getMeta(), employee.getPath(),
+    Fragment[] empFrags = StorageManager.splitNG(conf, "employee", employee.getMeta(), employee.getPath(),
         Integer.MAX_VALUE);
-    Fragment[] peopleFrags = sm.splitNG(conf, "people", people.getMeta(), people.getPath(),
+    Fragment[] peopleFrags = StorageManager.splitNG(conf, "people", people.getMeta(), people.getPath(),
         Integer.MAX_VALUE);
     
     Fragment [] merged = TUtil.concat(empFrags, peopleFrags);
@@ -183,10 +183,10 @@ public class TestNLJoinExec {
     exec.init();
     while ((tuple = exec.next()) != null) {
       count++;
-      assertTrue(i == tuple.getInt(0).asInt());
-      assertTrue(i == tuple.getInt(1).asInt());
+      assertTrue(i == tuple.getInt(0).asInt4());
+      assertTrue(i == tuple.getInt(1).asInt4());
       assertTrue(("dept_" + i).equals(tuple.getString(2).asChars()));
-      assertTrue(10 + i == tuple.getInt(3).asInt());
+      assertTrue(10 + i == tuple.getInt(3).asInt4());
       i += 2;
     }
     exec.close();

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestPhysicalPlanner.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestPhysicalPlanner.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestPhysicalPlanner.java
index 8b5dee5..c54b39a 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestPhysicalPlanner.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestPhysicalPlanner.java
@@ -34,8 +34,8 @@ import tajo.QueryUnitAttemptId;
 import tajo.TajoTestingCluster;
 import tajo.TaskAttemptContext;
 import tajo.catalog.*;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
+import tajo.common.TajoDataTypes.Type;
 import tajo.conf.TajoConf;
 import tajo.datum.Datum;
 import tajo.datum.DatumFactory;
@@ -92,21 +92,21 @@ public class TestPhysicalPlanner {
     }
 
     Schema schema = new Schema();
-    schema.addColumn("name", DataType.STRING);
-    schema.addColumn("empId", DataType.INT);
-    schema.addColumn("deptName", DataType.STRING);
+    schema.addColumn("name", Type.TEXT);
+    schema.addColumn("empId", Type.INT4);
+    schema.addColumn("deptName", Type.TEXT);
 
     Schema schema2 = new Schema();
-    schema2.addColumn("deptName", DataType.STRING);
-    schema2.addColumn("manager", DataType.STRING);
+    schema2.addColumn("deptName", Type.TEXT);
+    schema2.addColumn("manager", Type.TEXT);
 
     Schema scoreSchema = new Schema();
-    scoreSchema.addColumn("deptName", DataType.STRING);
-    scoreSchema.addColumn("class", DataType.STRING);
-    scoreSchema.addColumn("score", DataType.INT);
-    scoreSchema.addColumn("nullable", DataType.STRING);
+    scoreSchema.addColumn("deptName", Type.TEXT);
+    scoreSchema.addColumn("class", Type.TEXT);
+    scoreSchema.addColumn("score", Type.INT4);
+    scoreSchema.addColumn("nullable", Type.TEXT);
 
-    TableMeta employeeMeta = TCatUtil.newTableMeta(schema, StoreType.CSV);
+    TableMeta employeeMeta = CatalogUtil.newTableMeta(schema, StoreType.CSV);
 
 
     Path employeePath = new Path(testDir, "employee.csv");
@@ -114,8 +114,8 @@ public class TestPhysicalPlanner {
     appender.init();
     Tuple tuple = new VTuple(employeeMeta.getSchema().getColumnNum());
     for (int i = 0; i < 100; i++) {
-      tuple.put(new Datum[] {DatumFactory.createString("name_" + i),
-          DatumFactory.createInt(i), DatumFactory.createString("dept_" + i)});
+      tuple.put(new Datum[] {DatumFactory.createText("name_" + i),
+          DatumFactory.createInt4(i), DatumFactory.createText("dept_" + i)});
       appender.addTuple(tuple);
     }
     appender.flush();
@@ -125,7 +125,7 @@ public class TestPhysicalPlanner {
     catalog.addTable(employee);
 
     Path scorePath = new Path(testDir, "score");
-    TableMeta scoreMeta = TCatUtil.newTableMeta(scoreSchema, StoreType.CSV, new Options());
+    TableMeta scoreMeta = CatalogUtil.newTableMeta(scoreSchema, StoreType.CSV, new Options());
     appender = StorageManager.getAppender(conf, scoreMeta, scorePath);
     appender.init();
     score = new TableDescImpl("score", scoreMeta, scorePath);
@@ -136,10 +136,10 @@ public class TestPhysicalPlanner {
         for (int j = 1; j <= 3; j++) {
           tuple.put(
               new Datum[] {
-                  DatumFactory.createString("name_" + i), // name_1 ~ 5 (cad: // 5)
-                  DatumFactory.createString(k + "rd"), // 3 or 4rd (cad: 2)
-                  DatumFactory.createInt(j), // 1 ~ 3
-              m % 3 == 1 ? DatumFactory.createString("one") : NullDatum.get()});
+                  DatumFactory.createText("name_" + i), // name_1 ~ 5 (cad: // 5)
+                  DatumFactory.createText(k + "rd"), // 3 or 4rd (cad: 2)
+                  DatumFactory.createInt4(j), // 1 ~ 3
+              m % 3 == 1 ? DatumFactory.createText("one") : NullDatum.get()});
           appender.addTuple(tuple);
           m++;
         }
@@ -170,7 +170,7 @@ public class TestPhysicalPlanner {
       "select count(deptName) from score", // 9
       "select managerId, empId, deptName from employee order by managerId, empId desc", // 10
       "select deptName, nullable from score group by deptName, nullable", // 11
-      "select 3 < 4 as ineq, 3.5 * 2 as real", // 12
+      "select 3 < 4 as ineq, 3.5 * 2 as score", // 12
 //      "select (3 > 2) = (1 > 0) and 3 > 1", // 12
       "select (1 > 0) and 3 > 1", // 13
       "select deptName, class, sum(score), max(score), min(score) from score", // 14
@@ -224,9 +224,9 @@ public class TestPhysicalPlanner {
     Tuple tuple;
     exec.init();
     while ((tuple = exec.next()) != null) {
-      assertEquals(6, tuple.get(2).asInt()); // sum
-      assertEquals(3, tuple.get(3).asInt()); // max
-      assertEquals(1, tuple.get(4).asInt()); // min
+      assertEquals(6, tuple.get(2).asInt4()); // sum
+      assertEquals(3, tuple.get(3).asInt4()); // max
+      assertEquals(1, tuple.get(4).asInt4()); // min
       i++;
     }
     exec.close();
@@ -254,9 +254,9 @@ public class TestPhysicalPlanner {
     exec.init();
     while ((tuple = exec.next()) != null) {
       assertEquals(DatumFactory.createNullDatum(), tuple.get(1));
-      assertEquals(12, tuple.get(2).asInt()); // sum
-      assertEquals(3, tuple.get(3).asInt()); // max
-      assertEquals(1, tuple.get(4).asInt()); // min
+      assertEquals(12, tuple.get(2).asInt4()); // sum
+      assertEquals(3, tuple.get(3).asInt4()); // max
+      assertEquals(1, tuple.get(4).asInt4()); // min
       i++;
     }
     exec.close();
@@ -296,9 +296,9 @@ public class TestPhysicalPlanner {
     Tuple tuple;
     exec.init();
     while ((tuple = exec.next()) != null) {
-      assertEquals(6, tuple.get(2).asInt()); // sum
-      assertEquals(3, tuple.get(3).asInt()); // max
-      assertEquals(1, tuple.get(4).asInt()); // min
+      assertEquals(6, tuple.get(2).asInt4()); // sum
+      assertEquals(3, tuple.get(3).asInt4()); // max
+      assertEquals(1, tuple.get(4).asInt4()); // min
       i++;
     }
     assertEquals(10, i);
@@ -306,9 +306,9 @@ public class TestPhysicalPlanner {
     exec.rescan();
     i = 0;
     while ((tuple = exec.next()) != null) {
-      assertEquals(6, tuple.getInt(2).asInt()); // sum
-      assertEquals(3, tuple.getInt(3).asInt()); // max
-      assertEquals(1, tuple.getInt(4).asInt()); // min
+      assertEquals(6, tuple.getInt(2).asInt4()); // sum
+      assertEquals(3, tuple.getInt(3).asInt4()); // max
+      assertEquals(1, tuple.getInt(4).asInt4()); // min
       i++;
     }
     exec.close();
@@ -334,7 +334,7 @@ public class TestPhysicalPlanner {
     LogicalNode plan = planner.createPlan(context);
     plan = LogicalOptimizer.optimize(context, plan);
 
-    TableMeta outputMeta = TCatUtil.newTableMeta(plan.getOutSchema(),
+    TableMeta outputMeta = CatalogUtil.newTableMeta(plan.getOutSchema(),
         StoreType.CSV);
 
     PhysicalPlanner phyPlanner = new PhysicalPlannerImpl(conf,sm);
@@ -348,9 +348,9 @@ public class TestPhysicalPlanner {
     Tuple tuple;
     int i = 0;
     while ((tuple = scanner.next()) != null) {
-      assertEquals(6, tuple.get(2).asInt()); // sum
-      assertEquals(3, tuple.get(3).asInt()); // max
-      assertEquals(1, tuple.get(4).asInt()); // min
+      assertEquals(6, tuple.get(2).asInt4()); // sum
+      assertEquals(3, tuple.get(3).asInt4()); // max
+      assertEquals(1, tuple.get(4).asInt4()); // min
       i++;
     }
     assertEquals(10, i);
@@ -374,7 +374,7 @@ public class TestPhysicalPlanner {
     LogicalNode plan = planner.createPlan(context);
     plan = LogicalOptimizer.optimize(context, plan);
 
-    TableMeta outputMeta = TCatUtil.newTableMeta(plan.getOutSchema(),
+    TableMeta outputMeta = CatalogUtil.newTableMeta(plan.getOutSchema(),
         StoreType.RCFILE);
 
     PhysicalPlanner phyPlanner = new PhysicalPlannerImpl(conf,sm);
@@ -388,9 +388,9 @@ public class TestPhysicalPlanner {
     Tuple tuple;
     int i = 0;
     while ((tuple = scanner.next()) != null) {
-      assertEquals(6, tuple.get(2).asInt()); // sum
-      assertEquals(3, tuple.get(3).asInt()); // max
-      assertEquals(1, tuple.get(4).asInt()); // min
+      assertEquals(6, tuple.get(2).asInt4()); // sum
+      assertEquals(3, tuple.get(3).asInt4()); // max
+      assertEquals(1, tuple.get(4).asInt4()); // min
       i++;
     }
     assertEquals(10, i);
@@ -421,14 +421,14 @@ public class TestPhysicalPlanner {
     LogicalNode plan = planner.createPlan(context);
 
     int numPartitions = 3;
-    Column key1 = new Column("score.deptName", DataType.STRING);
-    Column key2 = new Column("score.class", DataType.STRING);
+    Column key1 = new Column("score.deptName", Type.TEXT);
+    Column key2 = new Column("score.class", Type.TEXT);
     StoreTableNode storeNode = new StoreTableNode("partition");
     storeNode.setPartitions(PartitionType.HASH, new Column[]{key1, key2}, numPartitions);
     PlannerUtil.insertNode(plan, storeNode);
     plan = LogicalOptimizer.optimize(context, plan);
 
-    TableMeta outputMeta = TCatUtil.newTableMeta(plan.getOutSchema(),
+    TableMeta outputMeta = CatalogUtil.newTableMeta(plan.getOutSchema(),
         StoreType.CSV);
 
     FileSystem fs = sm.getFileSystem();
@@ -454,9 +454,9 @@ public class TestPhysicalPlanner {
     Tuple tuple;
     i = 0;
     while ((tuple = scanner.next()) != null) {
-      assertEquals(6, tuple.get(2).asInt()); // sum
-      assertEquals(3, tuple.get(3).asInt()); // max
-      assertEquals(1, tuple.get(4).asInt()); // min
+      assertEquals(6, tuple.get(2).asInt4()); // sum
+      assertEquals(3, tuple.get(3).asInt4()); // max
+      assertEquals(1, tuple.get(4).asInt4()); // min
       i++;
     }
     assertEquals(10, i);
@@ -486,7 +486,7 @@ public class TestPhysicalPlanner {
     PlannerUtil.insertNode(plan, storeNode);
     plan = LogicalOptimizer.optimize(context, plan);
 
-    TableMeta outputMeta = TCatUtil.newTableMeta(plan.getOutSchema(),
+    TableMeta outputMeta = CatalogUtil.newTableMeta(plan.getOutSchema(),
         StoreType.CSV);
 
     PhysicalPlanner phyPlanner = new PhysicalPlannerImpl(conf,sm);
@@ -511,9 +511,9 @@ public class TestPhysicalPlanner {
     Tuple tuple;
     i = 0;
     while ((tuple = scanner.next()) != null) {
-      assertEquals(60, tuple.get(2).asInt()); // sum
-      assertEquals(3, tuple.get(3).asInt()); // max
-      assertEquals(1, tuple.get(4).asInt()); // min
+      assertEquals(60, tuple.get(2).asInt4()); // sum
+      assertEquals(3, tuple.get(3).asInt4()); // max
+      assertEquals(1, tuple.get(4).asInt4()); // min
       i++;
     }
     assertEquals(1, i);
@@ -539,9 +539,9 @@ public class TestPhysicalPlanner {
 
     exec.init();
     Tuple tuple = exec.next();
-    assertEquals(30, tuple.get(0).asLong());
-    assertEquals(3, tuple.get(1).asInt());
-    assertEquals(1, tuple.get(2).asInt());
+    assertEquals(30, tuple.get(0).asInt8());
+    assertEquals(3, tuple.get(1).asInt4());
+    assertEquals(1, tuple.get(2).asInt4());
     assertNull(exec.next());
     exec.close();
   }
@@ -561,7 +561,7 @@ public class TestPhysicalPlanner {
     PhysicalExec exec = phyPlanner.createPlan(ctx, plan);
 
     Tuple tuple = exec.next();
-    assertEquals(30, tuple.get(0).asLong());
+    assertEquals(30, tuple.get(0).asInt8());
     assertNull(exec.next());
   }
 
@@ -630,7 +630,7 @@ public class TestPhysicalPlanner {
     tuple = exec.next();
     exec.close();
     assertEquals(true, tuple.get(0).asBool());
-    assertTrue(7.0d == tuple.get(1).asDouble());
+    assertTrue(7.0d == tuple.get(1).asFloat8());
 
     context = analyzer.parse(QUERIES[13]);
     plan = planner.createPlan(context);
@@ -710,8 +710,8 @@ public class TestPhysicalPlanner {
   @Test
   public final void testBug() throws IOException {
     Schema s1 = new Schema();
-    s1.addColumn("o_orderdate", DataType.STRING);
-    s1.addColumn("o_shippriority", DataType.INT);
-    s1.addColumn("o_orderkey", DataType.LONG);
+    s1.addColumn("o_orderdate", Type.TEXT);
+    s1.addColumn("o_shippriority", Type.INT4);
+    s1.addColumn("o_orderkey", Type.INT8);
 
     Options opt = new Options();
@@ -777,7 +777,7 @@ public class TestPhysicalPlanner {
     exec.close();
 
     Schema keySchema = new Schema();
-    keySchema.addColumn("?empId", DataType.INT);
+    keySchema.addColumn("?empId", Type.INT4);
     SortSpec[] sortSpec = new SortSpec[1];
     sortSpec[0] = new SortSpec(keySchema.getColumn(0), true, false);
     TupleComparator comp = new TupleComparator(keySchema, sortSpec);
@@ -786,7 +786,7 @@ public class TestPhysicalPlanner {
         keySchema, comp);
     reader.open();
     Path outputPath = StorageUtil.concatPath(workDir, "output", "output");
-    TableMeta meta = TCatUtil.newTableMeta(plan.getOutSchema(), StoreType.CSV, new Options());
+    TableMeta meta = CatalogUtil.newTableMeta(plan.getOutSchema(), StoreType.CSV, new Options());
     SeekableScanner scanner = (SeekableScanner)
         StorageManager.getScanner(conf, meta, outputPath);
     scanner.init();
@@ -801,12 +801,12 @@ public class TestPhysicalPlanner {
 
     Tuple keytuple = new VTuple(1);
     for(int i = 1 ; i < 100 ; i ++) {
-      keytuple.put(0, DatumFactory.createInt(i));
+      keytuple.put(0, DatumFactory.createInt4(i));
       long offsets = reader.find(keytuple);
       scanner.seek(offsets);
       tuple = scanner.next();
       assertTrue("[seek check " + (i) + " ]" , ("name_" + i).equals(tuple.get(0).asChars()));
-      assertTrue("[seek check " + (i) + " ]" , i == tuple.get(1).asInt());
+      assertTrue("[seek check " + (i) + " ]" , i == tuple.get(1).asInt4());
     }
 
 
@@ -815,12 +815,12 @@ public class TestPhysicalPlanner {
         new File(new Path(workDir, "output").toUri()), keySchema, comp);
     Map<String,List<String>> kvs = Maps.newHashMap();
     Tuple startTuple = new VTuple(1);
-    startTuple.put(0, DatumFactory.createInt(50));
+    startTuple.put(0, DatumFactory.createInt4(50));
     kvs.put("start", Lists.newArrayList(
         new String(Base64.encodeBase64(
             RowStoreUtil.RowStoreEncoder.toBytes(keySchema, startTuple), false))));
     Tuple endTuple = new VTuple(1);
-    endTuple.put(0, DatumFactory.createInt(80));
+    endTuple.put(0, DatumFactory.createInt4(80));
     kvs.put("end", Lists.newArrayList(
         new String(Base64.encodeBase64(
             RowStoreUtil.RowStoreEncoder.toBytes(keySchema, endTuple), false))));
@@ -828,11 +828,11 @@ public class TestPhysicalPlanner {
 
     scanner.seek(chunk.startOffset());
     keytuple = scanner.next();
-    assertEquals(50, keytuple.get(1).asInt());
+    assertEquals(50, keytuple.get(1).asInt4());
 
     long endOffset = chunk.startOffset() + chunk.length();
     while((keytuple = scanner.next()) != null && scanner.getNextOffset() <= endOffset) {
-      assertTrue(keytuple.get(1).asInt() <= 80);
+      assertTrue(keytuple.get(1).asInt4() <= 80);
     }
 
     scanner.close();

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestSortExec.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestSortExec.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestSortExec.java
index 694875f..f91a37c 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestSortExec.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/planner/physical/TestSortExec.java
@@ -25,8 +25,8 @@ import org.junit.Test;
 import tajo.TajoTestingCluster;
 import tajo.TaskAttemptContext;
 import tajo.catalog.*;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
+import tajo.common.TajoDataTypes.Type;
 import tajo.conf.TajoConf;
 import tajo.datum.Datum;
 import tajo.datum.DatumFactory;
@@ -65,11 +65,11 @@ public class TestSortExec {
     sm = StorageManager.get(conf, workDir);
 
     Schema schema = new Schema();
-    schema.addColumn("managerId", DataType.INT);
-    schema.addColumn("empId", DataType.INT);
-    schema.addColumn("deptName", DataType.STRING);
+    schema.addColumn("managerId", Type.INT4);
+    schema.addColumn("empId", Type.INT4);
+    schema.addColumn("deptName", Type.TEXT);
 
-    employeeMeta = TCatUtil.newTableMeta(schema, StoreType.CSV);
+    employeeMeta = CatalogUtil.newTableMeta(schema, StoreType.CSV);
 
     tablePath = StorageUtil.concatPath(workDir, "employee", "table1");
     sm.getFileSystem().mkdirs(tablePath.getParent());
@@ -79,9 +79,9 @@ public class TestSortExec {
     Tuple tuple = new VTuple(employeeMeta.getSchema().getColumnNum());
     for (int i = 0; i < 100; i++) {
       tuple.put(new Datum[] {
-          DatumFactory.createInt(rnd.nextInt(5)),
-          DatumFactory.createInt(rnd.nextInt(10)),
-          DatumFactory.createString("dept_" + rnd.nextInt(10))});
+          DatumFactory.createInt4(rnd.nextInt(5)),
+          DatumFactory.createInt4(rnd.nextInt(10)),
+          DatumFactory.createText("dept_" + rnd.nextInt(10))});
       appender.addTuple(tuple);
     }
     appender.flush();
@@ -139,11 +139,11 @@ public class TestSortExec {
    */
   public void testTAJO_946() {
     Schema schema = new Schema();
-    schema.addColumn("l_orderkey", DataType.LONG);
+    schema.addColumn("l_orderkey", Type.INT8);
     Tuple s = new VTuple(1);
-    s.put(0, DatumFactory.createLong(0));
+    s.put(0, DatumFactory.createInt8(0));
     Tuple e = new VTuple(1);
-    e.put(0, DatumFactory.createLong(6000000000l));
+    e.put(0, DatumFactory.createInt8(6000000000l));
     TupleRange expected = new TupleRange(schema, s, e);
     RangePartitionAlgorithm partitioner
         = new UniformRangePartition(schema, expected, true);

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/query/TestNullValues.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/query/TestNullValues.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/query/TestNullValues.java
index 167b15e..c3b37aa 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/query/TestNullValues.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/query/TestNullValues.java
@@ -24,7 +24,7 @@ import tajo.IntegrationTest;
 import tajo.TajoTestingCluster;
 import tajo.catalog.Options;
 import tajo.catalog.Schema;
-import tajo.catalog.proto.CatalogProtos;
+import tajo.common.TajoDataTypes.Type;
 import tajo.storage.CSVFile;
 
 import java.sql.ResultSet;
@@ -42,8 +42,8 @@ public class TestNullValues {
   public final void testIsNull() throws Exception {
     String [] table = new String[] {"nulltable1"};
     Schema schema = new Schema();
-    schema.addColumn("col1", CatalogProtos.DataType.INT);
-    schema.addColumn("col2", CatalogProtos.DataType.STRING);
+    schema.addColumn("col1", Type.INT4);
+    schema.addColumn("col2", Type.TEXT);
     Schema [] schemas = new Schema[] {schema};
     String [] data = {
         "1|filled|",
@@ -64,8 +64,8 @@ public class TestNullValues {
   public final void testIsNotNull() throws Exception {
     String [] table = new String[] {"nulltable2"};
     Schema schema = new Schema();
-    schema.addColumn("col1", CatalogProtos.DataType.INT);
-    schema.addColumn("col2", CatalogProtos.DataType.STRING);
+    schema.addColumn("col1", Type.INT4);
+    schema.addColumn("col2", Type.TEXT);
     Schema [] schemas = new Schema[] {schema};
     String [] data = {
         "1|filled|",
@@ -88,16 +88,16 @@ public class TestNullValues {
   public final void testIsNotNull2() throws Exception {
     String [] table = new String[] {"nulltable3"};
     Schema schema = new Schema();
-    schema.addColumn("col1", CatalogProtos.DataType.LONG);
-    schema.addColumn("col2", CatalogProtos.DataType.LONG);
-    schema.addColumn("col3", CatalogProtos.DataType.LONG);
-    schema.addColumn("col4", CatalogProtos.DataType.LONG);
-    schema.addColumn("col5", CatalogProtos.DataType.LONG);
-    schema.addColumn("col6", CatalogProtos.DataType.LONG);
-    schema.addColumn("col7", CatalogProtos.DataType.LONG);
-    schema.addColumn("col8", CatalogProtos.DataType.LONG);
-    schema.addColumn("col9", CatalogProtos.DataType.LONG);
-    schema.addColumn("col10", CatalogProtos.DataType.LONG);
+    schema.addColumn("col1", Type.INT8);
+    schema.addColumn("col2", Type.INT8);
+    schema.addColumn("col3", Type.INT8);
+    schema.addColumn("col4", Type.INT8);
+    schema.addColumn("col5", Type.INT8);
+    schema.addColumn("col6", Type.INT8);
+    schema.addColumn("col7", Type.INT8);
+    schema.addColumn("col8", Type.INT8);
+    schema.addColumn("col9", Type.INT8);
+    schema.addColumn("col10", Type.INT8);
     Schema [] schemas = new Schema[] {schema};
     String [] data = {
         ",,,,672287821,1301460,1,313895860387,126288907,1024",

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/query/TestResultSetImpl.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/query/TestResultSetImpl.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/query/TestResultSetImpl.java
index f1f09ef..ebeecd3 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/query/TestResultSetImpl.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/query/TestResultSetImpl.java
@@ -28,12 +28,12 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import tajo.IntegrationTest;
 import tajo.TajoTestingCluster;
+import tajo.catalog.CatalogUtil;
 import tajo.catalog.Schema;
-import tajo.catalog.TCatUtil;
 import tajo.catalog.TableMeta;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
 import tajo.catalog.statistics.TableStat;
+import tajo.common.TajoDataTypes.Type;
 import tajo.conf.TajoConf;
 import tajo.datum.DatumFactory;
 import tajo.storage.Appender;
@@ -62,9 +62,9 @@ public class TestResultSetImpl {
     sm = new StorageManager(conf);
 
     Schema scoreSchema = new Schema();
-    scoreSchema.addColumn("deptname", DataType.STRING);
-    scoreSchema.addColumn("score", DataType.INT);
-    scoreMeta = TCatUtil.newTableMeta(scoreSchema, StoreType.CSV);
+    scoreSchema.addColumn("deptname", Type.TEXT);
+    scoreSchema.addColumn("score", Type.INT4);
+    scoreMeta = CatalogUtil.newTableMeta(scoreSchema, StoreType.CSV);
     TableStat stat = new TableStat();
 
     Path p = sm.getTablePath("score");
@@ -78,8 +78,8 @@ public class TestResultSetImpl {
     for (int i = 0; i < tupleNum; i++) {
       tuple = new VTuple(2);
       String key = "test" + (i % deptSize);
-      tuple.put(0, DatumFactory.createString(key));
-      tuple.put(1, DatumFactory.createInt(i + 1));
+      tuple.put(0, DatumFactory.createText(key));
+      tuple.put(1, DatumFactory.createInt4(i + 1));
       written += key.length() + Integer.SIZE;
       appender.addTuple(tuple);
     }

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/engine/util/TestTupleUtil.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/util/TestTupleUtil.java b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/util/TestTupleUtil.java
index 9a23083..bee9b39 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/engine/util/TestTupleUtil.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/engine/util/TestTupleUtil.java
@@ -20,7 +20,7 @@ package tajo.engine.util;
 
 import org.junit.Test;
 import tajo.catalog.Schema;
-import tajo.catalog.proto.CatalogProtos.DataType;
+import tajo.common.TajoDataTypes.Type;
 import tajo.datum.Datum;
 import tajo.datum.DatumFactory;
 import tajo.engine.planner.PlannerUtil;
@@ -40,32 +40,32 @@ public class TestTupleUtil {
   @Test
   public final void testToBytesAndToTuple() {
     Schema schema = new Schema();
-    schema.addColumn("col1", DataType.BOOLEAN);
-    schema.addColumn("col2", DataType.BYTE);
-    schema.addColumn("col3", DataType.CHAR);
-    schema.addColumn("col4", DataType.SHORT);
-    schema.addColumn("col5", DataType.INT);
-    schema.addColumn("col6", DataType.LONG);
-    schema.addColumn("col7", DataType.FLOAT);
-    schema.addColumn("col8", DataType.DOUBLE);
-    schema.addColumn("col9", DataType.STRING);
-    schema.addColumn("col10", DataType.BYTES);
-    schema.addColumn("col11", DataType.IPv4);
+    schema.addColumn("col1", Type.BOOLEAN);
+    schema.addColumn("col2", Type.BIT);
+    schema.addColumn("col3", Type.CHAR);
+    schema.addColumn("col4", Type.INT2);
+    schema.addColumn("col5", Type.INT4);
+    schema.addColumn("col6", Type.INT8);
+    schema.addColumn("col7", Type.FLOAT4);
+    schema.addColumn("col8", Type.FLOAT8);
+    schema.addColumn("col9", Type.TEXT);
+    schema.addColumn("col10", Type.BLOB);
+    schema.addColumn("col11", Type.INET4);
     //schema.addColumn("col11", DataType.IPv6);
     
     Tuple tuple = new VTuple(11);
     tuple.put(new Datum[] {
         DatumFactory.createBool(true),
-        DatumFactory.createByte((byte) 0x99),
+        DatumFactory.createBit((byte) 0x99),
         DatumFactory.createChar('7'),
-        DatumFactory.createShort((short) 17),
-        DatumFactory.createInt(59),
-        DatumFactory.createLong(23l),
-        DatumFactory.createFloat(77.9f),
-        DatumFactory.createDouble(271.9f),        
-        DatumFactory.createString("hyunsik"),
-        DatumFactory.createBytes("hyunsik".getBytes()),
-        DatumFactory.createIPv4("192.168.0.1")
+        DatumFactory.createInt2((short) 17),
+        DatumFactory.createInt4(59),
+        DatumFactory.createInt8(23l),
+        DatumFactory.createFloat4(77.9f),
+        DatumFactory.createFloat8(271.9f),
+        DatumFactory.createText("hyunsik"),
+        DatumFactory.createBlob("hyunsik".getBytes()),
+        DatumFactory.createInet4("192.168.0.1")
     });
     
     byte [] bytes = RowStoreUtil.RowStoreEncoder.toBytes(schema, tuple);
@@ -80,29 +80,29 @@ public class TestTupleUtil {
     Tuple eTuple = new VTuple(7);
 
     Schema schema = new Schema();
-    schema.addColumn("numByte", DataType.BYTE);
-    schema.addColumn("numChar", DataType.CHAR);
-    schema.addColumn("numShort", DataType.SHORT);
-    schema.addColumn("numInt", DataType.INT);
-    schema.addColumn("numLong", DataType.LONG);
-    schema.addColumn("numFloat", DataType.FLOAT);
-    schema.addColumn("numDouble", DataType.FLOAT);
-
-    sTuple.put(0, DatumFactory.createByte((byte) 44));
+    schema.addColumn("numByte", Type.BIT);
+    schema.addColumn("numChar", Type.CHAR);
+    schema.addColumn("numShort", Type.INT2);
+    schema.addColumn("numInt", Type.INT4);
+    schema.addColumn("numLong", Type.INT8);
+    schema.addColumn("numFloat", Type.FLOAT4);
+    schema.addColumn("numDouble", Type.FLOAT4);
+
+    sTuple.put(0, DatumFactory.createBit((byte) 44));
     sTuple.put(1, DatumFactory.createChar('a'));
-    sTuple.put(2, DatumFactory.createShort((short) 10));
-    sTuple.put(3, DatumFactory.createInt(5));
-    sTuple.put(4, DatumFactory.createLong(100));
-    sTuple.put(5, DatumFactory.createFloat(100));
-    sTuple.put(6, DatumFactory.createDouble(100));
+    sTuple.put(2, DatumFactory.createInt2((short) 10));
+    sTuple.put(3, DatumFactory.createInt4(5));
+    sTuple.put(4, DatumFactory.createInt8(100));
+    sTuple.put(5, DatumFactory.createFloat4(100));
+    sTuple.put(6, DatumFactory.createFloat8(100));
 
-    eTuple.put(0, DatumFactory.createByte((byte) 99));
+    eTuple.put(0, DatumFactory.createBit((byte) 99));
     eTuple.put(1, DatumFactory.createChar('p'));
-    eTuple.put(2, DatumFactory.createShort((short) 70));
-    eTuple.put(3, DatumFactory.createInt(70));
-    eTuple.put(4, DatumFactory.createLong(10000));
-    eTuple.put(5, DatumFactory.createFloat(150));
-    eTuple.put(6, DatumFactory.createDouble(170));
+    eTuple.put(2, DatumFactory.createInt2((short) 70));
+    eTuple.put(3, DatumFactory.createInt4(70));
+    eTuple.put(4, DatumFactory.createInt8(10000));
+    eTuple.put(5, DatumFactory.createFloat4(150));
+    eTuple.put(6, DatumFactory.createFloat8(170));
 
     RangePartitionAlgorithm partitioner = new UniformRangePartition(schema, new TupleRange(schema, sTuple, eTuple));
     TupleRange [] ranges = partitioner.partition(5);
@@ -119,16 +119,16 @@ public class TestTupleUtil {
   @Test
   public void testQueryToRange() throws UnsupportedEncodingException {
     Schema schema = new Schema();
-    schema.addColumn("intval", DataType.INT);
-    schema.addColumn("floatval", DataType.FLOAT);
+    schema.addColumn("intval", Type.INT4);
+    schema.addColumn("floatval", Type.FLOAT4);
 
     Tuple s = new VTuple(2);
-    s.put(0, DatumFactory.createInt(5));
-    s.put(1, DatumFactory.createFloat(10));
+    s.put(0, DatumFactory.createInt4(5));
+    s.put(1, DatumFactory.createFloat4(10));
 
     Tuple e = new VTuple(2);
-    e.put(0, DatumFactory.createInt(10));
-    e.put(1, DatumFactory.createFloat(20));
+    e.put(0, DatumFactory.createInt4(10));
+    e.put(1, DatumFactory.createFloat4(20));
 
     TupleRange expected = new TupleRange(schema, s, e);
     int card = (int) TupleUtil.computeCardinality(schema, expected);
@@ -155,12 +155,12 @@ public class TestTupleUtil {
   @Test
   public void testQueryToRangeWithOneRange() throws UnsupportedEncodingException {
     Schema schema = new Schema();
-    schema.addColumn("partkey", DataType.FLOAT);
+    schema.addColumn("partkey", Type.FLOAT4);
 
     Tuple s = new VTuple(1);
-    s.put(0, DatumFactory.createFloat(28082));
+    s.put(0, DatumFactory.createFloat4(28082));
     Tuple e = new VTuple(1);
-    e.put(0, DatumFactory.createFloat(28082));
+    e.put(0, DatumFactory.createFloat4(28082));
 
     TupleRange expected = new TupleRange(schema, s, e);
     int card = (int) TupleUtil.computeCardinality(schema, expected);
@@ -186,11 +186,11 @@ public class TestTupleUtil {
    */
   public void testRangeToQueryHeavyTest() throws UnsupportedEncodingException {
     Schema schema = new Schema();
-    schema.addColumn("c_custkey", DataType.INT);
+    schema.addColumn("c_custkey", Type.INT4);
     Tuple s = new VTuple(1);
-    s.put(0, DatumFactory.createInt(4));
+    s.put(0, DatumFactory.createInt4(4));
     Tuple e = new VTuple(1);
-    e.put(0, DatumFactory.createInt(149995));
+    e.put(0, DatumFactory.createInt4(149995));
     TupleRange expected = new TupleRange(schema, s, e);
     TupleRange [] ranges = TupleUtil.getPartitions(schema, 31, expected);
 
@@ -208,14 +208,14 @@ public class TestTupleUtil {
    */
   public void testRangeToQueryTest() throws UnsupportedEncodingException {
     Schema schema = new Schema();
-    schema.addColumn("l_returnflag", DataType.STRING);
-    schema.addColumn("l_linestatus", DataType.STRING);
+    schema.addColumn("l_returnflag", Type.TEXT);
+    schema.addColumn("l_linestatus", Type.TEXT);
     Tuple s = new VTuple(2);
-    s.put(0, DatumFactory.createString("A"));
-    s.put(1, DatumFactory.createString("F"));
+    s.put(0, DatumFactory.createText("A"));
+    s.put(1, DatumFactory.createText("F"));
     Tuple e = new VTuple(2);
-    e.put(0, DatumFactory.createString("R"));
-    e.put(1, DatumFactory.createString("O"));
+    e.put(0, DatumFactory.createText("R"));
+    e.put(1, DatumFactory.createText("O"));
     TupleRange expected = new TupleRange(schema, s, e);
 
     RangePartitionAlgorithm partitioner = new UniformRangePartition(schema, expected, true);

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/master/TestExecutionBlockCursor.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/master/TestExecutionBlockCursor.java b/tajo-core/tajo-core-backend/src/test/java/tajo/master/TestExecutionBlockCursor.java
index c6a5d43..36cdef3 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/master/TestExecutionBlockCursor.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/master/TestExecutionBlockCursor.java
@@ -21,10 +21,7 @@ import org.junit.Test;
 import tajo.QueryIdFactory;
 import tajo.TajoTestingCluster;
 import tajo.benchmark.TPCH;
-import tajo.catalog.CatalogService;
-import tajo.catalog.TCatUtil;
-import tajo.catalog.TableDesc;
-import tajo.catalog.TableMeta;
+import tajo.catalog.*;
 import tajo.catalog.proto.CatalogProtos;
 import tajo.conf.TajoConf;
 import tajo.engine.parser.QueryAnalyzer;
@@ -56,8 +53,8 @@ public class TestExecutionBlockCursor {
     tpch.loadSchemas();
     tpch.loadOutSchema();
     for (String table : tpch.getTableNames()) {
-      TableMeta m = TCatUtil.newTableMeta(tpch.getSchema(table), CatalogProtos.StoreType.CSV);
-      TableDesc d = TCatUtil.newTableDesc(table, m, new Path("file:///"));
+      TableMeta m = CatalogUtil.newTableMeta(tpch.getSchema(table), CatalogProtos.StoreType.CSV);
+      TableDesc d = CatalogUtil.newTableDesc(table, m, new Path("file:///"));
       catalog.addTable(d);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestFragment.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestFragment.java b/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestFragment.java
index 87a45ed..674eaa6 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestFragment.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestFragment.java
@@ -23,11 +23,11 @@ import com.google.gson.Gson;
 import org.apache.hadoop.fs.Path;
 import org.junit.Before;
 import org.junit.Test;
+import tajo.catalog.CatalogUtil;
 import tajo.catalog.Schema;
-import tajo.catalog.TCatUtil;
 import tajo.catalog.TableMeta;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
+import tajo.common.TajoDataTypes.Type;
 import tajo.engine.json.GsonCreator;
 
 import java.util.Arrays;
@@ -43,9 +43,9 @@ public class TestFragment {
   @Before
   public final void setUp() throws Exception {
     schema1 = new Schema();
-    schema1.addColumn("id", DataType.INT);
-    schema1.addColumn("name", DataType.STRING);
-    meta1 = TCatUtil.newTableMeta(schema1, StoreType.CSV);
+    schema1.addColumn("id", Type.INT4);
+    schema1.addColumn("name", Type.TEXT);
+    meta1 = CatalogUtil.newTableMeta(schema1, StoreType.CSV);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestHCFile.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestHCFile.java b/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestHCFile.java
deleted file mode 100644
index aa4e9f0..0000000
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestHCFile.java
+++ /dev/null
@@ -1,268 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package tajo.storage;
-
-import com.google.common.collect.Lists;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import tajo.TajoTestingCluster;
-import tajo.catalog.Schema;
-import tajo.catalog.TCatUtil;
-import tajo.catalog.TableMeta;
-import tajo.catalog.proto.CatalogProtos.CompressType;
-import tajo.catalog.proto.CatalogProtos.DataType;
-import tajo.catalog.proto.CatalogProtos.StoreType;
-import tajo.conf.TajoConf;
-import tajo.datum.Datum;
-import tajo.datum.DatumFactory;
-import tajo.storage.exception.UnknownCodecException;
-import tajo.storage.exception.UnknownDataTypeException;
-import tajo.storage.hcfile.ColumnMeta;
-import tajo.storage.hcfile.HCFile.Appender;
-import tajo.storage.hcfile.HCFile.Scanner;
-import tajo.storage.hcfile.HCTupleAppender;
-import tajo.storage.hcfile.HColumnMetaWritable;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Random;
-
-import static org.junit.Assert.assertEquals;
-
-public class TestHCFile {
-
-  private static TajoTestingCluster util;
-  private static TajoConf conf;
-  private static Random random;
-
-  @Before
-  public void setUp() throws Exception {
-    util = new TajoTestingCluster();
-    util.startMiniDFSCluster(1);
-    conf = util.getConfiguration();
-    conf.setInt("dfs.blocksize", 65535);
-    random = new Random(System.currentTimeMillis());
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    util.shutdownMiniDFSCluster();
-  }
-
-  @Test
-  public void testInt()
-      throws UnknownCodecException, IOException, UnknownDataTypeException {
-    int rowNum = 1000;
-    Path path = new Path("hdfs:///hcfile.int");
-    List<Integer> data = Lists.newArrayList();
-
-    ColumnMeta
-        meta = new HColumnMetaWritable(0, DataType.INT, CompressType.COMP_NONE,
-        false, false, true);
-    long before = System.currentTimeMillis();
-    Appender appender = new Appender(conf, meta, path);
-
-    for (int i = 0; i < rowNum; i++) {
-      data.add(i);
-      appender.append(DatumFactory.createInt(data.get(data.size()-1)));
-    }
-    appender.close();
-    long after = System.currentTimeMillis();
-    System.out.println("write time: " + (after-before));
-
-    before = System.currentTimeMillis();
-    Scanner scanner = new Scanner(conf, path);
-
-    for (Integer i : data) {
-      assertEquals(i.intValue(), scanner.get().asInt());
-    }
-    after = System.currentTimeMillis();
-    System.out.println("sequential read time: " + (after-before));
-    scanner.close();
-
-    before = System.currentTimeMillis();
-    scanner = new Scanner(conf, path);
-    scanner.first();
-    assertEquals(data.get(0).intValue(), scanner.get().asInt());
-    after = System.currentTimeMillis();
-    System.out.println("after first() read time: " + (after-before));
-    scanner.close();
-
-    before = System.currentTimeMillis();
-    scanner = new Scanner(conf, path);
-    scanner.last();
-    assertEquals(data.get(data.size()-1).intValue(), scanner.get().asInt());
-    after = System.currentTimeMillis();
-    System.out.println("after last() read time: " + (after-before));
-    scanner.close();
-
-    before = System.currentTimeMillis();
-    scanner = new Scanner(conf, path);
-    int randomIndex = random.nextInt(rowNum);
-    scanner.pos(randomIndex);
-    assertEquals(data.get(randomIndex).intValue(), scanner.get().asInt());
-    after = System.currentTimeMillis();
-    System.out.println("after pos() read time: " + (after-before));
-    scanner.close();
-  }
-
-  @Test
-  public void testString()
-      throws IOException, UnknownCodecException, UnknownDataTypeException {
-    int rowNum = 1000;
-    Path path = new Path("hdfs:///hcfile.string");
-    List<String> data = Lists.newArrayList();
-
-    ColumnMeta meta = new HColumnMetaWritable(0, DataType.STRING, CompressType.COMP_NONE,
-        false, false, true);
-    Appender appender = new Appender(conf, meta, path);
-
-    String randomStr;
-    for (int i = 0; i < rowNum; i++) {
-      randomStr = getRandomString(10);
-      data.add(randomStr);
-      appender.append(DatumFactory.createString(randomStr));
-    }
-    appender.close();
-
-    Scanner scanner = new Scanner(conf, path);
-    for (String s : data) {
-      assertEquals(s, scanner.get().asChars());
-    }
-    scanner.close();
-  }
-
-  @Test
-  public void testHCTupleAppender()
-      throws UnknownCodecException, IOException, UnknownDataTypeException {
-    int tupleNum = 1000;
-
-    Path tablePath = new Path("hdfs:///table");
-    Schema schema = new Schema();
-    schema.addColumn("id", DataType.INT);
-    schema.addColumn("age", DataType.LONG);
-    schema.addColumn("description", DataType.STRING);
-    schema.addColumn("char", DataType.CHAR);
-    TableMeta meta = TCatUtil.newTableMeta(schema, StoreType.HCFILE);
-
-    HCTupleAppender appender = new HCTupleAppender(conf, meta, 2, tablePath);
-    Datum stringDatum = DatumFactory.createString("abcdefghijklmnopqrstuvwxyz");
-
-    int i;
-    Tuple tuple = new VTuple(4);
-    for(i = 0; i < tupleNum; i++) {
-      tuple.put(0, DatumFactory.createInt(i));
-      tuple.put(1, DatumFactory.createLong(25l));
-      tuple.put(2, stringDatum);
-      tuple.put(3, DatumFactory.createChar('a'));
-      appender.addTuple(tuple);
-    }
-    appender.close();
-
-    FileSystem fs = tablePath.getFileSystem(conf);
-    FileStatus[] files = fs.listStatus(new Path(tablePath, "data"));
-    Path[] shardDirs = new Path[files.length];
-    for (i = 0; i < files.length; i++) {
-      shardDirs[i] = files[i].getPath();
-    }
-    Arrays.sort(shardDirs, new NumericPathComparator());
-
-    Scanner scanner;
-    Datum datum;
-    int cnt = 0;
-
-    for (i = 0; i < shardDirs.length; i++) {
-      scanner = new Scanner(conf, new Path(shardDirs[i], "id_0"));
-      while ((datum=scanner.get()) != null) {
-        assertEquals(cnt++, datum.asInt());
-      }
-      scanner.close();
-
-      scanner = new Scanner(conf, new Path(shardDirs[i], "age_0"));
-      while ((datum=scanner.get()) != null) {
-        assertEquals(25l, datum.asLong());
-      }
-      scanner.close();
-
-      scanner = new Scanner(conf, new Path(shardDirs[i], "description_0"));
-      while ((datum=scanner.get()) != null) {
-        assertEquals("abcdefghijklmnopqrstuvwxyz", datum.asChars());
-      }
-      scanner.close();
-
-      scanner = new Scanner(conf, new Path(shardDirs[i], "char_0"));
-      while ((datum=scanner.get()) != null) {
-        assertEquals('a', datum.asChar());
-      }
-      scanner.close();
-    }
-  }
-
-//  @Test
-//  public void testOrders()
-//      throws IOException, UnknownCodecException, UnknownDataTypeException {
-//    Path tablePath = new Path("file:///home/jihoon/work/develop/tpch/customer");
-//    Path metaPath = new Path(tablePath, ".meta");
-//    Path dataDir = new Path(tablePath, "data");
-//    Path outPath = new Path("file:///home/jihoon/work/develop/ColumnLoader/target/test-data/customer");
-//    FileSystem fs = metaPath.getFileSystem(conf);
-//
-//    FSDataInputStream in = fs.open(metaPath);
-//    TableProto proto = (TableProto) FileUtil.loadProto(in, TableProto.getDefaultInstance());
-//    TableMeta meta = new TableMetaImpl(proto);
-//    in.close();
-//
-//    Tuple tuple;
-//    Fragment fragment;
-//    CSVFile.CSVScanner scanner;
-//    HCTupleAppender appender = new HCTupleAppender(conf, meta, meta.getSchema().getColumn(0), outPath);
-//
-//    for (FileStatus file : fs.listStatus(dataDir)) {
-//      if (file.getPath().getName().equals(".index")) {
-//        continue;
-//      }
-//      fragment = new Fragment("0", file.getPath(), meta, 0, file.getLen());
-//      scanner = new CSVScanner(conf, meta.getSchema(), fragment);
-//      while ((tuple=scanner.next()) != null) {
-//        appender.addTuple(tuple);
-//      }
-//      scanner.close();
-//    }
-//    appender.close();
-//
-//  }
-
-  private static String getRandomString(int length) {
-    StringBuffer buffer = new StringBuffer();
-
-    String chars[] =
-        "a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z".split(",");
-
-    for (int i=0 ; i<length ; i++)
-    {
-      buffer.append(chars[random.nextInt(chars.length)]);
-    }
-    return buffer.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestHColumnReader.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestHColumnReader.java b/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestHColumnReader.java
deleted file mode 100644
index df72f39..0000000
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestHColumnReader.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package tajo.storage;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import tajo.TajoTestingCluster;
-import tajo.catalog.Column;
-import tajo.catalog.Schema;
-import tajo.catalog.TCatUtil;
-import tajo.catalog.TableMeta;
-import tajo.catalog.proto.CatalogProtos.DataType;
-import tajo.catalog.proto.CatalogProtos.StoreType;
-import tajo.datum.Datum;
-import tajo.datum.DatumFactory;
-import tajo.storage.hcfile.HCTupleAppender;
-import tajo.storage.hcfile.HColumnReader;
-import tajo.util.FileUtil;
-
-import java.io.IOException;
-
-import static org.junit.Assert.assertEquals;
-
-public class TestHColumnReader {
-  private TajoTestingCluster util;
-  private Configuration conf;
-  private int i, tupleNum = 150000;
-  private Path tablePath = new Path("hdfs:///customer");
-
-
-  @Before
-  public void setup() throws Exception {
-    util = new TajoTestingCluster();
-    util.startMiniDFSCluster(1);
-    conf = util.getConfiguration();
-
-    Schema schema = new Schema(
-        new Column[]{
-            new Column("id", DataType.INT),
-            new Column("name", DataType.STRING2)});
-    TableMeta tableMeta = TCatUtil.newTableMeta(schema, StoreType.HCFILE);
-    FileUtil.writeProto(util.getDefaultFileSystem(),
-        new Path(tablePath, ".meta"), tableMeta.getProto());
-
-    HCTupleAppender appender = new HCTupleAppender(conf, tableMeta, 1, tablePath);
-    Tuple tuple = new VTuple(2);
-
-    for (i = 0; i < tupleNum; i++) {
-      tuple.put(0, DatumFactory.createInt(i));
-      tuple.put(1, DatumFactory.createString2("abcdefghijklmnopqrstuvwxyz"));
-      appender.addTuple(tuple);
-    }
-
-    appender.close();
-  }
-
-  @After
-  public void teardown() throws Exception {
-    util.shutdownMiniDFSCluster();
-  }
-
-  @Test
-  public void testSeqscan() throws IOException {
-
-    Datum datum;
-    HColumnReader reader = new HColumnReader(conf, tablePath, "id");
-    for (i = 0; (datum=reader.get()) != null; i++) {
-      assertEquals(i, datum.asInt());
-    }
-
-    reader.close();
-
-    assertEquals(i, tupleNum);
-
-    reader = new HColumnReader(conf, tablePath, "name");
-    for (i = 0; (datum=reader.get()) != null; i++) {
-      assertEquals("abcdefghijklmnopqrstuvwxyz", datum.asChars());
-    }
-
-    reader.close();
-
-    assertEquals(i, tupleNum);
-  }
-
-  @Test
-  public void testRandscan() throws IOException {
-    Datum datum;
-    HColumnReader idReader = new HColumnReader(conf, tablePath, 0);
-    HColumnReader nameReader = new HColumnReader(conf, tablePath, "name");
-    idReader.pos(100000);
-    nameReader.pos(100000);
-    for (i = 100000; (datum=idReader.get()) != null; i++) {
-      assertEquals(i, datum.asInt());
-      assertEquals("abcdefghijklmnopqrstuvwxyz", nameReader.get().asChars());
-    }
-    assertEquals(i, tupleNum);
-
-    idReader.pos(3000);
-    nameReader.pos(3000);
-    for (i = 3000; i < 50000; i++) {
-      datum = idReader.get();
-      assertEquals(i, datum.asInt());
-      assertEquals("abcdefghijklmnopqrstuvwxyz", nameReader.get().asChars());
-    }
-    assertEquals(50000, i);
-
-    idReader.pos(30000);
-    nameReader.pos(30000);
-    for (i = 30000; (datum=idReader.get()) != null; i++) {
-      assertEquals(i, datum.asInt());
-      assertEquals("abcdefghijklmnopqrstuvwxyz", nameReader.get().asChars());
-    }
-    assertEquals(i, tupleNum);
-
-    idReader.pos(0);
-    nameReader.pos(0);
-    for (i = 0; (datum=idReader.get()) != null; i++) {
-      assertEquals(i, datum.asInt());
-      assertEquals("abcdefghijklmnopqrstuvwxyz", nameReader.get().asChars());
-    }
-    assertEquals(i, tupleNum);
-
-    idReader.close();
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestRowFile.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestRowFile.java b/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestRowFile.java
index cd3058f..b7b5b0a 100644
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestRowFile.java
+++ b/tajo-core/tajo-core-backend/src/test/java/tajo/storage/TestRowFile.java
@@ -27,14 +27,14 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import tajo.TajoTestingCluster;
+import tajo.catalog.CatalogUtil;
 import tajo.catalog.Schema;
-import tajo.catalog.TCatUtil;
 import tajo.catalog.TableMeta;
 import tajo.catalog.TableMetaImpl;
-import tajo.catalog.proto.CatalogProtos.DataType;
 import tajo.catalog.proto.CatalogProtos.StoreType;
 import tajo.catalog.proto.CatalogProtos.TableProto;
 import tajo.catalog.statistics.TableStat;
+import tajo.common.TajoDataTypes.Type;
 import tajo.conf.TajoConf.ConfVars;
 import tajo.datum.Datum;
 import tajo.datum.DatumFactory;
@@ -65,11 +65,11 @@ public class TestRowFile {
   @Test
   public void test() throws IOException {
     Schema schema = new Schema();
-    schema.addColumn("id", DataType.INT);
-    schema.addColumn("age", DataType.LONG);
-    schema.addColumn("description", DataType.STRING2);
+    schema.addColumn("id", Type.INT4);
+    schema.addColumn("age", Type.INT8);
+    schema.addColumn("description", Type.TEXT);
 
-    TableMeta meta = TCatUtil.newTableMeta(schema, StoreType.ROWFILE);
+    TableMeta meta = CatalogUtil.newTableMeta(schema, StoreType.ROWFILE);
 
     Path tablePath = new Path("hdfs:///test");
     Path metaPath = new Path(tablePath, ".meta");
@@ -87,14 +87,14 @@ public class TestRowFile {
 
     int tupleNum = 100000;
     Tuple tuple;
-    Datum stringDatum = DatumFactory.createString2("abcdefghijklmnopqrstuvwxyz");
+    Datum stringDatum = DatumFactory.createText("abcdefghijklmnopqrstuvwxyz");
     Set<Integer> idSet = Sets.newHashSet();
 
     tuple = new VTuple(3);
     long start = System.currentTimeMillis();
     for(int i = 0; i < tupleNum; i++) {
-      tuple.put(0, DatumFactory.createInt(i + 1));
-      tuple.put(1, DatumFactory.createLong(25l));
+      tuple.put(0, DatumFactory.createInt4(i + 1));
+      tuple.put(1, DatumFactory.createInt8(25l));
       tuple.put(2, stringDatum);
       appender.addTuple(tuple);
       idSet.add(i+1);
@@ -140,8 +140,8 @@ public class TestRowFile {
       scanner = new RowFile.RowFileScanner(conf, meta, fragment);
       scanner.init();
       while ((tuple=scanner.next()) != null) {
-        if (!idSet.remove(tuple.get(0).asInt())) {
-          System.out.println("duplicated! " + tuple.get(0).asInt());
+        if (!idSet.remove(tuple.get(0).asInt4())) {
+          System.out.println("duplicated! " + tuple.get(0).asInt4());
         }
         tupleCnt++;
       }

http://git-wip-us.apache.org/repos/asf/incubator-tajo/blob/c1c6f83e/tajo-core/tajo-core-backend/src/test/java/tajo/worker/SlowFunc.java
----------------------------------------------------------------------
diff --git a/tajo-core/tajo-core-backend/src/test/java/tajo/worker/SlowFunc.java b/tajo-core/tajo-core-backend/src/test/java/tajo/worker/SlowFunc.java
deleted file mode 100644
index a6f2a3a..0000000
--- a/tajo-core/tajo-core-backend/src/test/java/tajo/worker/SlowFunc.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package tajo.worker;
-
-import org.mortbay.log.Log;
-import tajo.catalog.Column;
-import tajo.catalog.function.AggFunction;
-import tajo.catalog.function.FunctionContext;
-import tajo.catalog.proto.CatalogProtos.DataType;
-import tajo.datum.Datum;
-import tajo.storage.Tuple;
-
-public class SlowFunc extends AggFunction {
-  private Datum param;
-
-  public SlowFunc() {
-    super(new Column[] { new Column("name", DataType.STRING) });
-  }
-
-  @Override
-  public FunctionContext newContext() {
-    return null;
-  }
-
-  @Override
-  public void eval(FunctionContext ctx, Tuple params) {
-    param = params.get(0);
-  }
-
-  @Override
-  public Datum getPartialResult(FunctionContext ctx) {
-    return null;
-  }
-
-  @Override
-  public DataType[] getPartialResultType() {
-    return new DataType[] {DataType.STRING};
-  }
-
-  @Override
-  public Datum terminate(FunctionContext ctx) {
-    try {
-      Thread.sleep(1000);
-      Log.info("Sleepy... z...z...z");
-    } catch (InterruptedException ie) {
-    }
-    return param;
-  }
-}