Posted to commits@kylin.apache.org by li...@apache.org on 2015/09/17 11:45:10 UTC

[1/9] incubator-kylin git commit: KYLIN-1010 Decompose project job

Repository: incubator-kylin
Updated Branches:
  refs/heads/KYLIN-1010 [created] 6c59e1077


http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/resources/jarfile/SampleBadJavaProgram.jarfile
----------------------------------------------------------------------
diff --git a/job/src/test/resources/jarfile/SampleBadJavaProgram.jarfile b/job/src/test/resources/jarfile/SampleBadJavaProgram.jarfile
deleted file mode 100644
index 75a43d5..0000000
Binary files a/job/src/test/resources/jarfile/SampleBadJavaProgram.jarfile and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/resources/jarfile/SampleJavaProgram.jarfile
----------------------------------------------------------------------
diff --git a/job/src/test/resources/jarfile/SampleJavaProgram.jarfile b/job/src/test/resources/jarfile/SampleJavaProgram.jarfile
deleted file mode 100644
index 8ca85c4..0000000
Binary files a/job/src/test/resources/jarfile/SampleJavaProgram.jarfile and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/resources/json/dummy_jobinstance.json
----------------------------------------------------------------------
diff --git a/job/src/test/resources/json/dummy_jobinstance.json b/job/src/test/resources/json/dummy_jobinstance.json
deleted file mode 100644
index b46fba2..0000000
--- a/job/src/test/resources/json/dummy_jobinstance.json
+++ /dev/null
@@ -1,195 +0,0 @@
-{
-  "name" : "Dummy_Job",
-  "type" : "REBUILD",
-  "uuid" : "8ad83b8c-6bda-4b79-864a-9566c0f8ce2c",
-  "last_modified" : 0,
-  "related_cube" : "test_kylin_cube_with_slr_empty",
-  "related_branch" : null,
-  "related_segment" : null,
-  "input_parameters" : {
-    "storageLocationIdentifier" : "table_abc"
-  },
-  "job_status" : "PENDING",
-  "exec_start_time" : 0,
-  "exec_end_time" : 0,
-  "progress" : 0.0,
-  "duration" : 0,
-  "mr_waiting" : 0,
-  "steps" : [ {
-    "interruptCmd" : null,
-    "name" : "Build Dimension Dictionary",
-    "sequence_id" : 0,
-    "exec_cmd" : "hbase org.apache.hadoop.util.RunJar kylin-job.jar org.apache.kylin.job.hadoop.dict.CreateDictionaryJob  -cubename test_kylin_cube_with_slr_empty",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  }, {
-    "interruptCmd" : null,
-    "name" : "Create Intermediate Flat Hive Table",
-    "sequence_id" : 1,
-    "exec_cmd" : "hive -e \"DROP TABLE IF EXISTS kylin_intermediate_test_kylin_cube_with_slr_desc_8ad83b8c_6bda_4b79_864a_9566c0f8ce2c;\nCREATE EXTERNAL TABLE IF NOT EXISTS kylin_intermediate_test_kylin_cube_with_slr_desc_8ad83b8c_6bda_4b79_864a_9566c0f8ce2c\n(\nCAL_DT date\n,META_CATEG_NAME string\n,CATEG_LVL2_NAME string\n,CATEG_LVL3_NAME string\n,LSTG_FORMAT_NAME string\n,SITE_ID int\n,SELLER_TYPE_CD smallint\n,SELLER_ID bigint\n,PRICE decimal\n)\nROW FORMAT DELIMITED FIELDS TERMINATED BY '\\177'\nSTORED AS SEQUENCEFILE\nLOCATION '/tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/kylin_intermediate_test_kylin_cube_with_slr_desc_8ad83b8c_6bda_4b79_864a_9566c0f8ce2c';\nSET dfs.block.size=67108864;\nSET hive.exec.compress.output=true;\nSET mapred.output.compression.type=BLOCK;\nSET mapred.output.compression.codec=com.hadoop.compression.lzo.LzoCodec;\nSET hive.auto.convert.join.noconditionaltask = true;\nSET hive.auto.convert.join.noconditionaltask.size = 300000000;\nINSERT OVERWRITE TABLE k
 ylin_intermediate_test_kylin_cube_with_slr_desc_8ad83b8c_6bda_4b79_864a_9566c0f8ce2c\nSELECT\nTEST_CAL_DT.CAL_DT\n,TEST_CATEGORY_GROUPINGS.META_CATEG_NAME\n,TEST_CATEGORY_GROUPINGS.CATEG_LVL2_NAME\n,TEST_CATEGORY_GROUPINGS.CATEG_LVL3_NAME\n,TEST_KYLIN_FACT.LSTG_FORMAT_NAME\n,TEST_SITES.SITE_ID\n,TEST_SELLER_TYPE_DIM.SELLER_TYPE_CD\n,TEST_KYLIN_FACT.SELLER_ID\n,TEST_KYLIN_FACT.PRICE\nFROM TEST_KYLIN_FACT\nINNER JOIN TEST_CAL_DT\nON TEST_KYLIN_FACT.CAL_DT = TEST_CAL_DT.CAL_DT\nINNER JOIN TEST_CATEGORY_GROUPINGS\nON TEST_KYLIN_FACT.LEAF_CATEG_ID = TEST_CATEGORY_GROUPINGS.LEAF_CATEG_ID AND TEST_KYLIN_FACT.LSTG_SITE_ID = TEST_CATEGORY_GROUPINGS.SITE_ID\nINNER JOIN TEST_SITES\nON TEST_KYLIN_FACT.LSTG_SITE_ID = TEST_SITES.SITE_ID\nINNER JOIN TEST_SELLER_TYPE_DIM\nON TEST_KYLIN_FACT.SLR_SEGMENT_CD = TEST_SELLER_TYPE_DIM.SELLER_TYPE_CD\n;\n\"",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  }, {
-    "interruptCmd" : null,
-    "name" : "Build Base Cuboid Data",
-    "sequence_id" : 2,
-    "exec_cmd" : "hbase org.apache.hadoop.util.RunJar kylin-job.jar org.apache.kylin.job.hadoop.cube.BaseCuboidJob  -D mapred.compress.map.output=true -D mapred.map.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compress=true -D mapred.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compression.type=BLOCK -D dfs.block.size=67108864 -D ebay.alert.email=DL-eBay-Kylin@corp.ebay.com -D ebay.job.name=Kylin_Olap_Cube_Builder -cubename test_kylin_cube_with_slr_empty -input /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/kylin_intermediate_test_kylin_cube_with_slr_desc_8ad83b8c_6bda_4b79_864a_9566c0f8ce2c -output /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/base_cuboid -jobname Kylin_Base_Cuboid_Builder_test_kylin_cube_with_slr_empty_Step_2 -level 0",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  }, {
-    "interruptCmd" : null,
-    "name" : "Build N-Dimension Cuboid Data : 7-Dimension",
-    "sequence_id" : 3,
-    "exec_cmd" : "hbase org.apache.hadoop.util.RunJar kylin-job.jar org.apache.kylin.job.hadoop.cube.NDCuboidJob  -D mapred.compress.map.output=true -D mapred.map.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compress=true -D mapred.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compression.type=BLOCK -D dfs.block.size=67108864 -D ebay.alert.email=DL-eBay-Kylin@corp.ebay.com -D ebay.job.name=Kylin_Olap_Cube_Builder -cubename test_kylin_cube_with_slr_empty -input /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/base_cuboid -output /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/7d_cuboid -jobname Kylin_ND-Cuboid_Builder_test_kylin_cube_with_slr_empty_Step_3 -level 1",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  }, {
-    "interruptCmd" : null,
-    "name" : "Build N-Dimension Cuboid Data : 6-Dimension",
-    "sequence_id" : 4,
-    "exec_cmd" : "hbase org.apache.hadoop.util.RunJar kylin-job.jar org.apache.kylin.job.hadoop.cube.NDCuboidJob  -D mapred.compress.map.output=true -D mapred.map.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compress=true -D mapred.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compression.type=BLOCK -D dfs.block.size=67108864 -D ebay.alert.email=DL-eBay-Kylin@corp.ebay.com -D ebay.job.name=Kylin_Olap_Cube_Builder -cubename test_kylin_cube_with_slr_empty -input /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/7d_cuboid -output /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/6d_cuboid -jobname Kylin_ND-Cuboid_Builder_test_kylin_cube_with_slr_empty_Step_4 -level 2",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  }, {
-    "interruptCmd" : null,
-    "name" : "Build N-Dimension Cuboid Data : 5-Dimension",
-    "sequence_id" : 5,
-    "exec_cmd" : "hbase org.apache.hadoop.util.RunJar kylin-job.jar org.apache.kylin.job.hadoop.cube.NDCuboidJob  -D mapred.compress.map.output=true -D mapred.map.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compress=true -D mapred.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compression.type=BLOCK -D dfs.block.size=67108864 -D ebay.alert.email=DL-eBay-Kylin@corp.ebay.com -D ebay.job.name=Kylin_Olap_Cube_Builder -cubename test_kylin_cube_with_slr_empty -input /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/6d_cuboid -output /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/5d_cuboid -jobname Kylin_ND-Cuboid_Builder_test_kylin_cube_with_slr_empty_Step_5 -level 3",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  }, {
-    "interruptCmd" : null,
-    "name" : "Build N-Dimension Cuboid Data : 4-Dimension",
-    "sequence_id" : 6,
-    "exec_cmd" : "hbase org.apache.hadoop.util.RunJar kylin-job.jar org.apache.kylin.job.hadoop.cube.NDCuboidJob  -D mapred.compress.map.output=true -D mapred.map.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compress=true -D mapred.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compression.type=BLOCK -D dfs.block.size=67108864 -D ebay.alert.email=DL-eBay-Kylin@corp.ebay.com -D ebay.job.name=Kylin_Olap_Cube_Builder -cubename test_kylin_cube_with_slr_empty -input /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/5d_cuboid -output /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/4d_cuboid -jobname Kylin_ND-Cuboid_Builder_test_kylin_cube_with_slr_empty_Step_6 -level 4",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  }, {
-    "interruptCmd" : null,
-    "name" : "Get Cuboid Data Key Distribution",
-    "sequence_id" : 7,
-    "exec_cmd" : "hbase org.apache.hadoop.util.RunJar kylin-job.jar org.apache.kylin.job.hadoop.cube.RangeKeyDistributionJob  -D mapred.compress.map.output=true -D mapred.map.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compress=true -D mapred.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compression.type=BLOCK -D dfs.block.size=67108864 -D ebay.alert.email=DL-eBay-Kylin@corp.ebay.com -D ebay.job.name=Kylin_Olap_Cube_Builder -input /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/* -output /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/rowkey_stats -jobname Kylin_Region_Splits_Calculator_test_kylin_cube_with_slr_empty_Step_7",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  }, {
-    "interruptCmd" : null,
-    "name" : "Create HBase Table",
-    "sequence_id" : 8,
-    "exec_cmd" : "hbase org.apache.hadoop.util.RunJar kylin-job.jar org.apache.kylin.job.hadoop.hbase.CreateHTableJob  -cubename test_kylin_cube_with_slr_empty -input /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/rowkey_stats/part-r-00000 -htablename table_abc",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  }, {
-    "interruptCmd" : null,
-    "name" : "Convert Cuboid Data to HFile",
-    "sequence_id" : 9,
-    "exec_cmd" : "hbase org.apache.hadoop.util.RunJar kylin-job.jar org.apache.kylin.job.hadoop.cube.CubeHFileJob  -D mapred.compress.map.output=true -D mapred.map.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compress=true -D mapred.output.compression.codec=com.hadoop.compression.lzo.LzoCodec -D mapred.output.compression.type=BLOCK -D dfs.block.size=67108864 -D ebay.alert.email=DL-eBay-Kylin@corp.ebay.com -D ebay.job.name=Kylin_Olap_Cube_Builder -cubename test_kylin_cube_with_slr_empty -input /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/cuboid/* -output /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/hfile -htablename table_abc -jobname Kylin_HFile_Generator_test_kylin_cube_with_slr_empty_Step_9",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  }, {
-    "interruptCmd" : null,
-    "name" : "Load HFile to HBase Table",
-    "sequence_id" : 10,
-    "exec_cmd" : "hadoop fs -chmod 777 /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/hfile/*;hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles /tmp/8ad83b8c-6bda-4b79-864a-9566c0f8ce2c/test_kylin_cube_with_slr_empty/hfile/ table_abc",
-    "pre_exec_cmd" : null,
-    "post_exec_cmd" : null,
-    "interrupt_cmd" : null,
-    "exec_start_time" : 0,
-    "exec_end_time" : 0,
-    "exec_wait_time" : 0,
-    "step_status" : "PENDING",
-    "cmd_type" : "MRCLI",
-    "cmd_output" : null,
-    "info" : null,
-    "run_async" : false
-  } ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 360c6b1..9ad1ca0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -603,7 +603,6 @@
         <module>source-hive</module>
         <module>source-kafka</module>
         <module>storage-hbase</module>
-        <module>job</module>
         <module>query</module>
         <module>server</module>
         <module>jdbc</module>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/query/pom.xml
----------------------------------------------------------------------
diff --git a/query/pom.xml b/query/pom.xml
index 0f7267f..36a03b0 100644
--- a/query/pom.xml
+++ b/query/pom.xml
@@ -47,6 +47,12 @@
             <version>${project.parent.version}</version>
         </dependency>
         <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-invertedindex</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        
+        <dependency>
             <groupId>org.apache.calcite</groupId>
             <artifactId>calcite-linq4j</artifactId>
         </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/server/pom.xml
----------------------------------------------------------------------
diff --git a/server/pom.xml b/server/pom.xml
index 30743c3..ea39b81 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -53,14 +53,8 @@
         </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-job</artifactId>
+            <artifactId>kylin-storage-hbase</artifactId>
             <version>${project.parent.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.hbase</groupId>
-                    <artifactId>hbase-common</artifactId>
-                </exclusion>
-            </exclusions>
         </dependency>
 
         <!-- Test & Env -->
@@ -78,13 +72,6 @@
             <scope>test</scope>
             <version>${project.parent.version}</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-job</artifactId>
-            <version>${project.parent.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
         
         <!-- depends on kylin-jdbc just for running jdbc test cases in server module -->
         <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
index 7484742..fe345b9 100644
--- a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
+++ b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
@@ -75,7 +75,7 @@ public class DebugTomcat {
 
     private static void overrideDevJobJarLocations() {
         KylinConfig conf = KylinConfig.getInstanceFromEnv();
-        File devJobJar = findFile("../job/target", "kylin-job-.*-SNAPSHOT-job.jar");
+        File devJobJar = findFile("../assembly/target", "kylin-job-.*-SNAPSHOT-job.jar");
         if (devJobJar != null) {
             conf.overrideMRJobJarPath(devJobJar.getAbsolutePath());
         }

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/server/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java b/server/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
index 21f364a..0cf3822 100644
--- a/server/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
+++ b/server/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
@@ -6,7 +6,7 @@ import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.service.AclService;
 import org.apache.kylin.rest.service.UserService;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.h2.util.StringUtils;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 461f00d..b702338 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -62,7 +62,7 @@ import org.apache.kylin.rest.security.AclPermission;
 import org.apache.kylin.source.hive.HiveSourceTableLoader;
 import org.apache.kylin.source.hive.cardinality.HiveColumnCardinalityJob;
 import org.apache.kylin.source.hive.cardinality.HiveColumnCardinalityUpdateJob;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/server/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 33039b3..d550262 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -64,7 +64,7 @@ import org.apache.kylin.rest.request.SQLRequest;
 import org.apache.kylin.rest.response.SQLResponse;
 import org.apache.kylin.rest.util.QueryUtil;
 import org.apache.kylin.rest.util.Serializer;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.h2.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/source-hive/pom.xml
----------------------------------------------------------------------
diff --git a/source-hive/pom.xml b/source-hive/pom.xml
index 04eb7f0..872355a 100644
--- a/source-hive/pom.xml
+++ b/source-hive/pom.xml
@@ -59,6 +59,12 @@
             <scope>provided</scope>
         </dependency>
         <dependency>
+            <groupId>org.apache.mrunit</groupId>
+            <artifactId>mrunit</artifactId>
+            <classifier>hadoop2</classifier>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
             <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/source-hive/src/test/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducerTest.java
----------------------------------------------------------------------
diff --git a/source-hive/src/test/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducerTest.java b/source-hive/src/test/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducerTest.java
new file mode 100644
index 0000000..5e8788a
--- /dev/null
+++ b/source-hive/src/test/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducerTest.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.source.hive.cardinality;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
+import org.apache.hadoop.mrunit.types.Pair;
+import org.apache.kylin.common.hll.HyperLogLogPlusCounter;
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.cube.kv.RowConstants;
+import org.apache.kylin.source.hive.cardinality.ColumnCardinalityMapper;
+import org.apache.kylin.source.hive.cardinality.ColumnCardinalityReducer;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * @author ysong1
+ * 
+ */
+public class ColumnCardinalityReducerTest {
+
+    public final static String strArr = "abc,tests,test,test,as,sts,test,tss,sets";
+
+    ReduceDriver<IntWritable, BytesWritable, IntWritable, LongWritable> reduceDriver;
+    String localTempDir = System.getProperty("java.io.tmpdir") + File.separator;
+
+    @Before
+    public void setUp() {
+        ColumnCardinalityReducer reducer = new ColumnCardinalityReducer();
+        reduceDriver = ReduceDriver.newReduceDriver(reducer);
+    }
+
+    private byte[] getBytes(String str) throws IOException {
+        HyperLogLogPlusCounter hllc = new HyperLogLogPlusCounter();
+        StringTokenizer tokenizer = new StringTokenizer(str, ColumnCardinalityMapper.DEFAULT_DELIM);
+        int i = 0;
+        while (tokenizer.hasMoreTokens()) {
+            String temp = i + "_" + tokenizer.nextToken();
+            i++;
+            hllc.add(Bytes.toBytes(temp));
+        }
+        ByteBuffer buf = ByteBuffer.allocate(RowConstants.ROWVALUE_BUFFER_SIZE);
+        buf.clear();
+        hllc.writeRegisters(buf);
+        buf.flip();
+        return buf.array();
+    }
+
+    @Test
+    public void testReducer() throws IOException {
+        IntWritable key1 = new IntWritable(1);
+        List<BytesWritable> values1 = new ArrayList<BytesWritable>();
+        values1.add(new BytesWritable(getBytes(strArr)));
+
+        IntWritable key2 = new IntWritable(2);
+        List<BytesWritable> values2 = new ArrayList<BytesWritable>();
+        values2.add(new BytesWritable(getBytes(strArr + " x")));
+
+        IntWritable key3 = new IntWritable(3);
+        List<BytesWritable> values3 = new ArrayList<BytesWritable>();
+        values3.add(new BytesWritable(getBytes(strArr + " xx")));
+
+        IntWritable key4 = new IntWritable(4);
+        List<BytesWritable> values4 = new ArrayList<BytesWritable>();
+        values4.add(new BytesWritable(getBytes(strArr + " xxx")));
+
+        IntWritable key5 = new IntWritable(5);
+        List<BytesWritable> values5 = new ArrayList<BytesWritable>();
+        values5.add(new BytesWritable(getBytes(strArr + " xxxx")));
+
+        reduceDriver.withInput(key1, values1);
+        reduceDriver.withInput(key2, values2);
+        reduceDriver.withInput(key3, values3);
+        reduceDriver.withInput(key4, values4);
+        reduceDriver.withInput(key5, values5);
+
+        List<Pair<IntWritable, LongWritable>> result = reduceDriver.run();
+
+        assertEquals(5, result.size());
+
+        int outputKey1 = result.get(0).getFirst().get();
+        LongWritable value1 = result.get(0).getSecond();
+        assertTrue(outputKey1 == 1);
+        assertTrue((10 == value1.get()) || (9 == value1.get()));
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/source-kafka/pom.xml
----------------------------------------------------------------------
diff --git a/source-kafka/pom.xml b/source-kafka/pom.xml
index 3693771..5b87364 100644
--- a/source-kafka/pom.xml
+++ b/source-kafka/pom.xml
@@ -27,6 +27,12 @@
 
         <!-- Env & Test -->
         <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka_2.10</artifactId>
+            <version>${kafka.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-common</artifactId>
             <scope>provided</scope>
@@ -43,11 +49,6 @@
             <scope>provided</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.kafka</groupId>
-            <artifactId>kafka_2.10</artifactId>
-            <version>${kafka.version}</version>
-        </dependency>
-        <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
             <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/pom.xml
----------------------------------------------------------------------
diff --git a/storage-hbase/pom.xml b/storage-hbase/pom.xml
index 76992cb..7ec8f80 100644
--- a/storage-hbase/pom.xml
+++ b/storage-hbase/pom.xml
@@ -137,7 +137,6 @@
                                     <include>org.apache.kylin:kylin-core-dictionary</include>
                                     <include>org.apache.kylin:kylin-core-cube</include>
                                     <include>org.apache.kylin:kylin-storage</include>
-                                    <include>org.apache.kylin:kylin-invertedindex</include>
                                     <include>com.ning:compress-lzf</include>
                                     <include>com.n3twork.druid:extendedset</include>
                                     <include>com.esotericsoftware:kryo-shaded</include>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
new file mode 100644
index 0000000..d1bb216
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
@@ -0,0 +1,234 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.storage.hbase;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.kylin.common.persistence.StorageException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author yangli9
+ * 
+ */
+public class HBaseConnection {
+
+    private static final Logger logger = LoggerFactory.getLogger(HBaseConnection.class);
+
+    private static final Map<String, Configuration> ConfigCache = new ConcurrentHashMap<String, Configuration>();
+    private static final Map<String, HConnection> ConnPool = new ConcurrentHashMap<String, HConnection>();
+
+    static {
+        Runtime.getRuntime().addShutdownHook(new Thread() {
+            @Override
+            public void run() {
+                for (HConnection conn : ConnPool.values()) {
+                    try {
+                        conn.close();
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+        });
+    }
+
+    /**
+     * e.g.
+     * 0. hbase (recommended way)
+     * 1. hbase:zk-1.hortonworks.com,zk-2.hortonworks.com,zk-3.hortonworks.com:2181:/hbase-unsecure
+     * 2. hbase:zk-1.hortonworks.com,zk-2.hortonworks.com,zk-3.hortonworks.com:2181
+     * 3. hbase:zk-1.hortonworks.com:2181:/hbase-unsecure
+     * 4. hbase:zk-1.hortonworks.com:2181
+     */
+    public static Configuration newHBaseConfiguration(String url) {
+        Configuration conf = HBaseConfiguration.create();
+        // reduce rpc retry
+        conf.set(HConstants.HBASE_CLIENT_PAUSE, "3000");
+        conf.set(HConstants.HBASE_CLIENT_RETRIES_NUMBER, "5");
+        conf.set(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, "60000");
+        // conf.set(ScannerCallable.LOG_SCANNER_ACTIVITY, "true");
+        if (StringUtils.isEmpty(url)) {
+            return conf;
+        }
+
+        // chop off "hbase"
+        if (url.startsWith("hbase") == false) {
+            throw new IllegalArgumentException("hbase url must start with 'hbase' -- " + url);
+        }
+
+        url = StringUtils.substringAfter(url, "hbase");
+        if (StringUtils.isEmpty(url)) {
+            return conf;
+        }
+
+        // case of "hbase:domain.com:2181:/hbase-unsecure"
+        Pattern urlPattern = Pattern.compile("[:]((?:[\\w\\-.]+)(?:\\,[\\w\\-.]+)*)[:](\\d+)(?:[:](.+))");
+        Matcher m = urlPattern.matcher(url);
+        if (m.matches() == false)
+            throw new IllegalArgumentException("HBase URL '" + url + "' is invalid, expected url is like '" + "hbase:domain.com:2181:/hbase-unsecure" + "'");
+
+        logger.debug("Creating hbase conf by parsing -- " + url);
+
+        String quorums = m.group(1);
+        String quorum = null;
+        try {
+            String[] tokens = quorums.split(",");
+            for (String s : tokens) {
+                quorum = s;
+                InetAddress.getByName(quorum);
+            }
+        } catch (UnknownHostException e) {
+            throw new IllegalArgumentException("Zookeeper quorum is invalid: " + quorum + "; urlString=" + url, e);
+        }
+        conf.set(HConstants.ZOOKEEPER_QUORUM, quorums);
+
+        String port = m.group(2);
+        conf.set(HConstants.ZOOKEEPER_CLIENT_PORT, port);
+
+        String znodePath = m.group(3);
+        conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, znodePath);
+
+        return conf;
+    }
+
+    // returned HConnection can be shared by multiple threads and does not require close()
+    @SuppressWarnings("resource")
+    public static HConnection get(String url) {
+        // find configuration
+        Configuration conf = ConfigCache.get(url);
+        if (conf == null) {
+            conf = newHBaseConfiguration(url);
+            ConfigCache.put(url, conf);
+        }
+
+        HConnection connection = ConnPool.get(url);
+        try {
+            while (true) {
+                // I don't use DCL since recreate a connection is not a big issue.
+                if (connection == null || connection.isClosed()) {
+                    logger.info("connection is null or closed, creating a new one");
+                    connection = HConnectionManager.createConnection(conf);
+                    ConnPool.put(url, connection);
+                }
+
+                if (connection == null || connection.isClosed()) {
+                    Thread.sleep(10000);// wait a while and retry
+                } else {
+                    break;
+                }
+            }
+
+        } catch (Throwable t) {
+            logger.error("Error when open connection " + url, t);
+            throw new StorageException("Error when open connection " + url, t);
+        }
+
+        return connection;
+    }
+
+    public static boolean tableExists(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
+        try {
+            return hbase.tableExists(TableName.valueOf(tableName));
+        } finally {
+            hbase.close();
+        }
+    }
+
+    public static boolean tableExists(String hbaseUrl, String tableName) throws IOException {
+        return tableExists(HBaseConnection.get(hbaseUrl), tableName);
+    }
+
+    public static void createHTableIfNeeded(String hbaseUrl, String tableName, String... families) throws IOException {
+        createHTableIfNeeded(HBaseConnection.get(hbaseUrl), tableName, families);
+    }
+
+    public static void deleteTable(String hbaseUrl, String tableName) throws IOException {
+        deleteTable(HBaseConnection.get(hbaseUrl), tableName);
+    }
+
+    public static void createHTableIfNeeded(HConnection conn, String tableName, String... families) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
+
+        try {
+            if (tableExists(conn, tableName)) {
+                logger.debug("HTable '" + tableName + "' already exists");
+                return;
+            }
+
+            logger.debug("Creating HTable '" + tableName + "'");
+
+            HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+
+            if (null != families && families.length > 0) {
+                for (String family : families) {
+                    HColumnDescriptor fd = new HColumnDescriptor(family);
+                    fd.setInMemory(true); // metadata tables are best in memory
+                    desc.addFamily(fd);
+                }
+            }
+            hbase.createTable(desc);
+
+            logger.debug("HTable '" + tableName + "' created");
+        } finally {
+            hbase.close();
+        }
+    }
+
+    public static void deleteTable(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
+
+        try {
+            if (!tableExists(conn, tableName)) {
+                logger.debug("HTable '" + tableName + "' does not exists");
+                return;
+            }
+
+            logger.debug("delete HTable '" + tableName + "'");
+
+            if (hbase.isTableEnabled(tableName)) {
+                hbase.disableTable(tableName);
+            }
+            hbase.deleteTable(tableName);
+
+            logger.debug("HTable '" + tableName + "' deleted");
+        } finally {
+            hbase.close();
+        }
+    }
+
+}
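
For reference: the relocated class above caches one Configuration and one shared HConnection per connection URL, and a JVM shutdown hook closes the pooled connections at exit. Below is a minimal usage sketch, not part of the commit -- the quorum host and table name are illustrative, and only methods defined in the class above (get, createHTableIfNeeded, tableExists) are used.

    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.kylin.storage.hbase.HBaseConnection;

    public class HBaseConnectionSketch {
        public static void main(String[] args) throws Exception {
            // Illustrative URL, following form 3 from the javadoc:
            // hbase:<quorum>:<client-port>:<znode-parent>
            String url = "hbase:zk-1.example.com:2181:/hbase-unsecure";

            // Shared, pooled connection; per the class comment it can be
            // used by multiple threads and does not require close().
            HConnection conn = HBaseConnection.get(url);

            // Hypothetical table name, one column family "f".
            HBaseConnection.createHTableIfNeeded(conn, "kylin_example_table", "f");
            System.out.println("exists: " + HBaseConnection.tableExists(conn, "kylin_example_table"));
        }
    }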

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index 97ee64d..59f9e84 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -47,7 +47,6 @@ import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.engine.mr.HadoopUtil;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
index 836f142..aca3ca9 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
@@ -58,8 +58,8 @@ import org.apache.kylin.metadata.realization.SQLDigest;
 import org.apache.kylin.metadata.tuple.ITupleIterator;
 import org.apache.kylin.storage.ICachableStorageQuery;
 import org.apache.kylin.storage.StorageContext;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.ObserverEnabler;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
 import org.apache.kylin.storage.hbase.steps.RowValueDecoder;
 import org.apache.kylin.storage.translate.ColumnValueRange;
 import org.apache.kylin.storage.translate.DerivedFilterTranslator;

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 7d1d833..7f28baf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -43,12 +43,13 @@ import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.gridtable.GTRecord;
 import org.apache.kylin.gridtable.GTScanRequest;
 import org.apache.kylin.gridtable.IGTScanner;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 
 import com.google.common.base.Function;
 import com.google.common.collect.Collections2;
 import com.google.common.collect.Iterators;
 import com.google.protobuf.ByteString;
+
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos;
 
 public class CubeHBaseEndpointRPC extends CubeHBaseRPC {

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index e673f32..8838578 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -18,7 +18,7 @@ import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.gridtable.GTScanRequest;
 import org.apache.kylin.gridtable.IGTScanner;
 import org.apache.kylin.gridtable.IGTStore;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 
 /**
  * for test use only

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/ii/InvertedIndexStorageQuery.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/ii/InvertedIndexStorageQuery.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/ii/InvertedIndexStorageQuery.java
index a93e460..eef7143 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/ii/InvertedIndexStorageQuery.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/ii/InvertedIndexStorageQuery.java
@@ -27,8 +27,8 @@ import org.apache.kylin.metadata.realization.SQLDigest;
 import org.apache.kylin.metadata.tuple.ITupleIterator;
 import org.apache.kylin.storage.ICachableStorageQuery;
 import org.apache.kylin.storage.StorageContext;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.EndpointTupleIterator;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
 import org.apache.kylin.storage.tuple.TupleInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseConnection.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseConnection.java
deleted file mode 100644
index abc4273..0000000
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseConnection.java
+++ /dev/null
@@ -1,234 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.storage.hbase.steps;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.kylin.common.persistence.StorageException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author yangli9
- * 
- */
-public class HBaseConnection {
-
-    private static final Logger logger = LoggerFactory.getLogger(HBaseConnection.class);
-
-    private static final Map<String, Configuration> ConfigCache = new ConcurrentHashMap<String, Configuration>();
-    private static final Map<String, HConnection> ConnPool = new ConcurrentHashMap<String, HConnection>();
-
-    static {
-        Runtime.getRuntime().addShutdownHook(new Thread() {
-            @Override
-            public void run() {
-                for (HConnection conn : ConnPool.values()) {
-                    try {
-                        conn.close();
-                    } catch (IOException e) {
-                        e.printStackTrace();
-                    }
-                }
-            }
-        });
-    }
-
-    /**
-     * e.g.
-     * 0. hbase (recommended way)
-     * 1. hbase:zk-1.hortonworks.com,zk-2.hortonworks.com,zk-3.hortonworks.com:2181:/hbase-unsecure
-     * 2. hbase:zk-1.hortonworks.com,zk-2.hortonworks.com,zk-3.hortonworks.com:2181
-     * 3. hbase:zk-1.hortonworks.com:2181:/hbase-unsecure
-     * 4. hbase:zk-1.hortonworks.com:2181
-     */
-    public static Configuration newHBaseConfiguration(String url) {
-        Configuration conf = HBaseConfiguration.create();
-        // reduce rpc retry
-        conf.set(HConstants.HBASE_CLIENT_PAUSE, "3000");
-        conf.set(HConstants.HBASE_CLIENT_RETRIES_NUMBER, "5");
-        conf.set(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, "60000");
-        // conf.set(ScannerCallable.LOG_SCANNER_ACTIVITY, "true");
-        if (StringUtils.isEmpty(url)) {
-            return conf;
-        }
-
-        // chop off "hbase"
-        if (url.startsWith("hbase") == false) {
-            throw new IllegalArgumentException("hbase url must start with 'hbase' -- " + url);
-        }
-
-        url = StringUtils.substringAfter(url, "hbase");
-        if (StringUtils.isEmpty(url)) {
-            return conf;
-        }
-
-        // case of "hbase:domain.com:2181:/hbase-unsecure"
-        Pattern urlPattern = Pattern.compile("[:]((?:[\\w\\-.]+)(?:\\,[\\w\\-.]+)*)[:](\\d+)(?:[:](.+))");
-        Matcher m = urlPattern.matcher(url);
-        if (m.matches() == false)
-            throw new IllegalArgumentException("HBase URL '" + url + "' is invalid, expected url is like '" + "hbase:domain.com:2181:/hbase-unsecure" + "'");
-
-        logger.debug("Creating hbase conf by parsing -- " + url);
-
-        String quorums = m.group(1);
-        String quorum = null;
-        try {
-            String[] tokens = quorums.split(",");
-            for (String s : tokens) {
-                quorum = s;
-                InetAddress.getByName(quorum);
-            }
-        } catch (UnknownHostException e) {
-            throw new IllegalArgumentException("Zookeeper quorum is invalid: " + quorum + "; urlString=" + url, e);
-        }
-        conf.set(HConstants.ZOOKEEPER_QUORUM, quorums);
-
-        String port = m.group(2);
-        conf.set(HConstants.ZOOKEEPER_CLIENT_PORT, port);
-
-        String znodePath = m.group(3);
-        conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, znodePath);
-
-        return conf;
-    }
-
-    // returned HConnection can be shared by multiple threads and does not require close()
-    @SuppressWarnings("resource")
-    public static HConnection get(String url) {
-        // find configuration
-        Configuration conf = ConfigCache.get(url);
-        if (conf == null) {
-            conf = newHBaseConfiguration(url);
-            ConfigCache.put(url, conf);
-        }
-
-        HConnection connection = ConnPool.get(url);
-        try {
-            while (true) {
-                // I don't use DCL since recreate a connection is not a big issue.
-                if (connection == null || connection.isClosed()) {
-                    logger.info("connection is null or closed, creating a new one");
-                    connection = HConnectionManager.createConnection(conf);
-                    ConnPool.put(url, connection);
-                }
-
-                if (connection == null || connection.isClosed()) {
-                    Thread.sleep(10000);// wait a while and retry
-                } else {
-                    break;
-                }
-            }
-
-        } catch (Throwable t) {
-            logger.error("Error when open connection " + url, t);
-            throw new StorageException("Error when open connection " + url, t);
-        }
-
-        return connection;
-    }
-
-    public static boolean tableExists(HConnection conn, String tableName) throws IOException {
-        HBaseAdmin hbase = new HBaseAdmin(conn);
-        try {
-            return hbase.tableExists(TableName.valueOf(tableName));
-        } finally {
-            hbase.close();
-        }
-    }
-
-    public static boolean tableExists(String hbaseUrl, String tableName) throws IOException {
-        return tableExists(HBaseConnection.get(hbaseUrl), tableName);
-    }
-
-    public static void createHTableIfNeeded(String hbaseUrl, String tableName, String... families) throws IOException {
-        createHTableIfNeeded(HBaseConnection.get(hbaseUrl), tableName, families);
-    }
-
-    public static void deleteTable(String hbaseUrl, String tableName) throws IOException {
-        deleteTable(HBaseConnection.get(hbaseUrl), tableName);
-    }
-
-    public static void createHTableIfNeeded(HConnection conn, String tableName, String... families) throws IOException {
-        HBaseAdmin hbase = new HBaseAdmin(conn);
-
-        try {
-            if (tableExists(conn, tableName)) {
-                logger.debug("HTable '" + tableName + "' already exists");
-                return;
-            }
-
-            logger.debug("Creating HTable '" + tableName + "'");
-
-            HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
-
-            if (null != families && families.length > 0) {
-                for (String family : families) {
-                    HColumnDescriptor fd = new HColumnDescriptor(family);
-                    fd.setInMemory(true); // metadata tables are best in memory
-                    desc.addFamily(fd);
-                }
-            }
-            hbase.createTable(desc);
-
-            logger.debug("HTable '" + tableName + "' created");
-        } finally {
-            hbase.close();
-        }
-    }
-
-    public static void deleteTable(HConnection conn, String tableName) throws IOException {
-        HBaseAdmin hbase = new HBaseAdmin(conn);
-
-        try {
-            if (!tableExists(conn, tableName)) {
-                logger.debug("HTable '" + tableName + "' does not exists");
-                return;
-            }
-
-            logger.debug("delete HTable '" + tableName + "'");
-
-            if (hbase.isTableEnabled(tableName)) {
-                hbase.disableTable(tableName);
-            }
-            hbase.deleteTable(tableName);
-
-            logger.debug("HTable '" + tableName + "' deleted");
-        } finally {
-            hbase.close();
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
index eba7551..b4c0d61 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
@@ -37,6 +37,7 @@ import org.apache.kylin.engine.mr.common.BatchConstants;
 import org.apache.kylin.engine.mr.steps.FactDistinctColumnsReducer;
 import org.apache.kylin.engine.streaming.IStreamingOutput;
 import org.apache.kylin.metadata.model.IBuildable;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index 5bbc3e1..5c7d46e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -52,12 +52,11 @@ import org.apache.kylin.invertedindex.IIManager;
 import org.apache.kylin.invertedindex.IISegment;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * @author yangli9
  */
 public class DeployCoprocessorCLI {
 
@@ -284,8 +283,7 @@ public class DeployCoprocessorCLI {
                 }
 
                 String jarPath = valueMatcher.group(1).trim();
-                String clsName = valueMatcher.group(2).trim();
-
+                //String clsName = valueMatcher.group(2).trim();
                 //if (CubeObserverClass.equals(clsName)) {
                 result.add(jarPath);
                 //}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index 826ce0a..e71089a 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.Pair;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 
 import com.google.common.collect.Lists;
 

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
index b53cf6f..dbf064e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.token.TokenUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 
 /**
  * @author yangli9

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
index ab15b72..6ea79ec 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
@@ -5,6 +5,7 @@ import java.util.concurrent.TimeUnit;
 
 import com.google.common.base.Function;
 import com.google.common.collect.Iterables;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.curator.RetryPolicy;
 import org.apache.curator.framework.CuratorFramework;
@@ -16,7 +17,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.job.lock.JobLock;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/ITInvertedIndexHBaseTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/ITInvertedIndexHBaseTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/ITInvertedIndexHBaseTest.java
index d410414..25c011e 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/ITInvertedIndexHBaseTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/ITInvertedIndexHBaseTest.java
@@ -34,7 +34,7 @@ import org.apache.kylin.invertedindex.index.TableRecord;
 import org.apache.kylin.invertedindex.index.TableRecordInfo;
 import org.apache.kylin.invertedindex.model.IIDesc;
 import org.apache.kylin.invertedindex.model.IIKeyValueCodec;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
 import org.apache.kylin.storage.hbase.cube.v1.HBaseClientKVIterator;
 import org.junit.After;

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/HbaseImporter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/HbaseImporter.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/HbaseImporter.java
index deea585..d5346d2 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/HbaseImporter.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/HbaseImporter.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.mapreduce.Import;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 
 import com.google.common.base.Preconditions;
 

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/streaming/pom.xml
----------------------------------------------------------------------
diff --git a/streaming/pom.xml b/streaming/pom.xml
index 2f430df..63fb179 100644
--- a/streaming/pom.xml
+++ b/streaming/pom.xml
@@ -21,11 +21,6 @@
         </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-invertedindex</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-storage-hbase</artifactId>
             <version>${project.parent.version}</version>
         </dependency>
@@ -59,6 +54,7 @@
             <groupId>org.apache.kafka</groupId>
             <artifactId>kafka_2.10</artifactId>
             <version>${kafka.version}</version>
+            <scope>provided</scope>
         </dependency>
 
         <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/streaming/src/main/java/org/apache/kylin/job/streaming/CubeStreamConsumer.java
----------------------------------------------------------------------
diff --git a/streaming/src/main/java/org/apache/kylin/job/streaming/CubeStreamConsumer.java b/streaming/src/main/java/org/apache/kylin/job/streaming/CubeStreamConsumer.java
index 38787a8..5bfacda 100644
--- a/streaming/src/main/java/org/apache/kylin/job/streaming/CubeStreamConsumer.java
+++ b/streaming/src/main/java/org/apache/kylin/job/streaming/CubeStreamConsumer.java
@@ -47,7 +47,7 @@ import org.apache.kylin.engine.mr.common.BatchConstants;
 import org.apache.kylin.engine.mr.steps.FactDistinctColumnsReducer;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.steps.HBaseCuboidWriter;
 import org.apache.kylin.storage.hbase.steps.CubeHTableUtil;
 import org.apache.kylin.streaming.MicroStreamBatch;

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/streaming/src/main/java/org/apache/kylin/job/streaming/StreamingBootstrap.java
----------------------------------------------------------------------
diff --git a/streaming/src/main/java/org/apache/kylin/job/streaming/StreamingBootstrap.java b/streaming/src/main/java/org/apache/kylin/job/streaming/StreamingBootstrap.java
index da7b20a..4212fea 100644
--- a/streaming/src/main/java/org/apache/kylin/job/streaming/StreamingBootstrap.java
+++ b/streaming/src/main/java/org/apache/kylin/job/streaming/StreamingBootstrap.java
@@ -66,7 +66,7 @@ import org.apache.kylin.invertedindex.model.IIDesc;
 import org.apache.kylin.job.monitor.StreamingMonitor;
 import org.apache.kylin.metadata.model.IntermediateColumnDesc;
 import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.streaming.JsonStreamParser;
 import org.apache.kylin.streaming.KafkaClusterConfig;
 import org.apache.kylin.streaming.KafkaConsumer;

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/streaming/src/main/java/org/apache/kylin/streaming/invertedindex/IIStreamConsumer.java
----------------------------------------------------------------------
diff --git a/streaming/src/main/java/org/apache/kylin/streaming/invertedindex/IIStreamConsumer.java b/streaming/src/main/java/org/apache/kylin/streaming/invertedindex/IIStreamConsumer.java
index 3d8a6f8..2a9e3e9 100644
--- a/streaming/src/main/java/org/apache/kylin/streaming/invertedindex/IIStreamConsumer.java
+++ b/streaming/src/main/java/org/apache/kylin/streaming/invertedindex/IIStreamConsumer.java
@@ -46,7 +46,7 @@ import org.apache.kylin.invertedindex.index.Slice;
 import org.apache.kylin.invertedindex.model.IIDesc;
 import org.apache.kylin.invertedindex.model.IIKeyValueCodec;
 import org.apache.kylin.invertedindex.model.IIRow;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.streaming.MicroStreamBatch;
 import org.apache.kylin.streaming.MicroStreamBatchConsumer;
 import org.apache.kylin.streaming.StreamingManager;
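
The recurring one-line change in the hunks above is a package move: HBaseConnection now lives in org.apache.kylin.storage.hbase rather than org.apache.kylin.storage.hbase.steps, and call sites only swap the import. A hedged sketch of the typical call pattern after the move, based on the usage visible elsewhere in this commit (the hbaseUrl literal is an assumption for illustration):

    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.kylin.storage.hbase.HBaseConnection; // formerly ...storage.hbase.steps.HBaseConnection

    public class ConnectionSketch {
        public static void main(String[] args) throws Exception {
            String hbaseUrl = "hbase"; // assumption: metadata URL without the TABLE@ prefix
            HConnection conn = HBaseConnection.get(hbaseUrl); // shared connection, as used in ExportHBaseData below
            HBaseAdmin admin = new HBaseAdmin(conn);
            try {
                System.out.println("table count: " + admin.listTables().length);
            } finally {
                admin.close();
            }
        }
    }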


[4/9] incubator-kylin git commit: KYLIN-1010 Decompose project job

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/DeployUtil.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/DeployUtil.java b/job/src/test/java/org/apache/kylin/job/DeployUtil.java
deleted file mode 100644
index 8c92f87..0000000
--- a/job/src/test/java/org/apache/kylin/job/DeployUtil.java
+++ /dev/null
@@ -1,262 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job;
-
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.List;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.persistence.ResourceStore;
-import org.apache.kylin.common.persistence.ResourceTool;
-import org.apache.kylin.common.util.AbstractKylinTestCase;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.CubeUpdate;
-import org.apache.kylin.job.dataGen.FactTableGenerator;
-import org.apache.kylin.job.streaming.KafkaDataLoader;
-import org.apache.kylin.job.streaming.StreamingTableDataGenerator;
-import org.apache.kylin.metadata.MetadataManager;
-import org.apache.kylin.metadata.model.ColumnDesc;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.source.hive.HiveClient;
-import org.apache.kylin.streaming.StreamMessage;
-import org.apache.kylin.streaming.StreamingConfig;
-import org.apache.kylin.streaming.TimedJsonStreamParser;
-import org.apache.maven.model.Model;
-import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-
-public class DeployUtil {
-    @SuppressWarnings("unused")
-    private static final Logger logger = LoggerFactory.getLogger(DeployUtil.class);
-
-    public static void initCliWorkDir() throws IOException {
-        execCliCommand("rm -rf " + getHadoopCliWorkingDir());
-        execCliCommand("mkdir -p " + config().getKylinJobLogDir());
-    }
-
-    public static void deployMetadata() throws IOException {
-        // install metadata to hbase
-        ResourceTool.reset(config());
-        ResourceTool.copy(KylinConfig.createInstanceFromUri(AbstractKylinTestCase.LOCALMETA_TEST_DATA), config());
-
-        // update cube desc signature.
-        for (CubeInstance cube : CubeManager.getInstance(config()).listAllCubes()) {
-            cube.getDescriptor().setSignature(cube.getDescriptor().calculateSignature());
-            CubeUpdate cubeBuilder = new CubeUpdate(cube);
-            CubeManager.getInstance(config()).updateCube(cubeBuilder);
-        }
-    }
-
-    public static void overrideJobJarLocations() {
-        File jobJar = getJobJarFile();
-        File coprocessorJar = getCoprocessorJarFile();
-
-        config().overrideMRJobJarPath(jobJar.getAbsolutePath());
-        config().overrideCoprocessorLocalJar(coprocessorJar.getAbsolutePath());
-        config().overrideSparkJobJarPath(getSparkJobJarFile().getAbsolutePath());
-    }
-
-    private static String getPomVersion() {
-        try {
-            MavenXpp3Reader pomReader = new MavenXpp3Reader();
-            Model model = pomReader.read(new FileReader("../pom.xml"));
-            return model.getVersion();
-        } catch (Exception e) {
-            throw new RuntimeException(e.getMessage(), e);
-        }
-    }
-
-    private static File getJobJarFile() {
-        return new File("../job/target", "kylin-job-" + getPomVersion() + "-job.jar");
-    }
-
-    private static File getCoprocessorJarFile() {
-        return new File("../storage-hbase/target", "kylin-storage-hbase-" + getPomVersion() + "-coprocessor.jar");
-    }
-
-    private static File getSparkJobJarFile() {
-        return new File("../engine-spark/target", "kylin-engine-spark-" + getPomVersion() + "-job.jar");
-    }
-
-    private static void execCliCommand(String cmd) throws IOException {
-        config().getCliCommandExecutor().execute(cmd);
-    }
-
-    private static String getHadoopCliWorkingDir() {
-        return config().getCliWorkingDir();
-    }
-
-    private static KylinConfig config() {
-        return KylinConfig.getInstanceFromEnv();
-    }
-
-    // ============================================================================
-
-    static final String TABLE_CAL_DT = "edw.test_cal_dt";
-    static final String TABLE_CATEGORY_GROUPINGS = "default.test_category_groupings";
-    static final String TABLE_KYLIN_FACT = "default.test_kylin_fact";
-    static final String TABLE_SELLER_TYPE_DIM = "edw.test_seller_type_dim";
-    static final String TABLE_SITES = "edw.test_sites";
-
-    static final String[] TABLE_NAMES = new String[] { TABLE_CAL_DT, TABLE_CATEGORY_GROUPINGS, TABLE_KYLIN_FACT, TABLE_SELLER_TYPE_DIM, TABLE_SITES };
-
-    public static void prepareTestDataForNormalCubes(String cubeName) throws Exception {
-
-        String factTableName = TABLE_KYLIN_FACT.toUpperCase();
-        String content = null;
-
-        boolean buildCubeUsingProvidedData = Boolean.parseBoolean(System.getProperty("buildCubeUsingProvidedData"));
-        if (!buildCubeUsingProvidedData) {
-            System.out.println("build cube with random dataset");
-            // data is generated according to cube descriptor and saved in resource store
-            content = FactTableGenerator.generate(cubeName, "10000", "0.6", null);
-            assert content != null;
-            overrideFactTableData(content, factTableName);
-        } else {
-            System.out.println("build normal cubes with provided dataset");
-        }
-
-        deployHiveTables();
-    }
-
-    public static void prepareTestDataForStreamingCube(long startTime, long endTime, StreamingConfig streamingConfig) throws IOException {
-        CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(streamingConfig.getCubeName());
-        List<String> data = StreamingTableDataGenerator.generate(10000, startTime, endTime, cubeInstance.getFactTable());
-        TableDesc tableDesc = cubeInstance.getFactTableDesc();
-
-        //load into kafka
-        KafkaDataLoader.loadIntoKafka(streamingConfig, data);
-        logger.info("Write {} messages into topic {}", data.size(), streamingConfig.getTopic());
-
-        //csv data for H2 use
-        List<TblColRef> tableColumns = Lists.newArrayList();
-        for (ColumnDesc columnDesc : tableDesc.getColumns()) {
-            tableColumns.add(new TblColRef(columnDesc));
-        }
-        TimedJsonStreamParser timedJsonStreamParser = new TimedJsonStreamParser(tableColumns, "formatTs=true");
-        StringBuilder sb = new StringBuilder();
-        for (String json : data) {
-            List<String> rowColumns = timedJsonStreamParser.parse(new StreamMessage(0, json.getBytes())).getStreamMessage();
-            sb.append(StringUtils.join(rowColumns, ","));
-            sb.append(System.getProperty("line.separator"));
-        }
-        overrideFactTableData(sb.toString(), cubeInstance.getFactTable());
-    }
-
-    public static void overrideFactTableData(String factTableContent, String factTableName) throws IOException {
-        // Write to resource store
-        ResourceStore store = ResourceStore.getStore(config());
-
-        InputStream in = new ByteArrayInputStream(factTableContent.getBytes("UTF-8"));
-        String factTablePath = "/data/" + factTableName + ".csv";
-        store.deleteResource(factTablePath);
-        store.putResource(factTablePath, in, System.currentTimeMillis());
-        in.close();
-    }
-
-    private static void deployHiveTables() throws Exception {
-
-        MetadataManager metaMgr = MetadataManager.getInstance(config());
-
-        // scp data files, use the data from hbase, instead of local files
-        File temp = File.createTempFile("temp", ".csv");
-        temp.createNewFile();
-        for (String tablename : TABLE_NAMES) {
-            tablename = tablename.toUpperCase();
-
-            File localBufferFile = new File(temp.getParent() + "/" + tablename + ".csv");
-            localBufferFile.createNewFile();
-
-            InputStream hbaseDataStream = metaMgr.getStore().getResource("/data/" + tablename + ".csv");
-            FileOutputStream localFileStream = new FileOutputStream(localBufferFile);
-            IOUtils.copy(hbaseDataStream, localFileStream);
-
-            hbaseDataStream.close();
-            localFileStream.close();
-
-            localBufferFile.deleteOnExit();
-        }
-        String tableFileDir = temp.getParent();
-        temp.delete();
-
-        HiveClient hiveClient = new HiveClient();
-
-        // create hive tables
-        hiveClient.executeHQL("CREATE DATABASE IF NOT EXISTS EDW");
-        hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CAL_DT.toUpperCase())));
-        hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CATEGORY_GROUPINGS.toUpperCase())));
-        hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_KYLIN_FACT.toUpperCase())));
-        hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_SELLER_TYPE_DIM.toUpperCase())));
-        hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_SITES.toUpperCase())));
-
-        // load data to hive tables
-        // LOAD DATA LOCAL INPATH 'filepath' [OVERWRITE] INTO TABLE tablename
-        hiveClient.executeHQL(generateLoadDataHql(TABLE_CAL_DT, tableFileDir));
-        hiveClient.executeHQL(generateLoadDataHql(TABLE_CATEGORY_GROUPINGS, tableFileDir));
-        hiveClient.executeHQL(generateLoadDataHql(TABLE_KYLIN_FACT, tableFileDir));
-        hiveClient.executeHQL(generateLoadDataHql(TABLE_SELLER_TYPE_DIM, tableFileDir));
-        hiveClient.executeHQL(generateLoadDataHql(TABLE_SITES, tableFileDir));
-    }
-
-    private static String generateLoadDataHql(String tableName, String tableFileDir) {
-        return "LOAD DATA LOCAL INPATH '" + tableFileDir + "/" + tableName.toUpperCase() + ".csv' OVERWRITE INTO TABLE " + tableName.toUpperCase();
-    }
-
-    private static String[] generateCreateTableHql(TableDesc tableDesc) {
-
-        String dropsql = "DROP TABLE IF EXISTS " + tableDesc.getIdentity();
-        StringBuilder ddl = new StringBuilder();
-
-        ddl.append("CREATE TABLE " + tableDesc.getIdentity() + "\n");
-        ddl.append("(" + "\n");
-
-        for (int i = 0; i < tableDesc.getColumns().length; i++) {
-            ColumnDesc col = tableDesc.getColumns()[i];
-            if (i > 0) {
-                ddl.append(",");
-            }
-            ddl.append(col.getName() + " " + getHiveDataType(col.getDatatype()) + "\n");
-        }
-
-        ddl.append(")" + "\n");
-        ddl.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY ','" + "\n");
-        ddl.append("STORED AS TEXTFILE");
-
-        return new String[] { dropsql, ddl.toString() };
-    }
-
-    private static String getHiveDataType(String javaDataType) {
-        String hiveDataType = javaDataType.toLowerCase().startsWith("varchar") ? "string" : javaDataType;
-        hiveDataType = javaDataType.toLowerCase().startsWith("integer") ? "int" : hiveDataType;
-
-        return hiveDataType.toLowerCase();
-    }
-}
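
The type mapping at the bottom of the deleted DeployUtil is easy to misread, since the second ternary tests javaDataType again rather than the intermediate result; the net effect is varchar* -> string, integer* -> int, and everything else passed through lower-cased. A small equivalent sketch (class and method names are illustrative only):

    public class HiveTypeMappingSketch {
        // equivalent to DeployUtil.getHiveDataType
        static String toHiveType(String javaDataType) {
            String t = javaDataType.toLowerCase();
            if (t.startsWith("varchar")) return "string";
            if (t.startsWith("integer")) return "int";
            return t;
        }

        public static void main(String[] args) {
            System.out.println(toHiveType("VARCHAR(256)")); // string
            System.out.println(toHiveType("integer"));      // int
            System.out.println(toHiveType("Decimal"));      // decimal
        }
    }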

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/ExportHBaseData.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/ExportHBaseData.java b/job/src/test/java/org/apache/kylin/job/ExportHBaseData.java
deleted file mode 100644
index 2d8bb05..0000000
--- a/job/src/test/java/org/apache/kylin/job/ExportHBaseData.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job;
-
-import java.io.File;
-import java.io.IOException;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.AbstractKylinTestCase;
-import org.apache.kylin.common.util.CliCommandExecutor;
-import org.apache.kylin.common.util.SSHClient;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
-import org.apache.kylin.storage.hbase.steps.HBaseMiniclusterHelper;
-
-public class ExportHBaseData {
-
-    KylinConfig kylinConfig;
-    HTableDescriptor[] allTables;
-    Configuration config;
-    HBaseAdmin hbase;
-    CliCommandExecutor cli;
-    String exportHdfsFolder;
-    String exportLocalFolderParent;
-    String exportLocalFolder;
-    String backupArchive;
-    String tableNameBase;
-    long currentTIME;
-
-    public ExportHBaseData() {
-        try {
-            setup();
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    private void setup() throws IOException {
-
-        KylinConfig.destoryInstance();
-        System.setProperty(KylinConfig.KYLIN_CONF, AbstractKylinTestCase.SANDBOX_TEST_DATA);
-
-        kylinConfig = KylinConfig.getInstanceFromEnv();
-        cli = kylinConfig.getCliCommandExecutor();
-
-        currentTIME = System.currentTimeMillis();
-        exportHdfsFolder = kylinConfig.getHdfsWorkingDirectory() + "hbase-export/" + currentTIME + "/";
-        exportLocalFolderParent = BatchConstants.CFG_KYLIN_LOCAL_TEMP_DIR + "hbase-export/";
-        exportLocalFolder = exportLocalFolderParent + currentTIME + "/";
-        backupArchive = exportLocalFolderParent + "hbase-export-at-" + currentTIME + ".tar.gz";
-
-        String metadataUrl = kylinConfig.getMetadataUrl();
-        // split TABLE@HBASE_URL
-        int cut = metadataUrl.indexOf('@');
-        tableNameBase = cut < 0 ? metadataUrl : metadataUrl.substring(0, cut);
-        String hbaseUrl = cut < 0 ? metadataUrl : metadataUrl.substring(cut + 1);
-
-        HConnection conn = HBaseConnection.get(hbaseUrl);
-        try {
-            hbase = new HBaseAdmin(conn);
-            config = hbase.getConfiguration();
-            allTables = hbase.listTables();
-        } catch (IOException e) {
-            e.printStackTrace();
-            throw e;
-        }
-    }
-
-    public void tearDown() {
-
-        // cleanup hdfs
-        try {
-            if (cli != null && exportHdfsFolder != null) {
-                cli.execute("hadoop fs -rm -r " + exportHdfsFolder);
-            }
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        // cleanup sandbox disk
-        try {
-            if (cli != null && exportLocalFolder != null) {
-                cli.execute("rm -r " + exportLocalFolder);
-            }
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-
-        // delete archive file on sandbox
-        try {
-            if (cli != null && backupArchive != null) {
-                cli.execute("rm " + backupArchive);
-            }
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-    }
-
-    public void exportTables() throws IOException {
-        cli.execute("mkdir -p " + exportLocalFolderParent);
-
-        for (HTableDescriptor table : allTables) {
-            String tName = table.getNameAsString();
-            if (!tName.equals(tableNameBase) && !tName.startsWith(HBaseMiniclusterHelper.SHARED_STORAGE_PREFIX))
-                continue;
-
-            cli.execute("hbase org.apache.hadoop.hbase.mapreduce.Export " + tName + " " + exportHdfsFolder + tName);
-        }
-
-        cli.execute("hadoop fs -copyToLocal " + exportHdfsFolder + " " + exportLocalFolderParent);
-        cli.execute("tar -zcvf " + backupArchive + " --directory=" + exportLocalFolderParent + " " + currentTIME);
-        downloadToLocal();
-    }
-
-    public void downloadToLocal() throws IOException {
-        String localArchive = "../examples/test_case_data/minicluster/hbase-export.tar.gz";
-
-        if (kylinConfig.getRunAsRemoteCommand()) {
-            SSHClient ssh = new SSHClient(kylinConfig.getRemoteHadoopCliHostname(), kylinConfig.getRemoteHadoopCliPort(), kylinConfig.getRemoteHadoopCliUsername(), kylinConfig.getRemoteHadoopCliPassword());
-            try {
-                ssh.scpFileToLocal(backupArchive, localArchive);
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        } else {
-            FileUtils.copyFile(new File(backupArchive), new File(localArchive));
-        }
-    }
-
-    public static void main(String[] args) {
-        ExportHBaseData export = new ExportHBaseData();
-        try {
-            export.exportTables();
-        } catch (IOException e) {
-            e.printStackTrace();
-        } finally {
-            export.tearDown();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/ITKafkaBasedIIStreamBuilderTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/ITKafkaBasedIIStreamBuilderTest.java b/job/src/test/java/org/apache/kylin/job/ITKafkaBasedIIStreamBuilderTest.java
deleted file mode 100644
index 6a615cb..0000000
--- a/job/src/test/java/org/apache/kylin/job/ITKafkaBasedIIStreamBuilderTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- *
- *
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *
- *  contributor license agreements. See the NOTICE file distributed with
- *
- *  this work for additional information regarding copyright ownership.
- *
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *
- *  (the "License"); you may not use this file except in compliance with
- *
- *  the License. You may obtain a copy of the License at
- *
- *
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *
- *
- *  Unless required by applicable law or agreed to in writing, software
- *
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *
- *  See the License for the specific language governing permissions and
- *
- *  limitations under the License.
- *
- * /
- */
-
-package org.apache.kylin.job;
-
-import java.io.File;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.AbstractKylinTestCase;
-import org.apache.kylin.common.util.ClassUtil;
-import org.apache.kylin.job.streaming.StreamingBootstrap;
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- */
-@Ignore("this test case will break existing metadata store")
-public class ITKafkaBasedIIStreamBuilderTest {
-
-    private static final Logger logger = LoggerFactory.getLogger(ITKafkaBasedIIStreamBuilderTest.class);
-
-    private KylinConfig kylinConfig;
-
-    @BeforeClass
-    public static void beforeClass() throws Exception {
-        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
-    }
-
-    @Before
-    public void before() throws Exception {
-        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
-
-        kylinConfig = KylinConfig.getInstanceFromEnv();
-        DeployUtil.initCliWorkDir();
-        DeployUtil.deployMetadata();
-        DeployUtil.overrideJobJarLocations();
-    }
-
-    @Test
-    public void test() throws Exception {
-        final StreamingBootstrap bootstrap = StreamingBootstrap.getInstance(kylinConfig);
-        bootstrap.start("eagle", 0);
-        Thread.sleep(30 * 60 * 1000);
-        logger.info("time is up, stop streaming");
-        bootstrap.stop();
-        Thread.sleep(5 * 1000);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/dataGen/ColumnConfig.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/dataGen/ColumnConfig.java b/job/src/test/java/org/apache/kylin/job/dataGen/ColumnConfig.java
deleted file mode 100644
index 44ba8f4..0000000
--- a/job/src/test/java/org/apache/kylin/job/dataGen/ColumnConfig.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.dataGen;
-
-import java.util.ArrayList;
-
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- */
-@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
-public class ColumnConfig {
-    @JsonProperty("columnName")
-    private String columnName;
-    @JsonProperty("valueSet")
-    private ArrayList<String> valueSet;
-    @JsonProperty("exclusive")
-    private boolean exclusive;
-    @JsonProperty("asRange")
-    private boolean asRange;
-
-    public boolean isAsRange() {
-        return asRange;
-    }
-
-    public void setAsRange(boolean asRange) {
-        this.asRange = asRange;
-    }
-
-    public boolean isExclusive() {
-        return exclusive;
-    }
-
-    public void setExclusive(boolean exclusive) {
-        this.exclusive = exclusive;
-    }
-
-    public String getColumnName() {
-        return columnName;
-    }
-
-    public void setColumnName(String columnName) {
-        this.columnName = columnName;
-    }
-
-    public ArrayList<String> getValueSet() {
-        return valueSet;
-    }
-
-    public void setValueSet(ArrayList<String> valueSet) {
-        this.valueSet = valueSet;
-    }
-
-}
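
As a usage sketch, the Jackson annotations above bind entries of the generator's JSON config (loaded from /data/data_gen_config.json via GenConfig) to this POJO. The column name and values below are hypothetical:

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class ColumnConfigSketch {
        public static void main(String[] args) throws Exception {
            // hypothetical single-column snippet of the data-gen config
            String json = "{\"columnName\":\"SOME_COLUMN\",\"valueSet\":[\"A\",\"B\"],\"exclusive\":true,\"asRange\":false}";
            ColumnConfig cc = new ObjectMapper().readValue(json, ColumnConfig.class);
            System.out.println(cc.getColumnName() + " exclusive=" + cc.isExclusive());
        }
    }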

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/dataGen/FactTableGenerator.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/dataGen/FactTableGenerator.java b/job/src/test/java/org/apache/kylin/job/dataGen/FactTableGenerator.java
deleted file mode 100644
index a965753..0000000
--- a/job/src/test/java/org/apache/kylin/job/dataGen/FactTableGenerator.java
+++ /dev/null
@@ -1,647 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.dataGen;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.persistence.ResourceStore;
-import org.apache.kylin.common.util.Array;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.model.CubeDesc;
-import org.apache.kylin.cube.model.DimensionDesc;
-import org.apache.kylin.metadata.MetadataManager;
-import org.apache.kylin.metadata.model.ColumnDesc;
-import org.apache.kylin.metadata.model.DataType;
-import org.apache.kylin.metadata.model.JoinDesc;
-import org.apache.kylin.metadata.model.MeasureDesc;
-import org.apache.kylin.metadata.model.TblColRef;
-
-/**
- */
-public class FactTableGenerator {
-    CubeInstance cube = null;
-    CubeDesc desc = null;
-    ResourceStore store = null;
-    String factTableName = null;
-
-    GenConfig genConf = null;
-
-    Random r = null;
-
-    String cubeName;
-    long randomSeed;
-    int rowCount;
-    int unlinkableRowCount;
-    int unlinkableRowCountMax;
-    double conflictRatio;
-    double linkableRatio;
-
-    // the names of lookup table columns that are related to the fact
-    // table (i.e. appear as FKs in the fact table)
-    TreeMap<String, LinkedList<String>> lookupTableKeys = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
-
-    // possible values of lookupTableKeys, extracted from existing lookup
-    // tables.
-    // The key is in the format of tablename/columnname
-    TreeMap<String, ArrayList<String>> feasibleValues = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
-
-    // lookup table name -> sets of all composite keys
-    TreeMap<String, HashSet<Array<String>>> lookupTableCompositeKeyValues = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
-
-    private void init(String cubeName, int rowCount, double conflictRatio, double linkableRatio, long randomSeed) {
-        this.rowCount = rowCount;
-        this.conflictRatio = conflictRatio;
-        this.cubeName = cubeName;
-        this.randomSeed = randomSeed;
-        this.linkableRatio = linkableRatio;
-
-        this.unlinkableRowCountMax = (int) (this.rowCount * (1 - linkableRatio));
-        this.unlinkableRowCount = 0;
-
-        r = new Random(randomSeed);
-
-        KylinConfig config = KylinConfig.getInstanceFromEnv();
-        cube = CubeManager.getInstance(config).getCube(cubeName);
-        desc = cube.getDescriptor();
-        factTableName = desc.getFactTable();
-        store = ResourceStore.getStore(config);
-    }
-
-    /*
-     * users can specify the value preference for each column
-     */
-    private void loadConfig() {
-        try {
-            InputStream configStream = null;
-            configStream = store.getResource("/data/data_gen_config.json");
-            this.genConf = GenConfig.loadConfig(configStream);
-
-            if (configStream != null)
-                configStream.close();
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-    }
-
-    private void loadLookupTableValues(String lookupTableName, LinkedList<String> columnNames, int distinctRowCount) throws Exception {
-        KylinConfig config = KylinConfig.getInstanceFromEnv();
-
-        // only deal with composite keys
-        if (columnNames.size() > 1 && !lookupTableCompositeKeyValues.containsKey(lookupTableName)) {
-            lookupTableCompositeKeyValues.put(lookupTableName, new HashSet<Array<String>>());
-        }
-
-        InputStream tableStream = null;
-        BufferedReader tableReader = null;
-        try {
-            TreeMap<String, Integer> zeroBasedInice = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
-            for (String columnName : columnNames) {
-                ColumnDesc cDesc = MetadataManager.getInstance(config).getTableDesc(lookupTableName).findColumnByName(columnName);
-                zeroBasedInice.put(columnName, cDesc.getZeroBasedIndex());
-            }
-
-            String path = "/data/" + lookupTableName + ".csv";
-            tableStream = store.getResource(path);
-            tableReader = new BufferedReader(new InputStreamReader(tableStream));
-            tableReader.mark(0);
-            int rowCount = 0;
-            int curRowNum = 0;
-            String curRow;
-
-            while (tableReader.readLine() != null)
-                rowCount++;
-
-            HashSet<Integer> rows = new HashSet<Integer>();
-            distinctRowCount = (distinctRowCount < rowCount) ? distinctRowCount : rowCount;
-            while (rows.size() < distinctRowCount) {
-                rows.add(r.nextInt(rowCount));
-            }
-
-            // reopen the stream
-            tableReader.close();
-            tableStream.close();
-            tableStream = null;
-            tableReader = null;
-
-            tableStream = store.getResource(path);
-            tableReader = new BufferedReader(new InputStreamReader(tableStream));
-
-            while ((curRow = tableReader.readLine()) != null) {
-                if (rows.contains(curRowNum)) {
-                    String[] tokens = curRow.split(",");
-
-                    String[] comboKeys = null;
-                    int index = 0;
-                    if (columnNames.size() > 1)
-                        comboKeys = new String[columnNames.size()];
-
-                    for (String columnName : columnNames) {
-                        int zeroBasedIndex = zeroBasedIndices.get(columnName);
-                        if (!feasibleValues.containsKey(lookupTableName + "/" + columnName))
-                            feasibleValues.put(lookupTableName + "/" + columnName, new ArrayList<String>());
-                        feasibleValues.get(lookupTableName + "/" + columnName).add(tokens[zeroBasedIndex]);
-
-                        if (columnNames.size() > 1) {
-                            comboKeys[index] = tokens[zeroBasedIndex];
-                            index++;
-                        }
-                    }
-
-                    if (columnNames.size() > 1) {
-                        Array<String> wrap = new Array<String>(comboKeys);
-                        if (lookupTableCompositeKeyValues.get(lookupTableName).contains(wrap)) {
-                            throw new Exception("The composite key already exist in the lookup table");
-                        }
-                        lookupTableCompositeKeyValues.get(lookupTableName).add(wrap);
-                    }
-                }
-                curRowNum++;
-            }
-
-            if (tableStream != null)
-                tableStream.close();
-            if (tableReader != null)
-                tableReader.close();
-
-        } catch (IOException e) {
-            e.printStackTrace();
-            System.exit(1);
-        }
-    }
-
-    // prepare the candidate values for each joined column
-    private void prepare() throws Exception {
-        // load config
-        loadConfig();
-
-        TreeSet<String> factTableColumns = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
-
-        for (DimensionDesc dim : desc.getDimensions()) {
-            for (TblColRef col : dim.getColumnRefs()) {
-                if (col.getTable().equals(factTableName))
-                    factTableColumns.add(col.getName());
-            }
-
-            JoinDesc join = dim.getJoin();
-            if (join != null) {
-                String lookupTable = dim.getTable();
-                for (String column : join.getPrimaryKey()) {
-                    if (!lookupTableKeys.containsKey(lookupTable)) {
-                        lookupTableKeys.put(lookupTable, new LinkedList<String>());
-                    }
-
-                    if (!lookupTableKeys.get(lookupTable).contains(column))
-                        lookupTableKeys.get(lookupTable).add(column);
-                }
-            }
-        }
-
-        int distinctRowCount = (int) (this.rowCount / this.conflictRatio);
-        distinctRowCount = (distinctRowCount == 0) ? 1 : distinctRowCount;
-        // lookup tables
-        for (String lookupTable : lookupTableKeys.keySet()) {
-            this.loadLookupTableValues(lookupTable, lookupTableKeys.get(lookupTable), distinctRowCount);
-        }
-    }
-
-    private List<DimensionDesc> getSortedDimentsionDescs() {
-        List<DimensionDesc> dimensions = desc.getDimensions();
-        Collections.sort(dimensions, new Comparator<DimensionDesc>() {
-            @Override
-            public int compare(DimensionDesc o1, DimensionDesc o2) {
-                JoinDesc j1 = o2.getJoin();
-                JoinDesc j2 = o1.getJoin();
-                return Integer.valueOf(j1 != null ? j1.getPrimaryKey().length : 0).compareTo(j2 != null ? j2.getPrimaryKey().length : 0);
-            }
-        });
-        return dimensions;
-    }
-
-    /**
-     * Generate the fact table and return it as text
-     *
-     * @return
-     * @throws Exception
-     */
-    private String cookData() throws Exception {
-        // the columns on the fact table can be classified into three groups:
-        // 1. foreign keys
-        TreeMap<String, String> factTableCol2LookupCol = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
-        // 2. metrics or directly used dimensions
-        TreeSet<String> usedCols = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
-        // 3. others, not referenced anywhere
-
-        TreeMap<String, String> lookupCol2factTableCol = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
-
-        // find fact table columns in fks
-        List<DimensionDesc> dimensions = getSortedDimentsionDescs();
-        for (DimensionDesc dim : dimensions) {
-            JoinDesc jDesc = dim.getJoin();
-            if (jDesc != null) {
-                String[] fks = jDesc.getForeignKey();
-                String[] pks = jDesc.getPrimaryKey();
-                int num = fks.length;
-                for (int i = 0; i < num; ++i) {
-                    String value = dim.getTable() + "/" + pks[i];
-
-                    lookupCol2factTableCol.put(value, fks[i]);
-
-                    if (factTableCol2LookupCol.containsKey(fks[i])) {
-                        if (!factTableCol2LookupCol.get(fks[i]).equals(value)) {
-                            System.out.println("Warning: Disambiguation on the mapping of column " + fks[i] + ", " + factTableCol2LookupCol.get(fks[i]) + "(chosen) or " + value);
-                            continue;
-                        }
-                    }
-                    factTableCol2LookupCol.put(fks[i], value);
-                }
-            }
-            // else, deal with it in the next round
-        }
-
-        // find fact table columns in direct dimension
-        // DO NOT merge this with the previous loop
-        for (DimensionDesc dim : dimensions) {
-            JoinDesc jDesc = dim.getJoin();
-            if (jDesc == null) {
-                // column on fact table used directly as a dimension
-                for (String aColumn : dim.getColumn()) {
-                    if (!factTableCol2LookupCol.containsKey(aColumn))
-                        usedCols.add(aColumn);
-                }
-            }
-        }
-
-        // find fact table columns in measures
-        for (MeasureDesc mDesc : desc.getMeasures()) {
-            List<TblColRef> pcols = mDesc.getFunction().getParameter().getColRefs();
-            if (pcols != null) {
-                for (TblColRef col : pcols) {
-                    if (!factTableCol2LookupCol.containsKey(col.getName()))
-                        usedCols.add(col.getName());
-                }
-            }
-        }
-
-        return createTable(this.rowCount, factTableCol2LookupCol, lookupCol2factTableCol, usedCols);
-    }
-
-    private String normToTwoDigits(int v) {
-        if (v < 10)
-            return "0" + v;
-        else
-            return Integer.toString(v);
-    }
-
-    private String randomPick(ArrayList<String> candidates) {
-        int index = r.nextInt(candidates.size());
-        return candidates.get(index);
-    }
-
-    private String createRandomCell(ColumnDesc cDesc, ArrayList<String> range) throws Exception {
-        DataType type = cDesc.getType();
-        if (type.isStringFamily()) {
-            throw new Exception("Can't handle range values for string");
-
-        } else if (type.isIntegerFamily()) {
-            int low = Integer.parseInt(range.get(0));
-            int high = Integer.parseInt(range.get(1));
-            return Integer.toString(r.nextInt(high - low) + low);
-
-        } else if (type.isDouble()) {
-            double low = Double.parseDouble(range.get(0));
-            double high = Double.parseDouble(range.get(1));
-            return String.format("%.4f", r.nextDouble() * (high - low) + low);
-
-        } else if (type.isFloat()) {
-            float low = Float.parseFloat(range.get(0));
-            float high = Float.parseFloat(range.get(1));
-            return String.format("%.4f", r.nextFloat() * (high - low) + low);
-
-        } else if (type.isDecimal()) {
-            double low = Double.parseDouble(range.get(0));
-            double high = Double.parseDouble(range.get(1));
-            return String.format("%.4f", r.nextDouble() * (high - low) + low);
-
-        } else if (type.isDateTimeFamily()) {
-            if (!type.isDate()) {
-                throw new RuntimeException("Does not support " + type);
-            }
-
-            SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
-            Date start = format.parse(range.get(0));
-            Date end = format.parse(range.get(1));
-            long diff = end.getTime() - start.getTime();
-            Date temp = new Date(start.getTime() + (long) (diff * r.nextDouble()));
-            Calendar cal = Calendar.getInstance();
-            cal.setTime(temp);
-            // first day
-            cal.set(Calendar.DAY_OF_WEEK, cal.getFirstDayOfWeek());
-
-            return cal.get(Calendar.YEAR) + "-" + normToTwoDigits(cal.get(Calendar.MONTH) + 1) + "-" + normToTwoDigits(cal.get(Calendar.DAY_OF_MONTH));
-        } else {
-            System.out.println("The data type " + type + "is not recognized");
-            System.exit(1);
-        }
-        return null;
-    }
-
-    private String createRandomCell(ColumnDesc cDesc) {
-        String type = cDesc.getTypeName();
-        String s = type.toLowerCase();
-        if (s.equals("string") || s.equals("char") || s.equals("varchar")) {
-            StringBuilder sb = new StringBuilder();
-            for (int i = 0; i < 2; i++) {
-                sb.append((char) ('a' + r.nextInt(10)));
-                // 10 possible chars per position, so 10*10 possible strings
-            }
-            return sb.toString();
-        } else if (s.equals("bigint") || s.equals("int") || s.equals("tinyint") || s.equals("smallint")) {
-            return Integer.toString(r.nextInt(128));
-        } else if (s.equals("double")) {
-            return String.format("%.4f", r.nextDouble() * 100);
-        } else if (s.equals("float")) {
-            return String.format("%.4f", r.nextFloat() * 100);
-        } else if (s.equals("decimal")) {
-            return String.format("%.4f", r.nextDouble() * 100);
-        } else if (s.equals("date")) {
-            long date20131231 = 61349312153265L;
-            long date20010101 = 60939158400000L;
-            long diff = date20131231 - date20010101;
-            Date temp = new Date(date20010101 + (long) (diff * r.nextDouble()));
-            Calendar cal = Calendar.getInstance();
-            cal.setTime(temp);
-            // first day
-            cal.set(Calendar.DAY_OF_WEEK, cal.getFirstDayOfWeek());
-
-            return cal.get(Calendar.YEAR) + "-" + normToTwoDigits(cal.get(Calendar.MONTH) + 1) + "-" + normToTwoDigits(cal.get(Calendar.DAY_OF_MONTH));
-        } else {
-            System.out.println("The data type " + type + "is not recognized");
-            System.exit(1);
-        }
-        return null;
-    }
-
-    private String createDefaultsCell(String type) {
-        String s = type.toLowerCase();
-        if (s.equals("string") || s.equals("char") || s.equals("varchar")) {
-            return "abcde";
-        } else if (s.equals("bigint") || s.equals("int") || s.equals("tinyint") || s.equals("smallint")) {
-            return "0";
-        } else if (s.equals("double")) {
-            return "0";
-        } else if (s.equals("float")) {
-            return "0";
-        } else if (s.equals("decimal")) {
-            return "0";
-        } else if (s.equals("date")) {
-            return "1970-01-01";
-        } else {
-            System.out.println("The data type " + type + "is not recognized");
-            System.exit(1);
-        }
-        return null;
-    }
-
-    private void printColumnMappings(TreeMap<String, String> factTableCol2LookupCol, TreeSet<String> usedCols, TreeSet<String> defaultColumns) {
-
-        System.out.println("=======================================================================");
-        System.out.format("%-30s %s", "FACT_TABLE_COLUMN", "MAPPING");
-        System.out.println();
-        System.out.println();
-        for (Map.Entry<String, String> entry : factTableCol2LookupCol.entrySet()) {
-            System.out.format("%-30s %s", entry.getKey(), entry.getValue());
-            System.out.println();
-        }
-        for (String key : usedCols) {
-            System.out.format("%-30s %s", key, "Random Values");
-            System.out.println();
-        }
-        for (String key : defaultColumns) {
-            System.out.format("%-30s %s", key, "Default Values");
-            System.out.println();
-        }
-        System.out.println("=======================================================================");
-
-        System.out.println("Parameters:");
-        System.out.println();
-        System.out.println("CubeName:        " + cubeName);
-        System.out.println("RowCount:        " + rowCount);
-        System.out.println("ConflictRatio:   " + conflictRatio);
-        System.out.println("LinkableRatio:   " + linkableRatio);
-        System.out.println("Seed:            " + randomSeed);
-        System.out.println();
-        System.out.println("The number of actual unlinkable fact rows is: " + this.unlinkableRowCount);
-        System.out.println("You can vary the above parameters to generate different datasets.");
-        System.out.println();
-    }
-
-    // Every generated row must eventually appear in the flattened big table.
-    // For single-column joins the generated row is guaranteed to have a
-    // match in the lookup table; for composite keys we need an extra
-    // check.
-    private boolean matchAllCompositeKeys(TreeMap<String, String> lookupCol2FactTableCol, LinkedList<String> columnValues) {
-        KylinConfig config = KylinConfig.getInstanceFromEnv();
-
-        for (String lookupTable : lookupTableKeys.keySet()) {
-            if (lookupTableKeys.get(lookupTable).size() == 1)
-                continue;
-
-            String[] comboKey = new String[lookupTableKeys.get(lookupTable).size()];
-            int index = 0;
-            for (String column : lookupTableKeys.get(lookupTable)) {
-                String key = lookupTable + "/" + column;
-                String factTableCol = lookupCol2FactTableCol.get(key);
-                int cardinal = MetadataManager.getInstance(config).getTableDesc(factTableName).findColumnByName(factTableCol).getZeroBasedIndex();
-                comboKey[index] = columnValues.get(cardinal);
-
-                index++;
-            }
-            Array<String> wrap = new Array<String>(comboKey);
-            if (!lookupTableCompositeKeyValues.get(lookupTable).contains(wrap)) {
-                // System.out.println("Try " + wrap + " Failed, continue...");
-                return false;
-            }
-        }
-        return true;
-    }
-
-    private String createCell(ColumnDesc cDesc) throws Exception {
-        ColumnConfig cConfig = null;
-
-        if ((cConfig = genConf.getColumnConfigByName(cDesc.getName())) == null) {
-            // if the column is not configured, use random values
-            return (createRandomCell(cDesc));
-
-        } else {
-            // the column has a configuration
-            if (!cConfig.isAsRange() && !cConfig.isExclusive() && r.nextBoolean()) {
-                // if the column still allows random values
-                return (createRandomCell(cDesc));
-
-            } else {
-                // use specified values
-                ArrayList<String> valueSet = cConfig.getValueSet();
-                if (valueSet == null || valueSet.size() == 0)
-                    throw new Exception("Did you forget to specify value set for " + cDesc.getName());
-
-                if (!cConfig.isAsRange()) {
-                    return (randomPick(valueSet));
-                } else {
-                    if (valueSet.size() != 2)
-                        throw new Exception("Only two values can be set for range values, the column: " + cDesc.getName());
-
-                    return (createRandomCell(cDesc, valueSet));
-                }
-            }
-
-        }
-    }
-
-    private LinkedList<String> createRow(TreeMap<String, String> factTableCol2LookupCol, TreeSet<String> usedCols, TreeSet<String> defaultColumns) throws Exception {
-        KylinConfig config = KylinConfig.getInstanceFromEnv();
-        LinkedList<String> columnValues = new LinkedList<String>();
-
-        for (ColumnDesc cDesc : MetadataManager.getInstance(config).getTableDesc(factTableName).getColumns()) {
-
-            String colName = cDesc.getName();
-
-            if (factTableCol2LookupCol.containsKey(colName)) {
-
-                // if the current column is a fk column in fact table
-                ArrayList<String> candidates = this.feasibleValues.get(factTableCol2LookupCol.get(colName));
-
-                columnValues.add(candidates.get(r.nextInt(candidates.size())));
-            } else if (usedCols.contains(colName)) {
-
-                // if the current column is a metric column in fact table
-                columnValues.add(createCell(cDesc));
-            } else {
-
-                // otherwise this column is not useful in OLAP
-                columnValues.add(createDefaultsCell(cDesc.getTypeName()));
-                defaultColumns.add(colName);
-            }
-        }
-
-        return columnValues;
-    }
-
-    /**
-     * Return the text of the table contents (one line per row).
-     *
-     * @param rowCount
-     * @param factTableCol2LookupCol
-     * @param lookupCol2FactTableCol
-     * @param usedCols
-     * @return
-     * @throws Exception
-     */
-    private String createTable(int rowCount, TreeMap<String, String> factTableCol2LookupCol, TreeMap<String, String> lookupCol2FactTableCol, TreeSet<String> usedCols) throws Exception {
-        try {
-            TreeSet<String> defaultColumns = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
-
-            StringBuffer sb = new StringBuffer();
-            for (int i = 0; i < rowCount;) {
-
-                LinkedList<String> columnValues = createRow(factTableCol2LookupCol, usedCols, defaultColumns);
-
-                if (!matchAllCompositeKeys(lookupCol2FactTableCol, columnValues)) {
-                    if (unlinkableRowCount < unlinkableRowCountMax) {
-                        unlinkableRowCount++;
-                    } else {
-                        continue;
-                    }
-                }
-
-                for (String c : columnValues)
-                    sb.append(c + ",");
-                sb.deleteCharAt(sb.length() - 1);
-                sb.append(System.getProperty("line.separator"));
-
-                i++;
-
-                // System.out.println("Just generated the " + i + "th record");
-            }
-
-            printColumnMappings(factTableCol2LookupCol, usedCols, defaultColumns);
-
-            return sb.toString();
-
-        } catch (IOException e) {
-            e.printStackTrace();
-            System.exit(1);
-        }
-
-        return null;
-    }
-
-    /**
-     * Randomly create a fact table and return the table content
-     *
-     * @param cubeName      name of the cube
-     * @param rowCount      expected row count generated
-     * @param linkableRatio the percentage of fact table rows that can be linked with all
-     *                      lookup tables by an INNER join
-     * @param randomSeed    random seed
-     */
-    public static String generate(String cubeName, String rowCount, String linkableRatio, String randomSeed) throws Exception {
-
-        if (rowCount == null)
-            rowCount = "10000";
-        if (linkableRatio == null)
-            linkableRatio = "0.6";
-
-        // if randomSeed == null, leave it unset; a random seed is generated below
-
-        // String conflictRatio = "5"; // this parameter is no longer configurable
-
-        FactTableGenerator generator = new FactTableGenerator();
-        long seed;
-        if (randomSeed != null) {
-            seed = Long.parseLong(randomSeed);
-        } else {
-            Random r = new Random();
-            seed = r.nextLong();
-        }
-
-        generator.init(cubeName, Integer.parseInt(rowCount), 5, Double.parseDouble(linkableRatio), seed);
-        generator.prepare();
-        return generator.cookData();
-    }
-}

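Note: a minimal sketch of driving the generator's entry point above (assuming the Kylin metadata for the sample cube is reachable via KylinConfig; the cube name is the one the tests in this patch use, and passing null for the last three arguments falls back to 10000 rows, a 0.6 linkable ratio and a random seed):

    import org.apache.kylin.job.dataGen.FactTableGenerator;

    public class FactTableGeneratorExample {
        public static void main(String[] args) throws Exception {
            // 100 rows, 60% guaranteed linkable via INNER join, fixed seed for
            // reproducibility; the result is CSV text, one generated fact row per line
            String table = FactTableGenerator.generate("test_kylin_cube_with_slr_empty", "100", "0.6", "42");
            System.out.print(table);
        }
    }
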
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/dataGen/GenConfig.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/dataGen/GenConfig.java b/job/src/test/java/org/apache/kylin/job/dataGen/GenConfig.java
deleted file mode 100644
index c58cfb6..0000000
--- a/job/src/test/java/org/apache/kylin/job/dataGen/GenConfig.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.dataGen;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-
-import org.apache.kylin.common.util.JsonUtil;
-
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.databind.JsonMappingException;
-
-/**
- */
-@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
-public class GenConfig {
-
-    @JsonProperty("columnConfigs")
-    private ArrayList<ColumnConfig> columnConfigs;
-
-    private HashMap<String, ColumnConfig> cache = new HashMap<String, ColumnConfig>();
-
-    public ArrayList<ColumnConfig> getColumnConfigs() {
-        return columnConfigs;
-    }
-
-    public void setColumnConfigs(ArrayList<ColumnConfig> columnConfigs) {
-        this.columnConfigs = columnConfigs;
-    }
-
-    public ColumnConfig getColumnConfigByName(String columnName) {
-        columnName = columnName.toLowerCase();
-
-        if (cache.containsKey(columnName))
-            return cache.get(columnName);
-
-        for (ColumnConfig cConfig : columnConfigs) {
-            if (cConfig.getColumnName().toLowerCase().equals(columnName)) {
-                cache.put(columnName, cConfig);
-                return cConfig;
-            }
-        }
-        cache.put(columnName, null);
-        return null;
-    }
-
-    public static GenConfig loadConfig(InputStream stream) {
-        try {
-            GenConfig config = JsonUtil.readValue(stream, GenConfig.class);
-            return config;
-        } catch (JsonMappingException e) {
-            e.printStackTrace();
-        } catch (JsonParseException e) {
-            e.printStackTrace();
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-
-        return null;
-    }
-}

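Note: GenConfig is a thin Jackson binding; a hedged sketch of loading it from an arbitrary stream (the generator itself reads /data/data_gen_config.json from the ResourceStore; the local file name here is hypothetical):

    import java.io.FileInputStream;
    import java.io.InputStream;

    import org.apache.kylin.job.dataGen.ColumnConfig;
    import org.apache.kylin.job.dataGen.GenConfig;

    public class GenConfigExample {
        public static void main(String[] args) throws Exception {
            InputStream in = new FileInputStream("data_gen_config.json"); // hypothetical local copy
            GenConfig conf = GenConfig.loadConfig(in); // returns null if the JSON cannot be parsed
            in.close();
            if (conf != null) {
                // the lookup is case-insensitive and memoized in an internal cache
                ColumnConfig cc = conf.getColumnConfigByName("lstg_format_name");
                if (cc != null) {
                    System.out.println(cc.getValueSet());
                }
            }
        }
    }
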
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/dataGen/StreamingDataGenerator.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/dataGen/StreamingDataGenerator.java b/job/src/test/java/org/apache/kylin/job/dataGen/StreamingDataGenerator.java
deleted file mode 100644
index ebc4114..0000000
--- a/job/src/test/java/org/apache/kylin/job/dataGen/StreamingDataGenerator.java
+++ /dev/null
@@ -1,83 +0,0 @@
-package org.apache.kylin.job.dataGen;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.common.util.TimeUtil;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.model.IIDesc;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Maps;
-
-/**
- * data generator for II streaming; may be merged with StreamingTableDataGenerator
- */
-public class StreamingDataGenerator {
-    private static final Logger logger = LoggerFactory.getLogger(StreamingDataGenerator.class);
-    private static Random random = new Random();
-    private static String[] decimalFormat = new String[] { "%.4f", "%.5f", "%.6f" };
-
-    public static Iterator<String> generate(final long start, final long end, final int count) {
-        final KylinConfig config = KylinConfig.getInstanceFromEnv();
-        final IIInstance ii = IIManager.getInstance(config).getII("test_streaming_table_ii");
-        final IIDesc iiDesc = ii.getDescriptor();
-        final List<TblColRef> columns = iiDesc.listAllColumns();
-
-        return new Iterator<String>() {
-            private Map<String, String> values = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-            private int index = 0;
-
-            @Override
-            public boolean hasNext() {
-                return this.index < count;
-            }
-
-            @Override
-            public String next() {
-                values.clear();
-                long ts = this.createTs(start, end);
-                values.put("minute_start", Long.toString(TimeUtil.getMinuteStart(ts)));
-                values.put("hour_start", Long.toString(TimeUtil.getHourStart(ts)));
-                values.put("day_start", Long.toString(TimeUtil.getDayStart(ts)));
-                values.put("itm", Integer.toString(random.nextInt(20)));
-                values.put("site", Integer.toString(random.nextInt(5)));
-
-                values.put("gmv", String.format(decimalFormat[random.nextInt(3)], random.nextFloat() * 100));
-                values.put("item_count", Integer.toString(random.nextInt(5)));
-
-                if (values.size() != columns.size()) {
-                    throw new RuntimeException("the structure of streaming table has changed, need to modify generator too");
-                }
-
-                ByteArrayOutputStream os = new ByteArrayOutputStream();
-                try {
-                    JsonUtil.writeValue(os, values);
-                } catch (IOException e) {
-                    e.printStackTrace();
-                    throw new RuntimeException(e);
-                }
-                index++;
-                return new String(os.toByteArray());
-            }
-
-            @Override
-            public void remove() {
-            }
-
-            private long createTs(final long start, final long end) {
-                return start + (long) (random.nextDouble() * (end - start));
-            }
-        };
-    }
-
-}

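Note: a sketch of draining the iterator above (assumes the inverted index "test_streaming_table_ii" that the generator looks up exists in the test metadata):

    import java.util.Iterator;

    import org.apache.kylin.job.dataGen.StreamingDataGenerator;

    public class StreamingDataGeneratorExample {
        public static void main(String[] args) {
            long end = System.currentTimeMillis();
            long start = end - 3600 * 1000L; // spread timestamps over the last hour
            Iterator<String> records = StreamingDataGenerator.generate(start, end, 10);
            while (records.hasNext()) {
                System.out.println(records.next()); // one JSON document per record
            }
        }
    }
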
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/hadoop/invertedindex/IITest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/hadoop/invertedindex/IITest.java b/job/src/test/java/org/apache/kylin/job/hadoop/invertedindex/IITest.java
deleted file mode 100644
index dcd460b..0000000
--- a/job/src/test/java/org/apache/kylin/job/hadoop/invertedindex/IITest.java
+++ /dev/null
@@ -1,240 +0,0 @@
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Queue;
-import java.util.Set;
-
-import javax.annotation.Nullable;
-
-import org.apache.commons.lang.NotImplementedException;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.regionserver.RegionScanner;
-import org.apache.kylin.common.util.FIFOIterable;
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.index.Slice;
-import org.apache.kylin.invertedindex.index.TableRecordInfo;
-import org.apache.kylin.invertedindex.index.TableRecordInfoDigest;
-import org.apache.kylin.invertedindex.model.IIDesc;
-import org.apache.kylin.invertedindex.model.IIKeyValueCodec;
-import org.apache.kylin.invertedindex.model.IIKeyValueCodecWithState;
-import org.apache.kylin.invertedindex.model.IIRow;
-import org.apache.kylin.invertedindex.model.KeyValueCodec;
-import org.apache.kylin.metadata.filter.ColumnTupleFilter;
-import org.apache.kylin.metadata.filter.CompareTupleFilter;
-import org.apache.kylin.metadata.filter.ConstantTupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilter;
-import org.apache.kylin.metadata.model.FunctionDesc;
-import org.apache.kylin.metadata.model.ParameterDesc;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorFilter;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorProjector;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorRowType;
-import org.apache.kylin.storage.hbase.common.coprocessor.FilterDecorator;
-import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.ClearTextDictionary;
-import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.EndpointAggregators;
-import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.IIEndpoint;
-import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.generated.IIProtos;
-import org.apache.kylin.streaming.MicroStreamBatch;
-import org.apache.kylin.streaming.ParsedStreamMessage;
-import org.apache.kylin.streaming.StreamMessage;
-import org.apache.kylin.streaming.StreamParser;
-import org.apache.kylin.streaming.StringStreamParser;
-import org.apache.kylin.streaming.invertedindex.SliceBuilder;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.google.common.base.Function;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
-/**
- */
-public class IITest extends LocalFileMetadataTestCase {
-
-    String iiName = "test_kylin_ii_inner_join";
-    IIInstance ii;
-    IIDesc iiDesc;
-    String cubeName = "test_kylin_cube_with_slr_empty";
-
-    List<IIRow> iiRows;
-
-    final String[] inputData = new String[] { //
-    "FP-non GTC,0,15,145970,0,28,Toys,2008-10-08 07:18:40,USER_Y,Toys & Hobbies,Models & Kits,Automotive,0,Ebay,USER_S,15,Professional-Other,2012-08-16,2012-08-11,0,2012-08-16,145970,10000329,26.8551,0", //
-            "ABIN,0,-99,43479,0,21,Photo,2012-09-11 20:26:04,USER_Y,Cameras & Photo,Film Photography,Other,0,Ebay,USER_S,-99,Not Applicable,2012-08-16,2012-08-11,0,2012-08-16,43479,10000807,26.2474,0", //
-            "ABIN,0,16,80053,0,12,Computers,2012-06-19 21:15:09,USER_Y,Computers/Tablets & Networking,MonitorProjectors & Accs,Monitors,0,Ebay,USER_S,16,Consumer-Other,2012-08-16,2012-08-11,0,2012-08-16,80053,10000261,94.2273,0" };
-
-    @Before
-    public void setUp() throws Exception {
-        this.createTestMetadata();
-        this.ii = IIManager.getInstance(getTestConfig()).getII(iiName);
-        this.iiDesc = ii.getDescriptor();
-
-        List<StreamMessage> streamMessages = Lists.transform(Arrays.asList(inputData), new Function<String, StreamMessage>() {
-            @Nullable
-            @Override
-            public StreamMessage apply(String input) {
-                return new StreamMessage(System.currentTimeMillis(), input.getBytes());
-            }
-        });
-
-        List<List<String>> parsedStreamMessages = Lists.newArrayList();
-        StreamParser parser = StringStreamParser.instance;
-
-        MicroStreamBatch batch = new MicroStreamBatch(0);
-        for (StreamMessage message : streamMessages) {
-            ParsedStreamMessage parsedStreamMessage = parser.parse(message);
-            if ((parsedStreamMessage.isAccepted())) {
-                batch.add(parsedStreamMessage);
-            }
-        }
-
-        iiRows = Lists.newArrayList();
-        final Slice slice = new SliceBuilder(iiDesc, (short) 0, true).buildSlice((batch));
-        IIKeyValueCodec codec = new IIKeyValueCodec(slice.getInfo());
-        for (IIRow iiRow : codec.encodeKeyValue(slice)) {
-            iiRows.add(iiRow);
-        }
-    }
-
-    @After
-    public void after() throws Exception {
-        cleanupTestMetadata();
-    }
-
-    /**
-     * Simulate stream building into slices, encode the slices into IIRows,
-     * then reconstruct the slices from the IIRows.
-     */
-    @Test
-    public void basicTest() {
-        Queue<IIRow> buffer = Lists.newLinkedList();
-        FIFOIterable bufferIterable = new FIFOIterable(buffer);
-        TableRecordInfo info = new TableRecordInfo(iiDesc);
-        TableRecordInfoDigest digest = info.getDigest();
-        KeyValueCodec codec = new IIKeyValueCodecWithState(digest);
-        Iterator<Slice> slices = codec.decodeKeyValue(bufferIterable).iterator();
-
-        Assert.assertTrue(!slices.hasNext());
-        Assert.assertEquals(iiRows.size(), digest.getColumnCount());
-
-        for (int i = 0; i < digest.getColumnCount(); ++i) {
-            buffer.add(iiRows.get(i));
-
-            if (i != digest.getColumnCount() - 1) {
-                Assert.assertTrue(!slices.hasNext());
-            } else {
-                Assert.assertTrue(slices.hasNext());
-            }
-        }
-
-        Slice newSlice = slices.next();
-        Assert.assertEquals(newSlice.getLocalDictionaries()[0].getSize(), 2);
-    }
-
-    @Test
-    public void IIEndpointTest() {
-        TableRecordInfo info = new TableRecordInfo(ii.getDescriptor());
-        CoprocessorRowType type = CoprocessorRowType.fromTableRecordInfo(info, ii.getFirstSegment().getColumns());
-        CoprocessorProjector projector = CoprocessorProjector.makeForEndpoint(info, Collections.singletonList(ii.getDescriptor().findColumnRef("default.test_kylin_fact", "lstg_format_name")));
-
-        FunctionDesc f1 = new FunctionDesc();
-        f1.setExpression("SUM");
-        ParameterDesc p1 = new ParameterDesc();
-        p1.setType("column");
-        p1.setValue("PRICE");
-        f1.setParameter(p1);
-        f1.setReturnType("decimal(19,4)");
-
-        TblColRef column = ii.getDescriptor().findColumnRef("default.test_kylin_fact", "cal_dt");
-        CompareTupleFilter compareFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GTE);
-        ColumnTupleFilter columnFilter = new ColumnTupleFilter(column);
-        compareFilter.addChild(columnFilter);
-        ConstantTupleFilter constantFilter = null;
-        constantFilter = new ConstantTupleFilter(("2012-08-16"));
-        compareFilter.addChild(constantFilter);
-
-        EndpointAggregators aggregators = EndpointAggregators.fromFunctions(info, Collections.singletonList(f1));
-        CoprocessorFilter filter = CoprocessorFilter.fromFilter(new ClearTextDictionary(info), compareFilter, FilterDecorator.FilterConstantsTreatment.AS_IT_IS);
-
-        final Iterator<IIRow> iiRowIterator = iiRows.iterator();
-
-        IIEndpoint endpoint = new IIEndpoint();
-        IIProtos.IIResponseInternal response = endpoint.getResponse(new RegionScanner() {
-            @Override
-            public HRegionInfo getRegionInfo() {
-                throw new NotImplementedException();
-            }
-
-            @Override
-            public boolean isFilterDone() throws IOException {
-                throw new NotImplementedException();
-            }
-
-            @Override
-            public boolean reseek(byte[] row) throws IOException {
-                throw new NotImplementedException();
-            }
-
-            @Override
-            public long getMaxResultSize() {
-                throw new NotImplementedException();
-
-            }
-
-            @Override
-            public long getMvccReadPoint() {
-                throw new NotImplementedException();
-            }
-
-            @Override
-            public boolean nextRaw(List<Cell> result) throws IOException {
-                if (iiRowIterator.hasNext()) {
-                    IIRow iiRow = iiRowIterator.next();
-                    result.addAll(iiRow.makeCells());
-                    return true;
-                } else {
-                    return false;
-                }
-            }
-
-            @Override
-            public boolean nextRaw(List<Cell> result, int limit) throws IOException {
-                throw new NotImplementedException();
-            }
-
-            @Override
-            public boolean next(List<Cell> results) throws IOException {
-                throw new NotImplementedException();
-            }
-
-            @Override
-            public boolean next(List<Cell> result, int limit) throws IOException {
-                throw new NotImplementedException();
-            }
-
-            @Override
-            public void close() throws IOException {
-                throw new NotImplementedException();
-            }
-        }, type, projector, aggregators, filter);
-
-        Assert.assertEquals(2, response.getRowsList().size());
-        System.out.println(response.getRowsList().size());
-        Set<String> answers = Sets.newHashSet("120.4747", "26.8551");
-        for (IIProtos.IIResponseInternal.IIRow responseRow : response.getRowsList()) {
-            byte[] measuresBytes = responseRow.getMeasures().toByteArray();
-            List<Object> metrics = aggregators.deserializeMetricValues(measuresBytes, 0);
-            Assert.assertTrue(answers.contains(metrics.get(0)));
-        }
-    }
-
-}

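Note: the property basicTest pins down is that decoding is incremental: IIKeyValueCodecWithState withholds a Slice until one IIRow per column has arrived. A condensed sketch of that handshake, reusing the types from the test (digest and iiRows as prepared in setUp()):

    private Slice decodeWhenComplete(TableRecordInfoDigest digest, List<IIRow> iiRows) {
        Queue<IIRow> buffer = Lists.newLinkedList();
        Iterator<Slice> slices = new IIKeyValueCodecWithState(digest).decodeKeyValue(new FIFOIterable(buffer)).iterator();
        // slices.hasNext() is false here; the codec is still waiting for rows
        buffer.addAll(iiRows);
        return slices.next(); // all columns present, a complete slice decodes
    }
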
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java b/job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
deleted file mode 100644
index fd2eceb..0000000
--- a/job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.impl.threadpool;
-
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.apache.kylin.job.DeployUtil;
-import org.apache.kylin.job.constant.ExecutableConstants;
-import org.apache.kylin.job.engine.JobEngineConfig;
-import org.apache.kylin.job.execution.AbstractExecutable;
-import org.apache.kylin.job.execution.ExecutableState;
-import org.apache.kylin.job.lock.MockJobLock;
-import org.apache.kylin.job.manager.ExecutableManager;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.BeforeClass;
-
-/**
- */
-public abstract class BaseSchedulerTest extends LocalFileMetadataTestCase {
-
-    private DefaultScheduler scheduler;
-
-    protected ExecutableManager jobService;
-
-    static void setFinalStatic(Field field, Object newValue) throws Exception {
-        field.setAccessible(true);
-
-        Field modifiersField = Field.class.getDeclaredField("modifiers");
-        modifiersField.setAccessible(true);
-        modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL);
-
-        field.set(null, newValue);
-    }
-
-    protected void waitForJobFinish(String jobId) {
-        while (true) {
-            AbstractExecutable job = jobService.getJob(jobId);
-            final ExecutableState status = job.getStatus();
-            if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR || status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED) {
-                break;
-            } else {
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-    }
-
-    protected void waitForJobStatus(String jobId, ExecutableState state, long interval) {
-        while (true) {
-            AbstractExecutable job = jobService.getJob(jobId);
-            if (job.getStatus() == state) {
-                break;
-            } else {
-                try {
-                    Thread.sleep(interval);
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-    }
-
-    @BeforeClass
-    public static void beforeClass() {
-        System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
-        DeployUtil.overrideJobJarLocations();
-    }
-
-    @Before
-    public void setup() throws Exception {
-        createTestMetadata();
-        setFinalStatic(ExecutableConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10);
-        jobService = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv());
-        scheduler = DefaultScheduler.getInstance();
-        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()), new MockJobLock());
-        if (!scheduler.hasStarted()) {
-            throw new RuntimeException("scheduler has not been started");
-        }
-
-    }
-
-    @After
-    public void after() throws Exception {
-        cleanupTestMetadata();
-    }
-}

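Note: waitForJobFinish and waitForJobStatus poll without an upper bound, so a hung job hangs the test forever. A bounded variant is a straightforward hardening (a sketch, not part of this patch):

    protected void waitForJobFinish(String jobId, long timeoutMs) {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (System.currentTimeMillis() < deadline) {
            ExecutableState status = jobService.getJob(jobId).getStatus();
            if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR || status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED) {
                return;
            }
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }
        throw new RuntimeException("job " + jobId + " did not finish within " + timeoutMs + " ms");
    }
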
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
deleted file mode 100644
index 7c33d02..0000000
--- a/job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.impl.threadpool;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.kylin.job.BaseTestExecutable;
-import org.apache.kylin.job.ErrorTestExecutable;
-import org.apache.kylin.job.FailedTestExecutable;
-import org.apache.kylin.job.SelfStopExecutable;
-import org.apache.kylin.job.SucceedTestExecutable;
-import org.apache.kylin.job.execution.DefaultChainedExecutable;
-import org.apache.kylin.job.execution.ExecutableState;
-import org.junit.Test;
-
-/**
- */
-public class DefaultSchedulerTest extends BaseSchedulerTest {
-
-    @Test
-    public void testSingleTaskJob() throws Exception {
-        DefaultChainedExecutable job = new DefaultChainedExecutable();
-        BaseTestExecutable task1 = new SucceedTestExecutable();
-        job.addTask(task1);
-        jobService.addJob(job);
-        waitForJobFinish(job.getId());
-        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(job.getId()).getState());
-        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState());
-    }
-
-    @Test
-    public void testSucceed() throws Exception {
-        DefaultChainedExecutable job = new DefaultChainedExecutable();
-        BaseTestExecutable task1 = new SucceedTestExecutable();
-        BaseTestExecutable task2 = new SucceedTestExecutable();
-        job.addTask(task1);
-        job.addTask(task2);
-        jobService.addJob(job);
-        waitForJobFinish(job.getId());
-        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(job.getId()).getState());
-        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState());
-        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task2.getId()).getState());
-    }
-
-    @Test
-    public void testSucceedAndFailed() throws Exception {
-        DefaultChainedExecutable job = new DefaultChainedExecutable();
-        BaseTestExecutable task1 = new SucceedTestExecutable();
-        BaseTestExecutable task2 = new FailedTestExecutable();
-        job.addTask(task1);
-        job.addTask(task2);
-        jobService.addJob(job);
-        waitForJobFinish(job.getId());
-        assertEquals(ExecutableState.ERROR, jobService.getOutput(job.getId()).getState());
-        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState());
-        assertEquals(ExecutableState.ERROR, jobService.getOutput(task2.getId()).getState());
-    }
-
-    @Test
-    public void testSucceedAndError() throws Exception {
-        DefaultChainedExecutable job = new DefaultChainedExecutable();
-        BaseTestExecutable task1 = new ErrorTestExecutable();
-        BaseTestExecutable task2 = new SucceedTestExecutable();
-        job.addTask(task1);
-        job.addTask(task2);
-        jobService.addJob(job);
-        waitForJobFinish(job.getId());
-        assertEquals(ExecutableState.ERROR, jobService.getOutput(job.getId()).getState());
-        assertEquals(ExecutableState.ERROR, jobService.getOutput(task1.getId()).getState());
-        assertEquals(ExecutableState.READY, jobService.getOutput(task2.getId()).getState());
-    }
-
-    @Test
-    public void testDiscard() throws Exception {
-        DefaultChainedExecutable job = new DefaultChainedExecutable();
-        BaseTestExecutable task1 = new SelfStopExecutable();
-        job.addTask(task1);
-        jobService.addJob(job);
-        waitForJobStatus(job.getId(), ExecutableState.RUNNING, 500);
-        jobService.discardJob(job.getId());
-        waitForJobFinish(job.getId());
-        assertEquals(ExecutableState.DISCARDED, jobService.getOutput(job.getId()).getState());
-        assertEquals(ExecutableState.DISCARDED, jobService.getOutput(task1.getId()).getState());
-        Thread.sleep(5000);
-        System.out.println(job);
-    }
-
-    @Test
-    public void testSchedulerPool() throws InterruptedException {
-        ScheduledExecutorService fetchPool = Executors.newScheduledThreadPool(1);
-        final CountDownLatch countDownLatch = new CountDownLatch(3);
-        ScheduledFuture future = fetchPool.scheduleAtFixedRate(new Runnable() {
-            @Override
-            public void run() {
-                countDownLatch.countDown();
-            }
-        }, 5, 5, TimeUnit.SECONDS);
-        assertTrue("countDownLatch should reach zero in 15 secs", countDownLatch.await(20, TimeUnit.SECONDS));
-        assertTrue("future should still running", future.cancel(true));
-
-        final CountDownLatch countDownLatch2 = new CountDownLatch(3);
-        ScheduledFuture future2 = fetchPool.scheduleAtFixedRate(new Runnable() {
-            @Override
-            public void run() {
-                countDownLatch2.countDown();
-                throw new RuntimeException();
-            }
-        }, 5, 5, TimeUnit.SECONDS);
-        assertFalse("countDownLatch2 should NOT reach zero in 15 secs", countDownLatch2.await(20, TimeUnit.SECONDS));
-        assertFalse("future2 should has been stopped", future2.cancel(true));
-
-        final CountDownLatch countDownLatch3 = new CountDownLatch(3);
-        ScheduledFuture future3 = fetchPool.scheduleAtFixedRate(new Runnable() {
-            @Override
-            public void run() {
-                try {
-                    countDownLatch3.countDown();
-                    throw new RuntimeException();
-                } catch (Exception e) {
-                }
-            }
-        }, 5, 5, TimeUnit.SECONDS);
-        assertTrue("countDownLatch3 should reach zero in 15 secs", countDownLatch3.await(20, TimeUnit.SECONDS));
-        assertTrue("future3 should still running", future3.cancel(true));
-    }
-}

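Note: testSchedulerPool documents a JDK contract rather than Kylin code: once a fixed-rate task lets an exception escape, the ScheduledExecutorService suppresses all of its subsequent runs, which is why the second latch never reaches zero and why the third task catches everything it throws. A reusable guard for that pitfall could look like this (a sketch):

    // wraps a task so a failure is logged instead of silently killing the schedule
    static Runnable swallowingExceptions(final Runnable task) {
        return new Runnable() {
            @Override
            public void run() {
                try {
                    task.run();
                } catch (Throwable t) {
                    t.printStackTrace();
                }
            }
        };
    }

    // usage: fetchPool.scheduleAtFixedRate(swallowingExceptions(work), 5, 5, TimeUnit.SECONDS);
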
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/streaming/CubeStreamConsumerTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/streaming/CubeStreamConsumerTest.java b/job/src/test/java/org/apache/kylin/job/streaming/CubeStreamConsumerTest.java
deleted file mode 100644
index be4fa26..0000000
--- a/job/src/test/java/org/apache/kylin/job/streaming/CubeStreamConsumerTest.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package org.apache.kylin.job.streaming;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.LinkedBlockingDeque;
-
-import org.apache.hadoop.util.StringUtils;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.AbstractKylinTestCase;
-import org.apache.kylin.common.util.ClassUtil;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.CubeUpdate;
-import org.apache.kylin.job.DeployUtil;
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.apache.kylin.streaming.StreamBuilder;
-import org.apache.kylin.streaming.StreamMessage;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-
-/**
- */
-@Ignore
-public class CubeStreamConsumerTest {
-
-    private static final Logger logger = LoggerFactory.getLogger(CubeStreamConsumerTest.class);
-
-    private KylinConfig kylinConfig;
-
-    private static final String CUBE_NAME = "test_kylin_cube_without_slr_left_join_ready";
-
-    @BeforeClass
-    public static void beforeClass() throws Exception {
-        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        System.setProperty("hdp.version", "2.2.0.0-2041"); // mapred-site.xml ref this
-    }
-
-    @Before
-    public void before() throws Exception {
-        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
-
-        kylinConfig = KylinConfig.getInstanceFromEnv();
-        DeployUtil.initCliWorkDir();
-        DeployUtil.deployMetadata();
-        DeployUtil.overrideJobJarLocations();
-        final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(CUBE_NAME);
-        CubeUpdate cubeBuilder = new CubeUpdate(cube);
-        cubeBuilder.setToRemoveSegs(cube.getSegments().toArray(new CubeSegment[cube.getSegments().size()]));
-        // remove all existing segments
-        CubeManager.getInstance(kylinConfig).updateCube(cubeBuilder);
-
-    }
-
-    @Test
-    public void test() throws Exception {
-        LinkedBlockingDeque<StreamMessage> queue = new LinkedBlockingDeque<>();
-        List<BlockingQueue<StreamMessage>> queues = Lists.newArrayList();
-        queues.add(queue);
-        StreamBuilder cubeStreamBuilder = StreamBuilder.newPeriodicalStreamBuilder(CUBE_NAME, queues, new CubeStreamConsumer(CUBE_NAME), System.currentTimeMillis(), 30L * 1000);
-        final Future<?> future = Executors.newSingleThreadExecutor().submit(cubeStreamBuilder);
-        loadDataFromLocalFile(queue, 100000);
-        future.get();
-    }
-
-    private void loadDataFromLocalFile(BlockingQueue<StreamMessage> queue, final int maxCount) throws IOException, InterruptedException {
-        BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream("../table.txt")));
-        String line;
-        int count = 0;
-        while ((line = br.readLine()) != null && count++ < maxCount) {
-            final List<String> strings = Arrays.asList(line.split("\t"));
-            queue.put(new StreamMessage(System.currentTimeMillis(), StringUtils.join(",", strings).getBytes()));
-        }
-        queue.put(StreamMessage.EOF);
-    }
-}

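Note: the EOF sentinel is what lets the builder finish: without the final queue.put(StreamMessage.EOF) the future.get() in the test would likely never return. Any producer can feed the queue the same way (hypothetical inline message):

    BlockingQueue<StreamMessage> queue = new LinkedBlockingDeque<>();
    queue.put(new StreamMessage(System.currentTimeMillis(), "ABIN,0,16,80053".getBytes()));
    queue.put(StreamMessage.EOF); // signals the consumer to flush and complete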

[8/9] incubator-kylin git commit: KYLIN-1010 Decompose project job

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/dataGen/ColumnConfig.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/dataGen/ColumnConfig.java b/assembly/src/test/java/org/apache/kylin/job/dataGen/ColumnConfig.java
new file mode 100644
index 0000000..44ba8f4
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/dataGen/ColumnConfig.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.dataGen;
+
+import java.util.ArrayList;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ */
+@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
+public class ColumnConfig {
+    @JsonProperty("columnName")
+    private String columnName;
+    @JsonProperty("valueSet")
+    private ArrayList<String> valueSet;
+    @JsonProperty("exclusive")
+    private boolean exclusive;
+    @JsonProperty("asRange")
+    private boolean asRange;
+
+    public boolean isAsRange() {
+        return asRange;
+    }
+
+    public void setAsRange(boolean asRange) {
+        this.asRange = asRange;
+    }
+
+    public boolean isExclusive() {
+        return exclusive;
+    }
+
+    public void setExclusive(boolean exclusive) {
+        this.exclusive = exclusive;
+    }
+
+    public String getColumnName() {
+        return columnName;
+    }
+
+    public void setColumnName(String columnName) {
+        this.columnName = columnName;
+    }
+
+    public ArrayList<String> getValueSet() {
+        return valueSet;
+    }
+
+    public void setValueSet(ArrayList<String> valueSet) {
+        this.valueSet = valueSet;
+    }
+
+}

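Note: for reference, an illustrative (hypothetical) snippet of the JSON this class binds to; the property names mirror the @JsonProperty annotations above, nested under GenConfig's top-level "columnConfigs" array, and an asRange column must carry exactly two values (low and high):

    {
      "columnConfigs": [
        { "columnName": "lstg_format_name", "valueSet": ["ABIN", "FP-non GTC"], "exclusive": true, "asRange": false },
        { "columnName": "cal_dt", "valueSet": ["2012-08-01", "2012-08-31"], "exclusive": false, "asRange": true }
      ]
    }
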
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/dataGen/FactTableGenerator.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/dataGen/FactTableGenerator.java b/assembly/src/test/java/org/apache/kylin/job/dataGen/FactTableGenerator.java
new file mode 100644
index 0000000..a965753
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/dataGen/FactTableGenerator.java
@@ -0,0 +1,647 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.dataGen;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.util.Array;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.cube.model.DimensionDesc;
+import org.apache.kylin.metadata.MetadataManager;
+import org.apache.kylin.metadata.model.ColumnDesc;
+import org.apache.kylin.metadata.model.DataType;
+import org.apache.kylin.metadata.model.JoinDesc;
+import org.apache.kylin.metadata.model.MeasureDesc;
+import org.apache.kylin.metadata.model.TblColRef;
+
+/**
+ */
+public class FactTableGenerator {
+    CubeInstance cube = null;
+    CubeDesc desc = null;
+    ResourceStore store = null;
+    String factTableName = null;
+
+    GenConfig genConf = null;
+
+    Random r = null;
+
+    String cubeName;
+    long randomSeed;
+    int rowCount;
+    int unlinkableRowCount;
+    int unlinkableRowCountMax;
+    double conflictRatio;
+    double linkableRatio;
+
+    // the names of lookup table columns which are joined with the fact
+    // table (they appear as foreign keys in the fact table)
+    TreeMap<String, LinkedList<String>> lookupTableKeys = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+
+    // possible values of lookupTableKeys, extracted from existing lookup
+    // tables.
+    // The key is in the format of tablename/columnname
+    TreeMap<String, ArrayList<String>> feasibleValues = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+
+    // lookup table name -> sets of all composite keys
+    TreeMap<String, HashSet<Array<String>>> lookupTableCompositeKeyValues = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+
+    private void init(String cubeName, int rowCount, double conflictRatio, double linkableRatio, long randomSeed) {
+        this.rowCount = rowCount;
+        this.conflictRatio = conflictRatio;
+        this.cubeName = cubeName;
+        this.randomSeed = randomSeed;
+        this.linkableRatio = linkableRatio;
+
+        this.unlinkableRowCountMax = (int) (this.rowCount * (1 - linkableRatio));
+        this.unlinkableRowCount = 0;
+
+        r = new Random(randomSeed);
+
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        cube = CubeManager.getInstance(config).getCube(cubeName);
+        desc = cube.getDescriptor();
+        factTableName = desc.getFactTable();
+        store = ResourceStore.getStore(config);
+    }
+
+    /*
+     * users can specify the value preference for each column
+     */
+    private void loadConfig() {
+        try {
+            InputStream configStream = null;
+            configStream = store.getResource("/data/data_gen_config.json");
+            this.genConf = GenConfig.loadConfig(configStream);
+
+            if (configStream != null)
+                configStream.close();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void loadLookupTableValues(String lookupTableName, LinkedList<String> columnNames, int distinctRowCount) throws Exception {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+
+        // only deal with composite keys
+        if (columnNames.size() > 1 && !lookupTableCompositeKeyValues.containsKey(lookupTableName)) {
+            lookupTableCompositeKeyValues.put(lookupTableName, new HashSet<Array<String>>());
+        }
+
+        InputStream tableStream = null;
+        BufferedReader tableReader = null;
+        try {
+            TreeMap<String, Integer> zeroBasedInice = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+            for (String columnName : columnNames) {
+                ColumnDesc cDesc = MetadataManager.getInstance(config).getTableDesc(lookupTableName).findColumnByName(columnName);
+                zeroBasedInice.put(columnName, cDesc.getZeroBasedIndex());
+            }
+
+            String path = "/data/" + lookupTableName + ".csv";
+            tableStream = store.getResource(path);
+            tableReader = new BufferedReader(new InputStreamReader(tableStream));
+            tableReader.mark(0);
+            int rowCount = 0;
+            int curRowNum = 0;
+            String curRow;
+
+            while (tableReader.readLine() != null)
+                rowCount++;
+
+            HashSet<Integer> rows = new HashSet<Integer>();
+            distinctRowCount = (distinctRowCount < rowCount) ? distinctRowCount : rowCount;
+            while (rows.size() < distinctRowCount) {
+                rows.add(r.nextInt(rowCount));
+            }
+
+            // reopen the stream
+            tableReader.close();
+            tableStream.close();
+            tableStream = null;
+            tableReader = null;
+
+            tableStream = store.getResource(path);
+            tableReader = new BufferedReader(new InputStreamReader(tableStream));
+
+            while ((curRow = tableReader.readLine()) != null) {
+                if (rows.contains(curRowNum)) {
+                    String[] tokens = curRow.split(",");
+
+                    String[] comboKeys = null;
+                    int index = 0;
+                    if (columnNames.size() > 1)
+                        comboKeys = new String[columnNames.size()];
+
+                    for (String columnName : columnNames) {
+                        int zeroBasedIndex = zeroBasedIndices.get(columnName);
+                        if (!feasibleValues.containsKey(lookupTableName + "/" + columnName))
+                            feasibleValues.put(lookupTableName + "/" + columnName, new ArrayList<String>());
+                        feasibleValues.get(lookupTableName + "/" + columnName).add(tokens[zeroBasedIndex]);
+
+                        if (columnNames.size() > 1) {
+                            comboKeys[index] = tokens[zeroBasedIndex];
+                            index++;
+                        }
+                    }
+
+                    if (columnNames.size() > 1) {
+                        Array<String> wrap = new Array<String>(comboKeys);
+                        if (lookupTableCompositeKeyValues.get(lookupTableName).contains(wrap)) {
+                            throw new Exception("The composite key already exist in the lookup table");
+                        }
+                        lookupTableCompositeKeyValues.get(lookupTableName).add(wrap);
+                    }
+                }
+                curRowNum++;
+            }
+
+            if (tableStream != null)
+                tableStream.close();
+            if (tableReader != null)
+                tableReader.close();
+
+        } catch (IOException e) {
+            e.printStackTrace();
+            System.exit(1);
+        }
+    }
+
+    // prepare the candidate values for each joined column
+    private void prepare() throws Exception {
+        // load config
+        loadConfig();
+
+        TreeSet<String> factTableColumns = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
+
+        for (DimensionDesc dim : desc.getDimensions()) {
+            for (TblColRef col : dim.getColumnRefs()) {
+                if (col.getTable().equals(factTableName))
+                    factTableColumns.add(col.getName());
+            }
+
+            JoinDesc join = dim.getJoin();
+            if (join != null) {
+                String lookupTable = dim.getTable();
+                for (String column : join.getPrimaryKey()) {
+                    if (!lookupTableKeys.containsKey(lookupTable)) {
+                        lookupTableKeys.put(lookupTable, new LinkedList<String>());
+                    }
+
+                    if (!lookupTableKeys.get(lookupTable).contains(column))
+                        lookupTableKeys.get(lookupTable).add(column);
+                }
+            }
+        }
+
+        int distinctRowCount = (int) (this.rowCount / this.conflictRatio);
+        distinctRowCount = (distinctRowCount == 0) ? 1 : distinctRowCount;
+        // lookup tables
+        for (String lookupTable : lookupTableKeys.keySet()) {
+            this.loadLookupTableValues(lookupTable, lookupTableKeys.get(lookupTable), distinctRowCount);
+        }
+    }
+
+    private List<DimensionDesc> getSortedDimentsionDescs() {
+        List<DimensionDesc> dimensions = desc.getDimensions();
+        Collections.sort(dimensions, new Comparator<DimensionDesc>() {
+            @Override
+            public int compare(DimensionDesc o1, DimensionDesc o2) {
+                JoinDesc j1 = o2.getJoin();
+                JoinDesc j2 = o1.getJoin();
+                return Integer.valueOf(j1 != null ? j1.getPrimaryKey().length : 0).compareTo(j2 != null ? j2.getPrimaryKey().length : 0);
+            }
+        });
+        return dimensions;
+    }
+
+    /**
+     * Generate the fact table and return it as text
+     *
+     * @return the generated fact table content, one CSV row per line
+     * @throws Exception
+     */
+    private String cookData() throws Exception {
+        // the columns on the fact table can be classified into three groups:
+        // 1. foreign keys
+        TreeMap<String, String> factTableCol2LookupCol = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+        // 2. metrics or directly used dimensions
+        TreeSet<String> usedCols = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
+        // 3. others, not referenced anywhere
+
+        TreeMap<String, String> lookupCol2factTableCol = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+
+        // find fact table columns in fks
+        List<DimensionDesc> dimensions = getSortedDimentsionDescs();
+        for (DimensionDesc dim : dimensions) {
+            JoinDesc jDesc = dim.getJoin();
+            if (jDesc != null) {
+                String[] fks = jDesc.getForeignKey();
+                String[] pks = jDesc.getPrimaryKey();
+                int num = fks.length;
+                for (int i = 0; i < num; ++i) {
+                    String value = dim.getTable() + "/" + pks[i];
+
+                    lookupCol2factTableCol.put(value, fks[i]);
+
+                    if (factTableCol2LookupCol.containsKey(fks[i])) {
+                        if (!factTableCol2LookupCol.get(fks[i]).equals(value)) {
+                            System.out.println("Warning: Disambiguation on the mapping of column " + fks[i] + ", " + factTableCol2LookupCol.get(fks[i]) + "(chosen) or " + value);
+                            continue;
+                        }
+                    }
+                    factTableCol2LookupCol.put(fks[i], value);
+                }
+            }
+            // else, deal with it in the next round
+        }
+
+        // find fact table columns in direct dimension
+        // DO NOT merge this with the previous loop
+        for (DimensionDesc dim : dimensions) {
+            JoinDesc jDesc = dim.getJoin();
+            if (jDesc == null) {
+                // column on fact table used directly as a dimension
+                for (String aColumn : dim.getColumn()) {
+                    if (!factTableCol2LookupCol.containsKey(aColumn))
+                        usedCols.add(aColumn);
+                }
+            }
+        }
+
+        // find fact table columns in measures
+        for (MeasureDesc mDesc : desc.getMeasures()) {
+            List<TblColRef> pcols = mDesc.getFunction().getParameter().getColRefs();
+            if (pcols != null) {
+                for (TblColRef col : pcols) {
+                    if (!factTableCol2LookupCol.containsKey(col.getName()))
+                        usedCols.add(col.getName());
+                }
+            }
+        }
+
+        return createTable(this.rowCount, factTableCol2LookupCol, lookupCol2factTableCol, usedCols);
+    }
+
+    private String normToTwoDigits(int v) {
+        if (v < 10)
+            return "0" + v;
+        else
+            return Integer.toString(v);
+    }
+
+    private String randomPick(ArrayList<String> candidates) {
+        int index = r.nextInt(candidates.size());
+        return candidates.get(index);
+    }
+
+    private String createRandomCell(ColumnDesc cDesc, ArrayList<String> range) throws Exception {
+        DataType type = cDesc.getType();
+        if (type.isStringFamily()) {
+            throw new Exception("Can't handle range values for string");
+
+        } else if (type.isIntegerFamily()) {
+            int low = Integer.parseInt(range.get(0));
+            int high = Integer.parseInt(range.get(1));
+            return Integer.toString(r.nextInt(high - low) + low);
+
+        } else if (type.isDouble()) {
+            double low = Double.parseDouble(range.get(0));
+            double high = Double.parseDouble(range.get(1));
+            return String.format("%.4f", r.nextDouble() * (high - low) + low);
+
+        } else if (type.isFloat()) {
+            float low = Float.parseFloat(range.get(0));
+            float high = Float.parseFloat(range.get(1));
+            return String.format("%.4f", r.nextFloat() * (high - low) + low);
+
+        } else if (type.isDecimal()) {
+            double low = Double.parseDouble(range.get(0));
+            double high = Double.parseDouble(range.get(1));
+            return String.format("%.4f", r.nextDouble() * (high - low) + low);
+
+        } else if (type.isDateTimeFamily()) {
+            if (!type.isDate()) {
+                throw new RuntimeException("Does not support " + type);
+            }
+
+            SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
+            Date start = format.parse(range.get(0));
+            Date end = format.parse(range.get(1));
+            long diff = end.getTime() - start.getTime();
+            Date temp = new Date(start.getTime() + (long) (diff * r.nextDouble()));
+            Calendar cal = Calendar.getInstance();
+            cal.setTime(temp);
+            // normalize to the first day of the week
+            cal.set(Calendar.DAY_OF_WEEK, cal.getFirstDayOfWeek());
+
+            return cal.get(Calendar.YEAR) + "-" + normToTwoDigits(cal.get(Calendar.MONTH) + 1) + "-" + normToTwoDigits(cal.get(Calendar.DAY_OF_MONTH));
+        } else {
+            System.out.println("The data type " + type + "is not recognized");
+            System.exit(1);
+        }
+        return null;
+    }
+
+    private String createRandomCell(ColumnDesc cDesc) {
+        String type = cDesc.getTypeName();
+        String s = type.toLowerCase();
+        if (s.equals("string") || s.equals("char") || s.equals("varchar")) {
+            StringBuilder sb = new StringBuilder();
+            for (int i = 0; i < 2; i++) {
+                sb.append((char) ('a' + r.nextInt(10))); // 2 chars, 10 choices each: 10*10 possible strings
+            }
+            return sb.toString();
+        } else if (s.equals("bigint") || s.equals("int") || s.equals("tinyint") || s.equals("smallint")) {
+            return Integer.toString(r.nextInt(128));
+        } else if (s.equals("double")) {
+            return String.format("%.4f", r.nextDouble() * 100);
+        } else if (s.equals("float")) {
+            return String.format("%.4f", r.nextFloat() * 100);
+        } else if (s.equals("decimal")) {
+            return String.format("%.4f", r.nextDouble() * 100);
+        } else if (s.equals("date")) {
+            long date20131231 = 61349312153265L;
+            long date20010101 = 60939158400000L;
+            long diff = date20131231 - date20010101;
+            Date temp = new Date(date20010101 + (long) (diff * r.nextDouble()));
+            Calendar cal = Calendar.getInstance();
+            cal.setTime(temp);
+            // normalize to the first day of the week
+            cal.set(Calendar.DAY_OF_WEEK, cal.getFirstDayOfWeek());
+
+            return cal.get(Calendar.YEAR) + "-" + normToTwoDigits(cal.get(Calendar.MONTH) + 1) + "-" + normToTwoDigits(cal.get(Calendar.DAY_OF_MONTH));
+        } else {
+            System.out.println("The data type " + type + "is not recognized");
+            System.exit(1);
+        }
+        return null;
+    }
+
+    private String createDefaultsCell(String type) {
+        String s = type.toLowerCase();
+        if (s.equals("string") || s.equals("char") || s.equals("varchar")) {
+            return "abcde";
+        } else if (s.equals("bigint") || s.equals("int") || s.equals("tinyint") || s.equals("smallint")) {
+            return "0";
+        } else if (s.equals("double")) {
+            return "0";
+        } else if (s.equals("float")) {
+            return "0";
+        } else if (s.equals("decimal")) {
+            return "0";
+        } else if (s.equals("date")) {
+            return "1970-01-01";
+        } else {
+            System.out.println("The data type " + type + "is not recognized");
+            System.exit(1);
+        }
+        return null;
+    }
+
+    private void printColumnMappings(TreeMap<String, String> factTableCol2LookupCol, TreeSet<String> usedCols, TreeSet<String> defaultColumns) {
+
+        System.out.println("=======================================================================");
+        System.out.format("%-30s %s", "FACT_TABLE_COLUMN", "MAPPING");
+        System.out.println();
+        System.out.println();
+        for (Map.Entry<String, String> entry : factTableCol2LookupCol.entrySet()) {
+            System.out.format("%-30s %s", entry.getKey(), entry.getValue());
+            System.out.println();
+        }
+        for (String key : usedCols) {
+            System.out.format("%-30s %s", key, "Random Values");
+            System.out.println();
+        }
+        for (String key : defaultColumns) {
+            System.out.format("%-30s %s", key, "Default Values");
+            System.out.println();
+        }
+        System.out.println("=======================================================================");
+
+        System.out.println("Parameters:");
+        System.out.println();
+        System.out.println("CubeName:        " + cubeName);
+        System.out.println("RowCount:        " + rowCount);
+        System.out.println("ConflictRatio:   " + conflictRatio);
+        System.out.println("LinkableRatio:   " + linkableRatio);
+        System.out.println("Seed:            " + randomSeed);
+        System.out.println();
+        System.out.println("The number of actual unlinkable fact rows is: " + this.unlinkableRowCount);
+        System.out.println("You can vary the above parameters to generate different datasets.");
+        System.out.println();
+    }
+
+    // Every generated row must eventually appear in the flattened big table.
+    // For single-column joins the generated row is guaranteed to have a match
+    // in the lookup table; for composite keys we need an extra check.
+    private boolean matchAllCompositeKeys(TreeMap<String, String> lookupCol2FactTableCol, LinkedList<String> columnValues) {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+
+        for (String lookupTable : lookupTableKeys.keySet()) {
+            if (lookupTableKeys.get(lookupTable).size() == 1)
+                continue;
+
+            String[] comboKey = new String[lookupTableKeys.get(lookupTable).size()];
+            int index = 0;
+            for (String column : lookupTableKeys.get(lookupTable)) {
+                String key = lookupTable + "/" + column;
+                String factTableCol = lookupCol2FactTableCol.get(key);
+                int cardinal = MetadataManager.getInstance(config).getTableDesc(factTableName).findColumnByName(factTableCol).getZeroBasedIndex();
+                comboKey[index] = columnValues.get(cardinal);
+
+                index++;
+            }
+            Array<String> wrap = new Array<String>(comboKey);
+            if (!lookupTableCompositeKeyValues.get(lookupTable).contains(wrap)) {
+                // System.out.println("Try " + wrap + " Failed, continue...");
+                return false;
+            }
+        }
+        return true;
+    }
+
+    private String createCell(ColumnDesc cDesc) throws Exception {
+        ColumnConfig cConfig = genConf.getColumnConfigByName(cDesc.getName());
+
+        if (cConfig == null) {
+            // if the column is not configured, use random values
+            return createRandomCell(cDesc);
+
+        } else {
+            // the column has a configuration
+            if (!cConfig.isAsRange() && !cConfig.isExclusive() && r.nextBoolean()) {
+                // if the column still allows random values
+                return createRandomCell(cDesc);
+
+            } else {
+                // use specified values
+                ArrayList<String> valueSet = cConfig.getValueSet();
+                if (valueSet == null || valueSet.isEmpty())
+                    throw new Exception("Did you forget to specify a value set for column " + cDesc.getName() + "?");
+
+                if (!cConfig.isAsRange()) {
+                    return randomPick(valueSet);
+                } else {
+                    if (valueSet.size() != 2)
+                        throw new Exception("Exactly two values (min, max) must be set for a range on column " + cDesc.getName());
+
+                    return createRandomCell(cDesc, valueSet);
+                }
+            }
+
+        }
+    }
+
+    private LinkedList<String> createRow(TreeMap<String, String> factTableCol2LookupCol, TreeSet<String> usedCols, TreeSet<String> defaultColumns) throws Exception {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        LinkedList<String> columnValues = new LinkedList<String>();
+
+        for (ColumnDesc cDesc : MetadataManager.getInstance(config).getTableDesc(factTableName).getColumns()) {
+
+            String colName = cDesc.getName();
+
+            if (factTableCol2LookupCol.containsKey(colName)) {
+
+                // if the current column is a fk column in fact table
+                ArrayList<String> candidates = this.feasibleValues.get(factTableCol2LookupCol.get(colName));
+
+                columnValues.add(candidates.get(r.nextInt(candidates.size())));
+            } else if (usedCols.contains(colName)) {
+
+                // if the current column is a metric column in fact table
+                columnValues.add(createCell(cDesc));
+            } else {
+
+                // otherwise this column is not useful in OLAP
+                columnValues.add(createDefaultsCell(cDesc.getTypeName()));
+                defaultColumns.add(colName);
+            }
+        }
+
+        return columnValues;
+    }
+
+    /**
+     * Returns the table contents as text, one row per line.
+     *
+     * @param rowCount               number of rows to generate
+     * @param factTableCol2LookupCol mapping from fact table FK columns to lookup table columns
+     * @param lookupCol2FactTableCol reverse mapping, from lookup table columns to fact table columns
+     * @param usedCols               fact table columns used directly as dimensions or measures
+     * @return the generated rows as comma-separated text
+     * @throws Exception if a row cannot be generated from the column configuration
+     */
+    private String createTable(int rowCount, TreeMap<String, String> factTableCol2LookupCol, TreeMap<String, String> lookupCol2FactTableCol, TreeSet<String> usedCols) throws Exception {
+        try {
+            TreeSet<String> defaultColumns = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
+
+            StringBuilder sb = new StringBuilder();
+            for (int i = 0; i < rowCount;) {
+
+                LinkedList<String> columnValues = createRow(factTableCol2LookupCol, usedCols, defaultColumns);
+
+                if (!matchAllCompositeKeys(lookupCol2FactTableCol, columnValues)) {
+                    if (unlinkableRowCount < unlinkableRowCountMax) {
+                        unlinkableRowCount++;
+                    } else {
+                        continue;
+                    }
+                }
+
+                for (String c : columnValues)
+                    sb.append(c).append(',');
+                sb.deleteCharAt(sb.length() - 1); // drop the trailing comma
+                sb.append(System.getProperty("line.separator"));
+
+                i++;
+
+                // System.out.println("Just generated the " + i + "th record");
+            }
+
+            printColumnMappings(factTableCol2LookupCol, usedCols, defaultColumns);
+
+            return sb.toString();
+
+        } catch (IOException e) {
+            e.printStackTrace();
+            System.exit(1);
+        }
+
+        return null;
+    }
+
+    /**
+     * Randomly create a fact table and return the table content.
+     *
+     * @param cubeName      name of the cube
+     * @param rowCount      expected number of rows to generate (default "10000")
+     * @param linkableRatio the fraction of fact table rows that can be linked with all
+     *                      lookup tables by INNER join (default "0.6")
+     * @param randomSeed    random seed; null means a randomly chosen seed
+     */
+    public static String generate(String cubeName, String rowCount, String linkableRatio, String randomSeed) throws Exception {
+
+        if (rowCount == null)
+            rowCount = "10000";
+        if (linkableRatio == null)
+            linkableRatio = "0.6";
+
+        // if randomSeed == null, leave it unset; a random seed is picked below
+
+        // conflictRatio is fixed at 5; this parameter is no longer configurable
+
+        FactTableGenerator generator = new FactTableGenerator();
+        long seed;
+        if (randomSeed != null) {
+            seed = Long.parseLong(randomSeed);
+        } else {
+            Random r = new Random();
+            seed = r.nextLong();
+        }
+
+        generator.init(cubeName, Integer.parseInt(rowCount), 5, Double.parseDouble(linkableRatio), seed);
+        generator.prepare();
+        return generator.cookData();
+    }
+}
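
For reference, a minimal sketch of driving this generator from a test. The cube name is
one that appears in the test metadata elsewhere in this patch; the output file name and
the use of java.nio for writing are illustrative, not part of this change:

    // null rowCount/linkableRatio/randomSeed fall back to the defaults
    // documented above: 10000 rows, 0.6 linkable ratio, a random seed
    String tableContent = FactTableGenerator.generate("test_kylin_cube_with_slr_empty", null, null, null);
    Files.write(Paths.get("fact_table.csv"), tableContent.getBytes(StandardCharsets.UTF_8));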

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/dataGen/GenConfig.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/dataGen/GenConfig.java b/assembly/src/test/java/org/apache/kylin/job/dataGen/GenConfig.java
new file mode 100644
index 0000000..c58cfb6
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/dataGen/GenConfig.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.dataGen;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import org.apache.kylin.common.util.JsonUtil;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ */
+@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
+public class GenConfig {
+
+    @JsonProperty("columnConfigs")
+    private ArrayList<ColumnConfig> columnConfigs;
+
+    private HashMap<String, ColumnConfig> cache = new HashMap<String, ColumnConfig>();
+
+    public ArrayList<ColumnConfig> getColumnConfigs() {
+        return columnConfigs;
+    }
+
+    public void setColumnConfigs(ArrayList<ColumnConfig> columnConfigs) {
+        this.columnConfigs = columnConfigs;
+    }
+
+    public ColumnConfig getColumnConfigByName(String columnName) {
+        columnName = columnName.toLowerCase();
+
+        if (cache.containsKey(columnName))
+            return cache.get(columnName);
+
+        for (ColumnConfig cConfig : columnConfigs) {
+            if (cConfig.getColumnName().equalsIgnoreCase(columnName)) {
+                cache.put(columnName, cConfig);
+                return cConfig;
+            }
+        }
+        cache.put(columnName, null);
+        return null;
+    }
+
+    public static GenConfig loadConfig(InputStream stream) {
+        try {
+            return JsonUtil.readValue(stream, GenConfig.class);
+        } catch (IOException e) {
+            // JsonParseException and JsonMappingException are IOExceptions too
+            e.printStackTrace();
+        }
+
+        return null;
+    }
+}
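
A short sketch of how this config is consumed. The ColumnConfig JSON field names used
below (columnName, valueSet, asRange, exclusive) are assumptions inferred from the
getters that FactTableGenerator calls; they are not confirmed by this patch:

    // build a GenConfig from an in-memory JSON document and look up one column
    String json = "{\"columnConfigs\":[{\"columnName\":\"PRICE\","
            + "\"valueSet\":[\"1\",\"100\"],\"asRange\":true,\"exclusive\":false}]}";
    GenConfig conf = GenConfig.loadConfig(new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)));
    ColumnConfig priceConfig = conf.getColumnConfigByName("price"); // lookup is case-insensitive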

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/hadoop/invertedindex/IITest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/hadoop/invertedindex/IITest.java b/assembly/src/test/java/org/apache/kylin/job/hadoop/invertedindex/IITest.java
new file mode 100644
index 0000000..dcd460b
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/hadoop/invertedindex/IITest.java
@@ -0,0 +1,240 @@
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Queue;
+import java.util.Set;
+
+import javax.annotation.Nullable;
+
+import org.apache.commons.lang.NotImplementedException;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.kylin.common.util.FIFOIterable;
+import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.index.Slice;
+import org.apache.kylin.invertedindex.index.TableRecordInfo;
+import org.apache.kylin.invertedindex.index.TableRecordInfoDigest;
+import org.apache.kylin.invertedindex.model.IIDesc;
+import org.apache.kylin.invertedindex.model.IIKeyValueCodec;
+import org.apache.kylin.invertedindex.model.IIKeyValueCodecWithState;
+import org.apache.kylin.invertedindex.model.IIRow;
+import org.apache.kylin.invertedindex.model.KeyValueCodec;
+import org.apache.kylin.metadata.filter.ColumnTupleFilter;
+import org.apache.kylin.metadata.filter.CompareTupleFilter;
+import org.apache.kylin.metadata.filter.ConstantTupleFilter;
+import org.apache.kylin.metadata.filter.TupleFilter;
+import org.apache.kylin.metadata.model.FunctionDesc;
+import org.apache.kylin.metadata.model.ParameterDesc;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorFilter;
+import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorProjector;
+import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorRowType;
+import org.apache.kylin.storage.hbase.common.coprocessor.FilterDecorator;
+import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.ClearTextDictionary;
+import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.EndpointAggregators;
+import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.IIEndpoint;
+import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.generated.IIProtos;
+import org.apache.kylin.streaming.MicroStreamBatch;
+import org.apache.kylin.streaming.ParsedStreamMessage;
+import org.apache.kylin.streaming.StreamMessage;
+import org.apache.kylin.streaming.StreamParser;
+import org.apache.kylin.streaming.StringStreamParser;
+import org.apache.kylin.streaming.invertedindex.SliceBuilder;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+/**
+ */
+public class IITest extends LocalFileMetadataTestCase {
+
+    String iiName = "test_kylin_ii_inner_join";
+    IIInstance ii;
+    IIDesc iiDesc;
+    String cubeName = "test_kylin_cube_with_slr_empty";
+
+    List<IIRow> iiRows;
+
+    final String[] inputData = new String[] { //
+    "FP-non GTC,0,15,145970,0,28,Toys,2008-10-08 07:18:40,USER_Y,Toys & Hobbies,Models & Kits,Automotive,0,Ebay,USER_S,15,Professional-Other,2012-08-16,2012-08-11,0,2012-08-16,145970,10000329,26.8551,0", //
+            "ABIN,0,-99,43479,0,21,Photo,2012-09-11 20:26:04,USER_Y,Cameras & Photo,Film Photography,Other,0,Ebay,USER_S,-99,Not Applicable,2012-08-16,2012-08-11,0,2012-08-16,43479,10000807,26.2474,0", //
+            "ABIN,0,16,80053,0,12,Computers,2012-06-19 21:15:09,USER_Y,Computers/Tablets & Networking,MonitorProjectors & Accs,Monitors,0,Ebay,USER_S,16,Consumer-Other,2012-08-16,2012-08-11,0,2012-08-16,80053,10000261,94.2273,0" };
+
+    @Before
+    public void setUp() throws Exception {
+        this.createTestMetadata();
+        this.ii = IIManager.getInstance(getTestConfig()).getII(iiName);
+        this.iiDesc = ii.getDescriptor();
+
+        List<StreamMessage> streamMessages = Lists.transform(Arrays.asList(inputData), new Function<String, StreamMessage>() {
+            @Nullable
+            @Override
+            public StreamMessage apply(String input) {
+                return new StreamMessage(System.currentTimeMillis(), input.getBytes());
+            }
+        });
+
+        StreamParser parser = StringStreamParser.instance;
+
+        MicroStreamBatch batch = new MicroStreamBatch(0);
+        for (StreamMessage message : streamMessages) {
+            ParsedStreamMessage parsedStreamMessage = parser.parse(message);
+            if (parsedStreamMessage.isAccepted()) {
+                batch.add(parsedStreamMessage);
+            }
+        }
+
+        iiRows = Lists.newArrayList();
+        final Slice slice = new SliceBuilder(iiDesc, (short) 0, true).buildSlice(batch);
+        IIKeyValueCodec codec = new IIKeyValueCodec(slice.getInfo());
+        for (IIRow iiRow : codec.encodeKeyValue(slice)) {
+            iiRows.add(iiRow);
+        }
+    }
+
+    @After
+    public void after() throws Exception {
+        cleanupTestMetadata();
+    }
+
+    /**
+     * simulate stream building into slices, and encode the slice into IIRows.
+     * Then reconstruct the IIRows to slice.
+     */
+    @Test
+    public void basicTest() {
+        Queue<IIRow> buffer = Lists.newLinkedList();
+        FIFOIterable bufferIterable = new FIFOIterable(buffer);
+        TableRecordInfo info = new TableRecordInfo(iiDesc);
+        TableRecordInfoDigest digest = info.getDigest();
+        KeyValueCodec codec = new IIKeyValueCodecWithState(digest);
+        Iterator<Slice> slices = codec.decodeKeyValue(bufferIterable).iterator();
+
+        Assert.assertFalse(slices.hasNext());
+        Assert.assertEquals(iiRows.size(), digest.getColumnCount());
+
+        for (int i = 0; i < digest.getColumnCount(); ++i) {
+            buffer.add(iiRows.get(i));
+
+            if (i != digest.getColumnCount() - 1) {
+                Assert.assertFalse(slices.hasNext());
+            } else {
+                Assert.assertTrue(slices.hasNext());
+            }
+        }
+
+        Slice newSlice = slices.next();
+        Assert.assertEquals(2, newSlice.getLocalDictionaries()[0].getSize());
+    }
+
+    @Test
+    public void IIEndpointTest() {
+        TableRecordInfo info = new TableRecordInfo(ii.getDescriptor());
+        CoprocessorRowType type = CoprocessorRowType.fromTableRecordInfo(info, ii.getFirstSegment().getColumns());
+        CoprocessorProjector projector = CoprocessorProjector.makeForEndpoint(info, Collections.singletonList(ii.getDescriptor().findColumnRef("default.test_kylin_fact", "lstg_format_name")));
+
+        FunctionDesc f1 = new FunctionDesc();
+        f1.setExpression("SUM");
+        ParameterDesc p1 = new ParameterDesc();
+        p1.setType("column");
+        p1.setValue("PRICE");
+        f1.setParameter(p1);
+        f1.setReturnType("decimal(19,4)");
+
+        TblColRef column = ii.getDescriptor().findColumnRef("default.test_kylin_fact", "cal_dt");
+        CompareTupleFilter compareFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GTE);
+        ColumnTupleFilter columnFilter = new ColumnTupleFilter(column);
+        compareFilter.addChild(columnFilter);
+        ConstantTupleFilter constantFilter = new ConstantTupleFilter("2012-08-16");
+        compareFilter.addChild(constantFilter);
+
+        EndpointAggregators aggregators = EndpointAggregators.fromFunctions(info, Collections.singletonList(f1));
+        CoprocessorFilter filter = CoprocessorFilter.fromFilter(new ClearTextDictionary(info), compareFilter, FilterDecorator.FilterConstantsTreatment.AS_IT_IS);
+
+        final Iterator<IIRow> iiRowIterator = iiRows.iterator();
+
+        IIEndpoint endpoint = new IIEndpoint();
+        IIProtos.IIResponseInternal response = endpoint.getResponse(new RegionScanner() {
+            @Override
+            public HRegionInfo getRegionInfo() {
+                throw new NotImplementedException();
+            }
+
+            @Override
+            public boolean isFilterDone() throws IOException {
+                throw new NotImplementedException();
+            }
+
+            @Override
+            public boolean reseek(byte[] row) throws IOException {
+                throw new NotImplementedException();
+            }
+
+            @Override
+            public long getMaxResultSize() {
+                throw new NotImplementedException();
+            }
+
+            @Override
+            public long getMvccReadPoint() {
+                throw new NotImplementedException();
+            }
+
+            @Override
+            public boolean nextRaw(List<Cell> result) throws IOException {
+                if (iiRowIterator.hasNext()) {
+                    IIRow iiRow = iiRowIterator.next();
+                    result.addAll(iiRow.makeCells());
+                    return true;
+                } else {
+                    return false;
+                }
+            }
+
+            @Override
+            public boolean nextRaw(List<Cell> result, int limit) throws IOException {
+                throw new NotImplementedException();
+            }
+
+            @Override
+            public boolean next(List<Cell> results) throws IOException {
+                throw new NotImplementedException();
+            }
+
+            @Override
+            public boolean next(List<Cell> result, int limit) throws IOException {
+                throw new NotImplementedException();
+            }
+
+            @Override
+            public void close() throws IOException {
+                throw new NotImplementedException();
+            }
+        }, type, projector, aggregators, filter);
+
+        Assert.assertEquals(2, response.getRowsList().size());
+        System.out.println(response.getRowsList().size());
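+        // with the filter cal_dt >= 2012-08-16 all three input rows survive; grouped
+        // by lstg_format_name the two ABIN rows sum to 26.2474 + 94.2273 = 120.4747
+        // and the single "FP-non GTC" row contributes 26.8551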
+        Set<String> answers = Sets.newHashSet("120.4747", "26.8551");
+        for (IIProtos.IIResponseInternal.IIRow responseRow : response.getRowsList()) {
+            byte[] measuresBytes = responseRow.getMeasures().toByteArray();
+            List<Object> metrics = aggregators.deserializeMetricValues(measuresBytes, 0);
+            Assert.assertTrue(answers.contains(metrics.get(0)));
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/streaming/CubeStreamConsumerTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/streaming/CubeStreamConsumerTest.java b/assembly/src/test/java/org/apache/kylin/job/streaming/CubeStreamConsumerTest.java
new file mode 100644
index 0000000..be4fa26
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/streaming/CubeStreamConsumerTest.java
@@ -0,0 +1,90 @@
+package org.apache.kylin.job.streaming;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingDeque;
+
+import org.apache.hadoop.util.StringUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.AbstractKylinTestCase;
+import org.apache.kylin.common.util.ClassUtil;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.CubeUpdate;
+import org.apache.kylin.job.DeployUtil;
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.apache.kylin.streaming.StreamBuilder;
+import org.apache.kylin.streaming.StreamMessage;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+/**
+ */
+@Ignore
+public class CubeStreamConsumerTest {
+
+    private static final Logger logger = LoggerFactory.getLogger(CubeStreamConsumerTest.class);
+
+    private KylinConfig kylinConfig;
+
+    private static final String CUBE_NAME = "test_kylin_cube_without_slr_left_join_ready";
+
+    @BeforeClass
+    public static void beforeClass() throws Exception {
+        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        System.setProperty("hdp.version", "2.2.0.0-2041"); // mapred-site.xml ref this
+    }
+
+    @Before
+    public void before() throws Exception {
+        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
+
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        DeployUtil.initCliWorkDir();
+        DeployUtil.deployMetadata();
+        DeployUtil.overrideJobJarLocations();
+        final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(CUBE_NAME);
+        CubeUpdate cubeBuilder = new CubeUpdate(cube);
+        cubeBuilder.setToRemoveSegs(cube.getSegments().toArray(new CubeSegment[cube.getSegments().size()]));
+        // remove all existing segments
+        CubeManager.getInstance(kylinConfig).updateCube(cubeBuilder);
+
+    }
+
+    @Test
+    public void test() throws Exception {
+        LinkedBlockingDeque<StreamMessage> queue = new LinkedBlockingDeque<>();
+        List<BlockingQueue<StreamMessage>> queues = Lists.newArrayList();
+        queues.add(queue);
+        StreamBuilder cubeStreamBuilder = StreamBuilder.newPeriodicalStreamBuilder(CUBE_NAME, queues, new CubeStreamConsumer(CUBE_NAME), System.currentTimeMillis(), 30L * 1000);
+        final Future<?> future = Executors.newSingleThreadExecutor().submit(cubeStreamBuilder);
+        loadDataFromLocalFile(queue, 100000);
+        future.get();
+    }
+
+    private void loadDataFromLocalFile(BlockingQueue<StreamMessage> queue, final int maxCount) throws IOException, InterruptedException {
+        // try-with-resources so the reader is closed even if put() is interrupted
+        try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream("../table.txt")))) {
+            String line;
+            int count = 0;
+            while ((line = br.readLine()) != null && count++ < maxCount) {
+                final List<String> strings = Arrays.asList(line.split("\t"));
+                queue.put(new StreamMessage(System.currentTimeMillis(), StringUtils.join(",", strings).getBytes()));
+            }
+        }
+        queue.put(StreamMessage.EOF);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/streaming/PeriodicalStreamBuilderTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/streaming/PeriodicalStreamBuilderTest.java b/assembly/src/test/java/org/apache/kylin/job/streaming/PeriodicalStreamBuilderTest.java
new file mode 100644
index 0000000..dc6d312
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/streaming/PeriodicalStreamBuilderTest.java
@@ -0,0 +1,144 @@
+package org.apache.kylin.job.streaming;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.apache.kylin.common.util.TimeUtil;
+import org.apache.kylin.streaming.MicroStreamBatch;
+import org.apache.kylin.streaming.MicroStreamBatchConsumer;
+import org.apache.kylin.streaming.ParsedStreamMessage;
+import org.apache.kylin.streaming.StreamBuilder;
+import org.apache.kylin.streaming.StreamMessage;
+import org.apache.kylin.streaming.StreamParser;
+import org.apache.kylin.streaming.StreamingManager;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+/**
+ */
+public class PeriodicalStreamBuilderTest extends LocalFileMetadataTestCase {
+
+    private static final Logger logger = LoggerFactory.getLogger(PeriodicalStreamBuilderTest.class);
+
+    @Before
+    public void setup() {
+        this.createTestMetadata();
+
+    }
+
+    @After
+    public void clear() {
+        this.cleanupTestMetadata();
+    }
+
+    private List<StreamMessage> prepareTestData(long start, long end, int count) {
+        double step = (double) (end - start) / (count - 1);
+        long ts = start;
+        int offset = 0;
+        ArrayList<StreamMessage> result = Lists.newArrayList();
+        for (int i = 0; i < count - 1; ++i) {
+            result.add(new StreamMessage(offset++, String.valueOf(ts).getBytes()));
+            ts += step;
+        }
+        result.add(new StreamMessage(offset++, String.valueOf(end).getBytes()));
+        assertEquals(count, result.size());
+        assertEquals(start + "", new String(result.get(0).getRawData()));
+        assertEquals(end + "", new String(result.get(count - 1).getRawData()));
+        return result;
+    }
+
+    @Test
+    public void test() throws ExecutionException, InterruptedException {
+
+        List<BlockingQueue<StreamMessage>> queues = Lists.newArrayList();
+        queues.add(new LinkedBlockingQueue<StreamMessage>());
+        queues.add(new LinkedBlockingQueue<StreamMessage>());
+
+        final long interval = 3000L;
+        final long nextPeriodStart = TimeUtil.getNextPeriodStart(System.currentTimeMillis(), interval);
+
+        final List<Integer> partitionIds = Lists.newArrayList();
+        for (int i = 0; i < queues.size(); i++) {
+            partitionIds.add(i);
+        }
+
+        final MicroStreamBatchConsumer consumer = new MicroStreamBatchConsumer() {
+            @Override
+            public void consume(MicroStreamBatch microStreamBatch) throws Exception {
+                logger.info("consuming batch:" + microStreamBatch.getPartitionId() + " count:" + microStreamBatch.size() + " timestamp:" + microStreamBatch.getTimestamp() + " offset:" + microStreamBatch.getOffset());
+            }
+
+            @Override
+            public void stop() {
+                logger.info("consumer stopped");
+            }
+        };
+        final StreamBuilder streamBuilder = StreamBuilder.newPeriodicalStreamBuilder("test", queues, consumer, nextPeriodStart, interval);
+
+        streamBuilder.setStreamParser(new StreamParser() {
+            @Override
+            public ParsedStreamMessage parse(StreamMessage streamMessage) {
+                return new ParsedStreamMessage(Collections.<String> emptyList(), streamMessage.getOffset(), Long.parseLong(new String(streamMessage.getRawData())), true);
+            }
+        });
+
+        Future<?> future = Executors.newSingleThreadExecutor().submit(streamBuilder);
+        long timeout = nextPeriodStart + interval;
+        int messageCount = 0;
+        int inPeriodMessageCount = 0;
+        int expectedOffset = 0;
+        logger.info("prepare to add StreamMessage");
+        while (true) {
+            long ts = System.currentTimeMillis();
+            if (ts >= timeout + interval) {
+                break;
+            }
+            if (ts >= nextPeriodStart && ts < timeout) {
+                inPeriodMessageCount++;
+            }
+            for (BlockingQueue<StreamMessage> queue : queues) {
+                queue.put(new StreamMessage(messageCount, String.valueOf(ts).getBytes()));
+            }
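+            // the message just enqueued is the first one at or past the period end,
+            // so the last in-period message has offset messageCount - 1; that is the
+            // offset the consumer is expected to commit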
+            if (expectedOffset == 0 && ts >= timeout) {
+                expectedOffset = messageCount - 1;
+            }
+            messageCount++;
+            Thread.sleep(10);
+        }
+        logger.info("totally put " + messageCount + " StreamMessages");
+        logger.info("totally in period " + inPeriodMessageCount + " StreamMessages");
+
+        for (BlockingQueue<StreamMessage> queue : queues) {
+            queue.put(StreamMessage.EOF);
+        }
+
+        future.get();
+
+        for (BlockingQueue<StreamMessage> queue : queues) {
+            queue.take();
+        }
+
+        final Map<Integer, Long> offsets = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getOffset("test", partitionIds);
+        logger.info("offset:" + offsets);
+        for (Long offset : offsets.values()) {
+            assertEquals(expectedOffset, offset.longValue());
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java b/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
new file mode 100644
index 0000000..075a048
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
@@ -0,0 +1,76 @@
+package org.apache.kylin.job.streaming;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Random;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.metadata.MetadataManager;
+import org.apache.kylin.metadata.model.ColumnDesc;
+import org.apache.kylin.metadata.model.DataType;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.SortedMultiset;
+import com.google.common.collect.TreeMultiset;
+
+/**
+ * this is for generating fact table data for test_streaming_table (cube streaming)
+ */
+public class StreamingTableDataGenerator {
+
+    private static final Logger logger = LoggerFactory.getLogger(StreamingTableDataGenerator.class);
+    private static final ObjectMapper mapper = new ObjectMapper();
+
+    public static List<String> generate(int recordCount, long startTime, long endTime, String tableName) {
+        Preconditions.checkArgument(startTime < endTime);
+        Preconditions.checkArgument(recordCount > 0);
+
+        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        TableDesc tableDesc = MetadataManager.getInstance(kylinConfig).getTableDesc(tableName);
+
+        SortedMultiset<Long> times = TreeMultiset.create();
+        Random r = new Random();
+        for (int i = 0; i < recordCount; i++) {
+            long t = startTime + (long) ((endTime - startTime) * r.nextDouble());
+            times.add(t);
+        }
+
+        List<String> ret = Lists.newArrayList();
+        HashMap<String, String> kvs = Maps.newHashMap();
+        for (long time : times) {
+            kvs.clear();
+            kvs.put("timestamp", String.valueOf(time));
+            for (ColumnDesc columnDesc : tableDesc.getColumns()) {
+                String lowerCaseColumnName = columnDesc.getName().toLowerCase();
+                DataType dataType = columnDesc.getType();
+                if (dataType.isDateTimeFamily()) {
+                    // TimedJsonStreamParser will derive minute_start, hour_start, day_start from the timestamp
+                    continue;
+                } else if (dataType.isStringFamily()) {
+                    char c = (char) ('A' + (int) (26 * r.nextDouble()));
+                    kvs.put(lowerCaseColumnName, String.valueOf(c));
+                } else if (dataType.isIntegerFamily()) {
+                    int v = r.nextInt(10000);
+                    kvs.put(lowerCaseColumnName, String.valueOf(v));
+                } else if (dataType.isNumberFamily()) {
+                    String v = String.format("%.4f", r.nextDouble() * 100);
+                    kvs.put(lowerCaseColumnName, v);
+                }
+            }
+            try {
+                ret.add(mapper.writeValueAsString(kvs));
+            } catch (JsonProcessingException e) {
+                logger.error("error!", e);
+            }
+        }
+
+        return ret;
+    }
+}
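
A minimal usage sketch. The qualified table name below is an illustrative assumption
(the class comment says the generator targets test_streaming_table); the table must
exist in the test metadata for the column lookup to succeed:

    // generate 1000 JSON records spread randomly over the last hour
    long end = System.currentTimeMillis();
    long start = end - 3600 * 1000L;
    List<String> records = StreamingTableDataGenerator.generate(1000, start, end, "DEFAULT.TEST_STREAMING_TABLE");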

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java b/assembly/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java
new file mode 100644
index 0000000..8218d51
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.source.hive;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Set;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ITHiveSourceTableLoaderTest extends HBaseMetadataTestCase {
+
+    @Before
+    public void setup() throws Exception {
+        super.createTestMetadata();
+    }
+
+    @After
+    public void after() throws Exception {
+        super.cleanupTestMetadata();
+    }
+
+    @Test
+    public void test() throws IOException {
+        if (!useSandbox())
+            return;
+
+        KylinConfig config = getTestConfig();
+        String[] toLoad = new String[] { "DEFAULT.TEST_KYLIN_FACT", "EDW.TEST_CAL_DT" };
+        Set<String> loaded = HiveSourceTableLoader.reloadHiveTables(toLoad, config);
+
+        assertTrue(loaded.size() == toLoad.length);
+        for (String str : toLoad)
+            assertTrue(loaded.contains(str));
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/source/hive/ITHiveTableReaderTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/source/hive/ITHiveTableReaderTest.java b/assembly/src/test/java/org/apache/kylin/source/hive/ITHiveTableReaderTest.java
new file mode 100644
index 0000000..57c0be3
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/source/hive/ITHiveTableReaderTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.source.hive;
+
+import java.io.IOException;
+
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * This test case needs the Hive runtime; please run it with the sandbox.
+ * It is in the exclude list of the default profile in pom.xml.
+ *
+ * @author shaoshi
+ */
+public class ITHiveTableReaderTest extends HBaseMetadataTestCase {
+
+    @Test
+    public void test() throws IOException {
+        HiveTableReader reader = new HiveTableReader("default", "test_kylin_fact");
+        int rowNumber = 0;
+        while (reader.next()) {
+            String[] row = reader.getRow();
+            Assert.assertEquals(9, row.length);
+            //System.out.println(ArrayUtils.toString(row));
+            rowNumber++;
+        }
+
+        reader.close();
+        Assert.assertEquals(10000, rowNumber);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/source/hive/ITSnapshotManagerTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/source/hive/ITSnapshotManagerTest.java b/assembly/src/test/java/org/apache/kylin/source/hive/ITSnapshotManagerTest.java
new file mode 100644
index 0000000..0df632a
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/source/hive/ITSnapshotManagerTest.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.source.hive;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+import org.apache.kylin.dict.lookup.SnapshotManager;
+import org.apache.kylin.dict.lookup.SnapshotTable;
+import org.apache.kylin.metadata.MetadataManager;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.source.ReadableTable;
+import org.apache.kylin.source.ReadableTable.TableReader;
+import org.apache.kylin.source.SourceFactory;
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * @author yangli9
+ * 
+ */
+public class ITSnapshotManagerTest extends HBaseMetadataTestCase {
+
+    SnapshotManager snapshotMgr;
+
+    @Before
+    public void setup() throws Exception {
+        createTestMetadata();
+        snapshotMgr = SnapshotManager.getInstance(getTestConfig());
+    }
+
+    @After
+    public void after() throws Exception {
+        cleanupTestMetadata();
+    }
+
+    @Test
+    public void basicTest() throws Exception {
+        String tableName = "EDW.TEST_SITES";
+        TableDesc tableDesc = MetadataManager.getInstance(getTestConfig()).getTableDesc(tableName);
+        ReadableTable hiveTable = SourceFactory.createReadableTable(tableDesc);
+        String snapshotPath = snapshotMgr.buildSnapshot(hiveTable, tableDesc).getResourcePath();
+
+        snapshotMgr.wipeoutCache();
+
+        SnapshotTable snapshot = snapshotMgr.getSnapshotTable(snapshotPath);
+
+        // compare hive & snapshot
+        TableReader hiveReader = hiveTable.getReader();
+        TableReader snapshotReader = snapshot.getReader();
+
+        while (true) {
+            boolean hiveNext = hiveReader.next();
+            boolean snapshotNext = snapshotReader.next();
+            assertEquals(hiveNext, snapshotNext);
+
+            if (!hiveNext)
+                break;
+
+            String[] hiveRow = hiveReader.getRow();
+            String[] snapshotRow = snapshotReader.getRow();
+            assertArrayEquals(hiveRow, snapshotRow);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/build/script/prepare_libs.sh
----------------------------------------------------------------------
diff --git a/build/script/prepare_libs.sh b/build/script/prepare_libs.sh
index a80d4f4..2012b3f 100755
--- a/build/script/prepare_libs.sh
+++ b/build/script/prepare_libs.sh
@@ -13,7 +13,7 @@ echo "version ${version}"
 echo "copy lib file"
 rm -rf build/lib
 mkdir build/lib
-cp job/target/kylin-job-${version}-job.jar build/lib/kylin-job-${version}.jar
+cp assembly/target/kylin-job-${version}-job.jar build/lib/kylin-job-${version}.jar
 cp storage-hbase/target/kylin-storage-hbase-${version}-coprocessor.jar build/lib/kylin-coprocessor-${version}.jar
 cp jdbc/target/kylin-jdbc-${version}.jar build/lib/kylin-jdbc-${version}.jar
 # Copied file becomes 000 for some env (e.g. my Cygwin)

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/core-job/src/main/java/org/apache/kylin/job/JobInstance.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/JobInstance.java b/core-job/src/main/java/org/apache/kylin/job/JobInstance.java
index 001dfe5..e5a4540 100644
--- a/core-job/src/main/java/org/apache/kylin/job/JobInstance.java
+++ b/core-job/src/main/java/org/apache/kylin/job/JobInstance.java
@@ -37,6 +37,7 @@ import com.fasterxml.jackson.annotation.JsonManagedReference;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.collect.Lists;
 
+@SuppressWarnings("serial")
 @JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
 public class JobInstance extends RootPersistentEntity implements Comparable<JobInstance> {
 

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableOutputPO.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableOutputPO.java b/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableOutputPO.java
index 9ebe335..1e34f39 100644
--- a/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableOutputPO.java
+++ b/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableOutputPO.java
@@ -28,6 +28,7 @@ import com.google.common.collect.Maps;
 
 /**
  */
+@SuppressWarnings("serial")
 @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
 public class ExecutableOutputPO extends RootPersistentEntity {
 

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/core-job/src/main/java/org/apache/kylin/job/dao/ExecutablePO.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/dao/ExecutablePO.java b/core-job/src/main/java/org/apache/kylin/job/dao/ExecutablePO.java
index 52df562..75717e0 100644
--- a/core-job/src/main/java/org/apache/kylin/job/dao/ExecutablePO.java
+++ b/core-job/src/main/java/org/apache/kylin/job/dao/ExecutablePO.java
@@ -29,6 +29,7 @@ import com.google.common.collect.Maps;
 
 /**
  */
+@SuppressWarnings("serial")
 @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
 public class ExecutablePO extends RootPersistentEntity {
 

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
new file mode 100644
index 0000000..ecac973
--- /dev/null
+++ b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.impl.threadpool;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.apache.kylin.job.constant.ExecutableConstants;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.apache.kylin.job.lock.MockJobLock;
+import org.apache.kylin.job.manager.ExecutableManager;
+import org.junit.After;
+import org.junit.Before;
+
+/**
+ */
+public abstract class BaseSchedulerTest extends LocalFileMetadataTestCase {
+
+    private DefaultScheduler scheduler;
+
+    protected ExecutableManager jobService;
+
+    @Before
+    public void setup() throws Exception {
+        createTestMetadata();
+        setFinalStatic(ExecutableConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10);
+        jobService = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv());
+        scheduler = DefaultScheduler.getInstance();
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()), new MockJobLock());
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+
+    }
+
+    @After
+    public void after() throws Exception {
+        cleanupTestMetadata();
+    }
+
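+    // Overwrite a static final field via reflection: clear the FINAL flag on the
+    // Field's non-public "modifiers" field, then set the new value. This depends on
+    // JDK internals and is used here only to shorten the scheduler poll interval.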
+    static void setFinalStatic(Field field, Object newValue) throws Exception {
+        field.setAccessible(true);
+
+        Field modifiersField = Field.class.getDeclaredField("modifiers");
+        modifiersField.setAccessible(true);
+        modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL);
+
+        field.set(null, newValue);
+    }
+
+    protected void waitForJobFinish(String jobId) {
+        while (true) {
+            AbstractExecutable job = jobService.getJob(jobId);
+            final ExecutableState status = job.getStatus();
+            if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR || status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED) {
+                break;
+            } else {
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+
+    protected void waitForJobStatus(String jobId, ExecutableState state, long interval) {
+        while (true) {
+            AbstractExecutable job = jobService.getJob(jobId);
+            if (job.getStatus() == state) {
+                break;
+            } else {
+                try {
+                    Thread.sleep(interval);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
new file mode 100644
index 0000000..d50baad
--- /dev/null
+++ b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.impl.threadpool;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.kylin.job.BaseTestExecutable;
+import org.apache.kylin.job.ErrorTestExecutable;
+import org.apache.kylin.job.FailedTestExecutable;
+import org.apache.kylin.job.SelfStopExecutable;
+import org.apache.kylin.job.SucceedTestExecutable;
+import org.apache.kylin.job.execution.DefaultChainedExecutable;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.junit.Test;
+
+/**
+ */
+public class DefaultSchedulerTest extends BaseSchedulerTest {
+
+    @Test
+    public void testSingleTaskJob() throws Exception {
+        DefaultChainedExecutable job = new DefaultChainedExecutable();
+        BaseTestExecutable task1 = new SucceedTestExecutable();
+        job.addTask(task1);
+        jobService.addJob(job);
+        waitForJobFinish(job.getId());
+        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(job.getId()).getState());
+        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState());
+    }
+
+    @Test
+    public void testSucceed() throws Exception {
+        DefaultChainedExecutable job = new DefaultChainedExecutable();
+        BaseTestExecutable task1 = new SucceedTestExecutable();
+        BaseTestExecutable task2 = new SucceedTestExecutable();
+        job.addTask(task1);
+        job.addTask(task2);
+        jobService.addJob(job);
+        waitForJobFinish(job.getId());
+        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(job.getId()).getState());
+        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState());
+        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task2.getId()).getState());
+    }
+
+    @Test
+    public void testSucceedAndFailed() throws Exception {
+        DefaultChainedExecutable job = new DefaultChainedExecutable();
+        BaseTestExecutable task1 = new SucceedTestExecutable();
+        BaseTestExecutable task2 = new FailedTestExecutable();
+        job.addTask(task1);
+        job.addTask(task2);
+        jobService.addJob(job);
+        waitForJobFinish(job.getId());
+        assertEquals(ExecutableState.ERROR, jobService.getOutput(job.getId()).getState());
+        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState());
+        assertEquals(ExecutableState.ERROR, jobService.getOutput(task2.getId()).getState());
+    }
+
+    @Test
+    public void testSucceedAndError() throws Exception {
+        DefaultChainedExecutable job = new DefaultChainedExecutable();
+        BaseTestExecutable task1 = new ErrorTestExecutable();
+        BaseTestExecutable task2 = new SucceedTestExecutable();
+        job.addTask(task1);
+        job.addTask(task2);
+        jobService.addJob(job);
+        waitForJobFinish(job.getId());
+        assertEquals(ExecutableState.ERROR, jobService.getOutput(job.getId()).getState());
+        assertEquals(ExecutableState.ERROR, jobService.getOutput(task1.getId()).getState());
+        assertEquals(ExecutableState.READY, jobService.getOutput(task2.getId()).getState());
+    }
+
+    @Test
+    public void testDiscard() throws Exception {
+        DefaultChainedExecutable job = new DefaultChainedExecutable();
+        BaseTestExecutable task1 = new SelfStopExecutable();
+        job.addTask(task1);
+        jobService.addJob(job);
+        waitForJobStatus(job.getId(), ExecutableState.RUNNING, 500);
+        jobService.discardJob(job.getId());
+        waitForJobFinish(job.getId());
+        assertEquals(ExecutableState.DISCARDED, jobService.getOutput(job.getId()).getState());
+        assertEquals(ExecutableState.DISCARDED, jobService.getOutput(task1.getId()).getState());
+        Thread.sleep(5000);
+        System.out.println(job);
+    }
+
+    @SuppressWarnings("rawtypes")
+    @Test
+    public void testSchedulerPool() throws InterruptedException {
+        ScheduledExecutorService fetchPool = Executors.newScheduledThreadPool(1);
+        final CountDownLatch countDownLatch = new CountDownLatch(3);
+        ScheduledFuture future = fetchPool.scheduleAtFixedRate(new Runnable() {
+            @Override
+            public void run() {
+                countDownLatch.countDown();
+            }
+        }, 5, 5, TimeUnit.SECONDS);
+        assertTrue("countDownLatch should reach zero in 15 secs", countDownLatch.await(20, TimeUnit.SECONDS));
+        assertTrue("future should still running", future.cancel(true));
+
+        final CountDownLatch countDownLatch2 = new CountDownLatch(3);
+        ScheduledFuture future2 = fetchPool.scheduleAtFixedRate(new Runnable() {
+            @Override
+            public void run() {
+                countDownLatch2.countDown();
+                throw new RuntimeException();
+            }
+        }, 5, 5, TimeUnit.SECONDS);
+        assertFalse("countDownLatch2 should NOT reach zero in 15 secs", countDownLatch2.await(20, TimeUnit.SECONDS));
+        assertFalse("future2 should has been stopped", future2.cancel(true));
+
+        final CountDownLatch countDownLatch3 = new CountDownLatch(3);
+        ScheduledFuture future3 = fetchPool.scheduleAtFixedRate(new Runnable() {
+            @Override
+            public void run() {
+                try {
+                    countDownLatch3.countDown();
+                    throw new RuntimeException();
+                } catch (Exception e) {
+                }
+            }
+        }, 5, 5, TimeUnit.SECONDS);
+        assertTrue("countDownLatch3 should reach zero in 15 secs", countDownLatch3.await(20, TimeUnit.SECONDS));
+        assertTrue("future3 should still running", future3.cancel(true));
+    }
+}
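
The three testSchedulerPool cases pin down documented ScheduledExecutorService behavior: scheduleAtFixedRate suppresses all subsequent executions once a run ends in an uncaught exception (which is why future2.cancel() returns false, the task is already dead), while a task that catches its own exceptions keeps its schedule. A minimal self-contained sketch of the defensive pattern, in the same anonymous-class style as the test:

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    public class SafePeriodicTask {
        public static void main(String[] args) {
            ScheduledExecutorService pool = Executors.newScheduledThreadPool(1);
            pool.scheduleAtFixedRate(new Runnable() {
                @Override
                public void run() {
                    try {
                        doWork(); // may throw
                    } catch (Throwable t) {
                        // Swallowing here keeps the schedule alive; letting the
                        // exception escape would silently cancel all future runs.
                        t.printStackTrace();
                    }
                }
            }, 0, 5, TimeUnit.SECONDS);
        }

        static void doWork() {
            throw new RuntimeException("transient failure");
        }
    }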

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/core-storage/pom.xml
----------------------------------------------------------------------
diff --git a/core-storage/pom.xml b/core-storage/pom.xml
index 4bb7695..e17d13f 100644
--- a/core-storage/pom.xml
+++ b/core-storage/pom.xml
@@ -42,12 +42,6 @@
         </dependency>
 
         <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-invertedindex</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-
-        <dependency>
             <groupId>net.sf.ehcache</groupId>
             <artifactId>ehcache</artifactId>
             <version>2.8.1</version>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/engine-spark/pom.xml
----------------------------------------------------------------------
diff --git a/engine-spark/pom.xml b/engine-spark/pom.xml
index 3aa01e3..d2b150e 100644
--- a/engine-spark/pom.xml
+++ b/engine-spark/pom.xml
@@ -86,14 +86,6 @@
         </dependency>
 
         <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-job</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-            <version>${project.parent.version}</version>
-        </dependency>
-
-        <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
             <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
index dd87782..896aa80 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
@@ -25,6 +25,7 @@ import com.google.common.collect.Maps;
 import com.google.common.hash.Hasher;
 import com.google.common.hash.Hashing;
 import com.google.common.primitives.UnsignedBytes;
+
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -66,7 +67,7 @@ import org.apache.kylin.metadata.model.MeasureDesc;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.steps.CreateHTableJob;
 import org.apache.kylin.storage.hbase.steps.CubeHTableUtil;
 import org.apache.spark.SparkConf;
@@ -80,9 +81,11 @@ import org.apache.spark.api.java.function.PairFunction;
 import org.apache.spark.sql.DataFrame;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.hive.HiveContext;
+
 import scala.Tuple2;
 
 import javax.annotation.Nullable;
+
 import java.io.File;
 import java.io.FileFilter;
 import java.io.IOException;


[3/9] incubator-kylin git commit: KYLIN-1010 Decompose project job

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/streaming/PeriodicalStreamBuilderTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/streaming/PeriodicalStreamBuilderTest.java b/job/src/test/java/org/apache/kylin/job/streaming/PeriodicalStreamBuilderTest.java
deleted file mode 100644
index dc6d312..0000000
--- a/job/src/test/java/org/apache/kylin/job/streaming/PeriodicalStreamBuilderTest.java
+++ /dev/null
@@ -1,144 +0,0 @@
-package org.apache.kylin.job.streaming;
-
-import static org.junit.Assert.assertEquals;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.apache.kylin.common.util.TimeUtil;
-import org.apache.kylin.streaming.MicroStreamBatch;
-import org.apache.kylin.streaming.MicroStreamBatchConsumer;
-import org.apache.kylin.streaming.ParsedStreamMessage;
-import org.apache.kylin.streaming.StreamBuilder;
-import org.apache.kylin.streaming.StreamMessage;
-import org.apache.kylin.streaming.StreamParser;
-import org.apache.kylin.streaming.StreamingManager;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-
-/**
- */
-public class PeriodicalStreamBuilderTest extends LocalFileMetadataTestCase {
-
-    private static final Logger logger = LoggerFactory.getLogger(PeriodicalStreamBuilderTest.class);
-
-    @Before
-    public void setup() {
-        this.createTestMetadata();
-
-    }
-
-    @After
-    public void clear() {
-        this.cleanupTestMetadata();
-    }
-
-    private List<StreamMessage> prepareTestData(long start, long end, int count) {
-        double step = (double) (end - start) / (count - 1);
-        long ts = start;
-        int offset = 0;
-        ArrayList<StreamMessage> result = Lists.newArrayList();
-        for (int i = 0; i < count - 1; ++i) {
-            result.add(new StreamMessage(offset++, String.valueOf(ts).getBytes()));
-            ts += step;
-        }
-        result.add(new StreamMessage(offset++, String.valueOf(end).getBytes()));
-        assertEquals(count, result.size());
-        assertEquals(start + "", new String(result.get(0).getRawData()));
-        assertEquals(end + "", new String(result.get(count - 1).getRawData()));
-        return result;
-    }
-
-    @Test
-    public void test() throws ExecutionException, InterruptedException {
-
-        List<BlockingQueue<StreamMessage>> queues = Lists.newArrayList();
-        queues.add(new LinkedBlockingQueue<StreamMessage>());
-        queues.add(new LinkedBlockingQueue<StreamMessage>());
-
-        final long interval = 3000L;
-        final long nextPeriodStart = TimeUtil.getNextPeriodStart(System.currentTimeMillis(), interval);
-
-        final List<Integer> partitionIds = Lists.newArrayList();
-        for (int i = 0; i < queues.size(); i++) {
-            partitionIds.add(i);
-        }
-
-        final MicroStreamBatchConsumer consumer = new MicroStreamBatchConsumer() {
-            @Override
-            public void consume(MicroStreamBatch microStreamBatch) throws Exception {
-                logger.info("consuming batch:" + microStreamBatch.getPartitionId() + " count:" + microStreamBatch.size() + " timestamp:" + microStreamBatch.getTimestamp() + " offset:" + microStreamBatch.getOffset());
-            }
-
-            @Override
-            public void stop() {
-                logger.info("consumer stopped");
-            }
-        };
-        final StreamBuilder streamBuilder = StreamBuilder.newPeriodicalStreamBuilder("test", queues, consumer, nextPeriodStart, interval);
-
-        streamBuilder.setStreamParser(new StreamParser() {
-            @Override
-            public ParsedStreamMessage parse(StreamMessage streamMessage) {
-                return new ParsedStreamMessage(Collections.<String> emptyList(), streamMessage.getOffset(), Long.parseLong(new String(streamMessage.getRawData())), true);
-            }
-        });
-
-        Future<?> future = Executors.newSingleThreadExecutor().submit(streamBuilder);
-        long timeout = nextPeriodStart + interval;
-        int messageCount = 0;
-        int inPeriodMessageCount = 0;
-        int expectedOffset = 0;
-        logger.info("prepare to add StreamMessage");
-        while (true) {
-            long ts = System.currentTimeMillis();
-            if (ts >= timeout + interval) {
-                break;
-            }
-            if (ts >= nextPeriodStart && ts < timeout) {
-                inPeriodMessageCount++;
-            }
-            for (BlockingQueue<StreamMessage> queue : queues) {
-                queue.put(new StreamMessage(messageCount, String.valueOf(ts).getBytes()));
-            }
-            if (expectedOffset == 0 && ts >= timeout) {
-                expectedOffset = messageCount - 1;
-            }
-            messageCount++;
-            Thread.sleep(10);
-        }
-        logger.info("totally put " + messageCount + " StreamMessages");
-        logger.info("totally in period " + inPeriodMessageCount + " StreamMessages");
-
-        for (BlockingQueue<StreamMessage> queue : queues) {
-            queue.put(StreamMessage.EOF);
-        }
-
-        future.get();
-
-        for (BlockingQueue<StreamMessage> queue : queues) {
-            queue.take();
-        }
-
-        final Map<Integer, Long> offsets = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getOffset("test", partitionIds);
-        logger.info("offset:" + offsets);
-        for (Long offset : offsets.values()) {
-            assertEquals(expectedOffset, offset.longValue());
-        }
-
-    }
-}
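
The window arithmetic in this deleted test hinges on TimeUtil.getNextPeriodStart aligning "now" to the next interval boundary. As an assumption about that helper (not a quote of it), the usual shape of such a computation is:

    // Hypothetical reimplementation, for illustration only:
    static long getNextPeriodStart(long nowMillis, long intervalMillis) {
        // Round down to the current boundary, then step one interval forward.
        return (nowMillis / intervalMillis + 1) * intervalMillis;
    }

With interval = 3000, messages stamped inside [nextPeriodStart, nextPeriodStart + interval) form one micro-batch, which is what the inPeriodMessageCount bookkeeping tracks.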

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java b/job/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
deleted file mode 100644
index 075a048..0000000
--- a/job/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package org.apache.kylin.job.streaming;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Random;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.metadata.MetadataManager;
-import org.apache.kylin.metadata.model.ColumnDesc;
-import org.apache.kylin.metadata.model.DataType;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.SortedMultiset;
-import com.google.common.collect.TreeMultiset;
-
-/**
- * Generates fact table data for test_streaming_table (cube streaming).
- */
-public class StreamingTableDataGenerator {
-
-    private static final Logger logger = LoggerFactory.getLogger(StreamingTableDataGenerator.class);
-    private static final ObjectMapper mapper = new ObjectMapper();
-
-    public static List<String> generate(int recordCount, long startTime, long endTime, String tableName) {
-        Preconditions.checkArgument(startTime < endTime);
-        Preconditions.checkArgument(recordCount > 0);
-
-        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        TableDesc tableDesc = MetadataManager.getInstance(kylinConfig).getTableDesc(tableName);
-
-        SortedMultiset<Long> times = TreeMultiset.create();
-        Random r = new Random();
-        for (int i = 0; i < recordCount; i++) {
-            long t = startTime + (long) ((endTime - startTime) * r.nextDouble());
-            times.add(t);
-        }
-
-        List<String> ret = Lists.newArrayList();
-        HashMap<String, String> kvs = Maps.newHashMap();
-        for (long time : times) {
-            kvs.clear();
-            kvs.put("timestamp", String.valueOf(time));
-            for (ColumnDesc columnDesc : tableDesc.getColumns()) {
-                String lowerCaseColumnName = columnDesc.getName().toLowerCase();
-                DataType dataType = columnDesc.getType();
-                if (dataType.isDateTimeFamily()) {
-                    // TimedJsonStreamParser will derive minute_start, hour_start and day_start from the timestamp
-                    continue;
-                } else if (dataType.isStringFamily()) {
-                    char c = (char) ('A' + (int) (26 * r.nextDouble()));
-                    kvs.put(lowerCaseColumnName, String.valueOf(c));
-                } else if (dataType.isIntegerFamily()) {
-                    int v = r.nextInt(10000);
-                    kvs.put(lowerCaseColumnName, String.valueOf(v));
-                } else if (dataType.isNumberFamily()) {
-                    String v = String.format("%.4f", r.nextDouble() * 100);
-                    kvs.put(lowerCaseColumnName, v);
-                }
-            }
-            try {
-                ret.add(mapper.writeValueAsString(kvs));
-            } catch (JsonProcessingException e) {
-                logger.error("error!", e);
-            }
-        }
-
-        return ret;
-    }
-}
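
The generator's output step is plain Jackson: build one Map per row, then serialize it with ObjectMapper.writeValueAsString. A self-contained sketch with hypothetical column names (the mapper is kept static, matching the class above, since ObjectMapper is thread-safe and costly to create):

    import java.util.HashMap;
    import java.util.Map;

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class JsonRowDemo {
        private static final ObjectMapper MAPPER = new ObjectMapper();

        public static void main(String[] args) throws Exception {
            Map<String, String> row = new HashMap<String, String>();
            row.put("timestamp", String.valueOf(System.currentTimeMillis()));
            row.put("category", "A"); // hypothetical string column
            row.put("qty", "42");     // hypothetical integer column
            System.out.println(MAPPER.writeValueAsString(row)); // one JSON line per row
        }
    }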

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/tools/ColumnCardinalityReducerTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/tools/ColumnCardinalityReducerTest.java b/job/src/test/java/org/apache/kylin/job/tools/ColumnCardinalityReducerTest.java
deleted file mode 100644
index 2a9893a..0000000
--- a/job/src/test/java/org/apache/kylin/job/tools/ColumnCardinalityReducerTest.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.tools;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.StringTokenizer;
-
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
-import org.apache.hadoop.mrunit.types.Pair;
-import org.apache.kylin.common.hll.HyperLogLogPlusCounter;
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.cube.kv.RowConstants;
-import org.apache.kylin.source.hive.cardinality.ColumnCardinalityMapper;
-import org.apache.kylin.source.hive.cardinality.ColumnCardinalityReducer;
-import org.junit.Before;
-import org.junit.Test;
-
-/**
- * @author ysong1
- * 
- */
-public class ColumnCardinalityReducerTest {
-
-    public final static String strArr = "abc,tests,test,test,as,sts,test,tss,sets";
-
-    ReduceDriver<IntWritable, BytesWritable, IntWritable, LongWritable> reduceDriver;
-    String localTempDir = System.getProperty("java.io.tmpdir") + File.separator;
-
-    @Before
-    public void setUp() {
-        ColumnCardinalityReducer reducer = new ColumnCardinalityReducer();
-        reduceDriver = ReduceDriver.newReduceDriver(reducer);
-    }
-
-    private byte[] getBytes(String str) throws IOException {
-        HyperLogLogPlusCounter hllc = new HyperLogLogPlusCounter();
-        StringTokenizer tokenizer = new StringTokenizer(str, ColumnCardinalityMapper.DEFAULT_DELIM);
-        int i = 0;
-        while (tokenizer.hasMoreTokens()) {
-            String temp = i + "_" + tokenizer.nextToken();
-            i++;
-            hllc.add(Bytes.toBytes(temp));
-        }
-        ByteBuffer buf = ByteBuffer.allocate(RowConstants.ROWVALUE_BUFFER_SIZE);
-        buf.clear();
-        hllc.writeRegisters(buf);
-        buf.flip();
-        return buf.array();
-    }
-
-    @Test
-    public void testReducer() throws IOException {
-        IntWritable key1 = new IntWritable(1);
-        List<BytesWritable> values1 = new ArrayList<BytesWritable>();
-        values1.add(new BytesWritable(getBytes(strArr)));
-
-        IntWritable key2 = new IntWritable(2);
-        List<BytesWritable> values2 = new ArrayList<BytesWritable>();
-        values2.add(new BytesWritable(getBytes(strArr + " x")));
-
-        IntWritable key3 = new IntWritable(3);
-        List<BytesWritable> values3 = new ArrayList<BytesWritable>();
-        values3.add(new BytesWritable(getBytes(strArr + " xx")));
-
-        IntWritable key4 = new IntWritable(4);
-        List<BytesWritable> values4 = new ArrayList<BytesWritable>();
-        values4.add(new BytesWritable(getBytes(strArr + " xxx")));
-
-        IntWritable key5 = new IntWritable(5);
-        List<BytesWritable> values5 = new ArrayList<BytesWritable>();
-        values5.add(new BytesWritable(getBytes(strArr + " xxxx")));
-
-        reduceDriver.withInput(key1, values1);
-        reduceDriver.withInput(key2, values2);
-        reduceDriver.withInput(key3, values3);
-        reduceDriver.withInput(key4, values4);
-        reduceDriver.withInput(key5, values5);
-
-        List<Pair<IntWritable, LongWritable>> result = reduceDriver.run();
-
-        assertEquals(5, result.size());
-
-        int outputKey1 = result.get(0).getFirst().get();
-        LongWritable value1 = result.get(0).getSecond();
-        assertTrue(outputKey1 == 1);
-        assertTrue((10 == value1.get()) || (9 == value1.get()));
-
-    }
-}
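
The loose assertion at the end of testReducer ("9 or 10") is deliberate: HyperLogLog is a probabilistic cardinality estimator, and the nine tokens in strArr become nine distinct values once getBytes prefixes each with its position index. A minimal sketch of the counter usage, assuming the add(byte[]) and count-estimate accessors seen in this codebase:

    HyperLogLogPlusCounter hllc = new HyperLogLogPlusCounter();
    for (int i = 0; i < 9; i++) {
        hllc.add(("token-" + i).getBytes()); // nine distinct values
    }
    long estimate = hllc.getCountEstimate(); // approximately 9; small error is expected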

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java b/job/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java
deleted file mode 100644
index 8218d51..0000000
--- a/job/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.source.hive;
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Set;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-public class ITHiveSourceTableLoaderTest extends HBaseMetadataTestCase {
-
-    @Before
-    public void setup() throws Exception {
-        super.createTestMetadata();
-    }
-
-    @After
-    public void after() throws Exception {
-        super.cleanupTestMetadata();
-    }
-
-    @Test
-    public void test() throws IOException {
-        if (!useSandbox())
-            return;
-
-        KylinConfig config = getTestConfig();
-        String[] toLoad = new String[] { "DEFAULT.TEST_KYLIN_FACT", "EDW.TEST_CAL_DT" };
-        Set<String> loaded = HiveSourceTableLoader.reloadHiveTables(toLoad, config);
-
-        assertTrue(loaded.size() == toLoad.length);
-        for (String str : toLoad)
-            assertTrue(loaded.contains(str));
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/source/hive/ITHiveTableReaderTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/source/hive/ITHiveTableReaderTest.java b/job/src/test/java/org/apache/kylin/source/hive/ITHiveTableReaderTest.java
deleted file mode 100644
index 57c0be3..0000000
--- a/job/src/test/java/org/apache/kylin/source/hive/ITHiveTableReaderTest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.source.hive;
-
-import java.io.IOException;
-
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.junit.Assert;
-import org.junit.Test;
-
-/**
- * This test case needs the Hive runtime; please run it with the sandbox.
- * It is in the exclude list of the default profile in pom.xml.
- *
- * @author shaoshi
- */
-public class ITHiveTableReaderTest extends HBaseMetadataTestCase {
-
-    @Test
-    public void test() throws IOException {
-        HiveTableReader reader = new HiveTableReader("default", "test_kylin_fact");
-        int rowNumber = 0;
-        while (reader.next()) {
-            String[] row = reader.getRow();
-            Assert.assertEquals(9, row.length);
-            //System.out.println(ArrayUtils.toString(row));
-            rowNumber++;
-        }
-
-        reader.close();
-        Assert.assertEquals(10000, rowNumber);
-    }
-}
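
One hardening note on the loop above: if an assertion fails mid-iteration, reader.close() never runs. The try/finally shape below (same HiveTableReader API as the test) closes the reader on every exit path:

    HiveTableReader reader = new HiveTableReader("default", "test_kylin_fact");
    try {
        while (reader.next()) {
            String[] row = reader.getRow();
            // ... per-row assertions ...
        }
    } finally {
        reader.close(); // runs even when an assertion throws
    }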

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/source/hive/ITSnapshotManagerTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/source/hive/ITSnapshotManagerTest.java b/job/src/test/java/org/apache/kylin/source/hive/ITSnapshotManagerTest.java
deleted file mode 100644
index 0df632a..0000000
--- a/job/src/test/java/org/apache/kylin/source/hive/ITSnapshotManagerTest.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.source.hive;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-
-import org.apache.kylin.dict.lookup.SnapshotManager;
-import org.apache.kylin.dict.lookup.SnapshotTable;
-import org.apache.kylin.metadata.MetadataManager;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.apache.kylin.source.ReadableTable;
-import org.apache.kylin.source.ReadableTable.TableReader;
-import org.apache.kylin.source.SourceFactory;
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-/**
- * @author yangli9
- * 
- */
-public class ITSnapshotManagerTest extends HBaseMetadataTestCase {
-
-    SnapshotManager snapshotMgr;
-
-    @Before
-    public void setup() throws Exception {
-        createTestMetadata();
-        snapshotMgr = SnapshotManager.getInstance(getTestConfig());
-    }
-
-    @After
-    public void after() throws Exception {
-        cleanupTestMetadata();
-    }
-
-    @Test
-    public void basicTest() throws Exception {
-        String tableName = "EDW.TEST_SITES";
-        TableDesc tableDesc = MetadataManager.getInstance(getTestConfig()).getTableDesc(tableName);
-        ReadableTable hiveTable = SourceFactory.createReadableTable(tableDesc);
-        String snapshotPath = snapshotMgr.buildSnapshot(hiveTable, tableDesc).getResourcePath();
-
-        snapshotMgr.wipeoutCache();
-
-        SnapshotTable snapshot = snapshotMgr.getSnapshotTable(snapshotPath);
-
-        // compare hive & snapshot
-        TableReader hiveReader = hiveTable.getReader();
-        TableReader snapshotReader = snapshot.getReader();
-
-        while (true) {
-            boolean hiveNext = hiveReader.next();
-            boolean snapshotNext = snapshotReader.next();
-            assertEquals(hiveNext, snapshotNext);
-
-            if (!hiveNext)
-                break;
-
-            String[] hiveRow = hiveReader.getRow();
-            String[] snapshotRow = snapshotReader.getRow();
-            assertArrayEquals(hiveRow, snapshotRow);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/resources/data/flat_table/000000_0
----------------------------------------------------------------------
diff --git a/job/src/test/resources/data/flat_table/000000_0 b/job/src/test/resources/data/flat_table/000000_0
deleted file mode 100644
index 8b1b7cc..0000000
Binary files a/job/src/test/resources/data/flat_table/000000_0 and /dev/null differ


[9/9] incubator-kylin git commit: KYLIN-1010 Decompose project job

Posted by li...@apache.org.
KYLIN-1010 Decompose project job


Project: http://git-wip-us.apache.org/repos/asf/incubator-kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-kylin/commit/6c59e107
Tree: http://git-wip-us.apache.org/repos/asf/incubator-kylin/tree/6c59e107
Diff: http://git-wip-us.apache.org/repos/asf/incubator-kylin/diff/6c59e107

Branch: refs/heads/KYLIN-1010
Commit: 6c59e107794668c397b3741424b63f58c580030c
Parents: 4456bb1
Author: Li, Yang <ya...@ebay.com>
Authored: Thu Sep 17 17:44:40 2015 +0800
Committer: Li, Yang <ya...@ebay.com>
Committed: Thu Sep 17 17:44:40 2015 +0800

----------------------------------------------------------------------
 assembly/pom.xml                                | 170 +++--
 .../engine/spark/BuildCubeWithSparkTest.java    | 148 +++++
 .../kylin/job/BuildCubeWithEngineTest.java      | 283 ++++++++
 .../kylin/job/BuildCubeWithStreamTest.java      | 132 ++++
 .../apache/kylin/job/BuildIIWithEngineTest.java | 250 +++++++
 .../apache/kylin/job/BuildIIWithStreamTest.java | 248 +++++++
 .../java/org/apache/kylin/job/DataGenTest.java  |  56 ++
 .../kylin/job/DeployLocalMetaToRemoteTest.java  |  71 ++
 .../java/org/apache/kylin/job/DeployUtil.java   | 261 ++++++++
 .../org/apache/kylin/job/ExportHBaseData.java   | 160 +++++
 .../job/ITKafkaBasedIIStreamBuilderTest.java    |  85 +++
 .../apache/kylin/job/dataGen/ColumnConfig.java  |  71 ++
 .../kylin/job/dataGen/FactTableGenerator.java   | 647 +++++++++++++++++++
 .../org/apache/kylin/job/dataGen/GenConfig.java |  81 +++
 .../kylin/job/hadoop/invertedindex/IITest.java  | 240 +++++++
 .../job/streaming/CubeStreamConsumerTest.java   |  90 +++
 .../streaming/PeriodicalStreamBuilderTest.java  | 144 +++++
 .../streaming/StreamingTableDataGenerator.java  |  76 +++
 .../hive/ITHiveSourceTableLoaderTest.java       |  58 ++
 .../source/hive/ITHiveTableReaderTest.java      |  49 ++
 .../source/hive/ITSnapshotManagerTest.java      |  83 +++
 build/script/prepare_libs.sh                    |   2 +-
 .../java/org/apache/kylin/job/JobInstance.java  |   1 +
 .../kylin/job/dao/ExecutableOutputPO.java       |   1 +
 .../org/apache/kylin/job/dao/ExecutablePO.java  |   1 +
 .../job/impl/threadpool/BaseSchedulerTest.java  | 101 +++
 .../impl/threadpool/DefaultSchedulerTest.java   | 151 +++++
 core-storage/pom.xml                            |   6 -
 engine-spark/pom.xml                            |   8 -
 .../apache/kylin/engine/spark/SparkCubing.java  |   5 +-
 .../engine/spark/BuildCubeWithSparkTest.java    | 148 -----
 engine-streaming/pom.xml                        |   5 -
 invertedindex/pom.xml                           |  46 +-
 .../dict/CreateInvertedIndexDictionaryJob.java  |  70 ++
 .../job/hadoop/invertedindex/IIBulkLoadJob.java |  74 +++
 .../hadoop/invertedindex/IICreateHFileJob.java  |  81 +++
 .../invertedindex/IICreateHFileMapper.java      |  55 ++
 .../hadoop/invertedindex/IICreateHTableJob.java | 148 +++++
 .../invertedindex/IIDeployCoprocessorCLI.java   | 157 +++++
 .../IIDistinctColumnsCombiner.java              |  58 ++
 .../invertedindex/IIDistinctColumnsJob.java     | 136 ++++
 .../invertedindex/IIDistinctColumnsMapper.java  |  66 ++
 .../invertedindex/IIDistinctColumnsReducer.java |  77 +++
 .../hadoop/invertedindex/InvertedIndexJob.java  | 164 +++++
 .../invertedindex/InvertedIndexMapper.java      |  90 +++
 .../invertedindex/InvertedIndexPartitioner.java |  73 +++
 .../invertedindex/InvertedIndexReducer.java     | 100 +++
 .../apache/kylin/job/invertedindex/IIJob.java   |  50 ++
 .../kylin/job/invertedindex/IIJobBuilder.java   | 230 +++++++
 .../java/org/apache/kylin/job/tools/IICLI.java  | 106 +++
 job/.gitignore                                  |   1 -
 job/.settings/org.eclipse.core.resources.prefs  |   6 -
 job/.settings/org.eclipse.jdt.core.prefs        | 379 -----------
 job/.settings/org.eclipse.jdt.ui.prefs          |   7 -
 job/pom.xml                                     | 314 ---------
 .../dict/CreateInvertedIndexDictionaryJob.java  |  70 --
 .../job/hadoop/invertedindex/IIBulkLoadJob.java |  83 ---
 .../hadoop/invertedindex/IICreateHFileJob.java  |  91 ---
 .../invertedindex/IICreateHFileMapper.java      |  55 --
 .../hadoop/invertedindex/IICreateHTableJob.java | 156 -----
 .../IIDistinctColumnsCombiner.java              |  58 --
 .../invertedindex/IIDistinctColumnsJob.java     | 136 ----
 .../invertedindex/IIDistinctColumnsMapper.java  |  66 --
 .../invertedindex/IIDistinctColumnsReducer.java |  77 ---
 .../hadoop/invertedindex/InvertedIndexJob.java  | 164 -----
 .../invertedindex/InvertedIndexMapper.java      |  90 ---
 .../invertedindex/InvertedIndexPartitioner.java |  73 ---
 .../invertedindex/InvertedIndexReducer.java     | 100 ---
 .../apache/kylin/job/invertedindex/IIJob.java   |  50 --
 .../kylin/job/invertedindex/IIJobBuilder.java   | 230 -------
 .../java/org/apache/kylin/job/tools/IICLI.java  | 106 ---
 .../kylin/job/BuildCubeWithEngineTest.java      | 283 --------
 .../kylin/job/BuildCubeWithStreamTest.java      | 132 ----
 .../apache/kylin/job/BuildIIWithEngineTest.java | 250 -------
 .../apache/kylin/job/BuildIIWithStreamTest.java | 248 -------
 .../java/org/apache/kylin/job/DataGenTest.java  |  56 --
 .../kylin/job/DeployLocalMetaToRemoteTest.java  |  71 --
 .../java/org/apache/kylin/job/DeployUtil.java   | 262 --------
 .../org/apache/kylin/job/ExportHBaseData.java   | 160 -----
 .../job/ITKafkaBasedIIStreamBuilderTest.java    |  85 ---
 .../apache/kylin/job/dataGen/ColumnConfig.java  |  71 --
 .../kylin/job/dataGen/FactTableGenerator.java   | 647 -------------------
 .../org/apache/kylin/job/dataGen/GenConfig.java |  81 ---
 .../job/dataGen/StreamingDataGenerator.java     |  83 ---
 .../kylin/job/hadoop/invertedindex/IITest.java  | 240 -------
 .../job/impl/threadpool/BaseSchedulerTest.java  | 109 ----
 .../impl/threadpool/DefaultSchedulerTest.java   | 150 -----
 .../job/streaming/CubeStreamConsumerTest.java   |  90 ---
 .../streaming/PeriodicalStreamBuilderTest.java  | 144 -----
 .../streaming/StreamingTableDataGenerator.java  |  76 ---
 .../job/tools/ColumnCardinalityReducerTest.java | 115 ----
 .../hive/ITHiveSourceTableLoaderTest.java       |  58 --
 .../source/hive/ITHiveTableReaderTest.java      |  49 --
 .../source/hive/ITSnapshotManagerTest.java      |  83 ---
 job/src/test/resources/data/flat_table/000000_0 | Bin 110778 -> 0 bytes
 .../resources/data/test_cal_dt/part-r-00000     | 366 -----------
 .../expected_result/flat_item/part-r-00000      | Bin 565 -> 0 bytes
 .../jarfile/SampleBadJavaProgram.jarfile        | Bin 1006 -> 0 bytes
 .../resources/jarfile/SampleJavaProgram.jarfile | Bin 1166 -> 0 bytes
 .../test/resources/json/dummy_jobinstance.json  | 195 ------
 pom.xml                                         |   1 -
 query/pom.xml                                   |   6 +
 server/pom.xml                                  |  15 +-
 .../java/org/apache/kylin/rest/DebugTomcat.java |   2 +-
 .../rest/security/RealAclHBaseStorage.java      |   2 +-
 .../apache/kylin/rest/service/CubeService.java  |   2 +-
 .../apache/kylin/rest/service/QueryService.java |   2 +-
 source-hive/pom.xml                             |   6 +
 .../ColumnCardinalityReducerTest.java           | 115 ++++
 source-kafka/pom.xml                            |  11 +-
 storage-hbase/pom.xml                           |   1 -
 .../kylin/storage/hbase/HBaseConnection.java    | 234 +++++++
 .../kylin/storage/hbase/HBaseResourceStore.java |   1 -
 .../storage/hbase/cube/v1/CubeStorageQuery.java |   2 +-
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     |   3 +-
 .../storage/hbase/cube/v2/CubeHBaseScanRPC.java |   2 +-
 .../hbase/ii/InvertedIndexStorageQuery.java     |   2 +-
 .../storage/hbase/steps/HBaseConnection.java    | 234 -------
 .../hbase/steps/HBaseStreamingOutput.java       |   1 +
 .../hbase/util/DeployCoprocessorCLI.java        |   6 +-
 .../hbase/util/GridTableHBaseBenchmark.java     |   2 +-
 .../kylin/storage/hbase/util/PingHBaseCLI.java  |   2 +-
 .../storage/hbase/util/ZookeeperJobLock.java    |   3 +-
 .../hbase/ii/ITInvertedIndexHBaseTest.java      |   2 +-
 .../storage/hbase/steps/HbaseImporter.java      |   1 +
 streaming/pom.xml                               |   6 +-
 .../kylin/job/streaming/CubeStreamConsumer.java |   2 +-
 .../kylin/job/streaming/StreamingBootstrap.java |   2 +-
 .../invertedindex/IIStreamConsumer.java         |   2 +-
 129 files changed, 5757 insertions(+), 6942 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/pom.xml
----------------------------------------------------------------------
diff --git a/assembly/pom.xml b/assembly/pom.xml
index bc758f7..99557fb 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -15,7 +15,7 @@
     <dependencies>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-engine-streaming</artifactId>
+            <artifactId>kylin-source-hive</artifactId>
             <version>${project.parent.version}</version>
         </dependency>
         <dependency>
@@ -35,17 +35,141 @@
         </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-source-hive</artifactId>
+            <artifactId>kylin-engine-spark</artifactId>
             <version>${project.parent.version}</version>
         </dependency>
-
-
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-engine-streaming</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        
         <dependency>
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-invertedindex</artifactId>
             <version>${project.parent.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-streaming</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
         
+        <!-- Env & Test -->
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-core-common</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-core-job</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-storage-hbase</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-annotations</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-mapreduce-client-core</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-minicluster</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.mrunit</groupId>
+            <artifactId>mrunit</artifactId>
+            <classifier>hadoop2</classifier>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-common</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-hadoop2-compat</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-server</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.maven</groupId>
+            <artifactId>maven-model</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-yarn-api</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hive.hcatalog</groupId>
+            <artifactId>hive-hcatalog-core</artifactId>
+            <version>${hive-hcatalog.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-testing-util</artifactId>
+            <version>${hbase-hadoop2.version}</version>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>javax.servlet</groupId>
+                    <artifactId>servlet-api</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>javax.servlet.jsp</groupId>
+                    <artifactId>jsp-api</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
     </dependencies>
 
     <build>
@@ -57,36 +181,6 @@
 
                 <executions>
                     <execution>
-                        <id>shade-streaming</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                        <configuration>
-                            <minimizeJar>false</minimizeJar>
-                            <shadedArtifactAttached>true</shadedArtifactAttached>
-                            <shadedClassifierName>streaming</shadedClassifierName>
-                            <filters>
-                                <filter>
-                                    <artifact>*:*</artifact>
-                                    <excludes>
-                                        <exclude>META-INF/*.SF</exclude>
-                                        <exclude>META-INF/*.DSA</exclude>
-                                        <exclude>META-INF/*.RSA</exclude>
-                                    </excludes>
-                                </filter>
-                            </filters>
-                            <artifactSet>
-                                <excludes>
-                                    <exclude>org.apache.kylin:kylin-invertedindex</exclude>
-                                    <exclude>org.apache.kylin:kylin-engine-mr</exclude>
-                                    <exclude>org.apache.kylin:kylin-source-hive</exclude>
-                                </excludes>
-                            </artifactSet>
-                        </configuration>
-                    </execution>
-                    <execution>
-                        <id>shade-mr</id>
                         <phase>package</phase>
                         <goals>
                             <goal>shade</goal>
@@ -94,7 +188,7 @@
                         <configuration>
                             <minimizeJar>false</minimizeJar>
                             <shadedArtifactAttached>true</shadedArtifactAttached>
-                            <shadedClassifierName>mr</shadedClassifierName>
+                            <shadedClassifierName>job</shadedClassifierName>
                             <filters>
                                 <filter>
                                     <artifact>*:*</artifact>
@@ -105,12 +199,6 @@
                                     </excludes>
                                 </filter>
                             </filters>
-                            <artifactSet>
-                                <excludes>
-                                    <exclude>org.apache.kylin:kylin-invertedindex</exclude>
-                                    <exclude>org.apache.kylin:kylin-engine-streaming</exclude>
-                                </excludes>
-                            </artifactSet>
                         </configuration>
                     </execution>
                 </executions>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/engine/spark/BuildCubeWithSparkTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/engine/spark/BuildCubeWithSparkTest.java b/assembly/src/test/java/org/apache/kylin/engine/spark/BuildCubeWithSparkTest.java
new file mode 100644
index 0000000..d24cc79
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/engine/spark/BuildCubeWithSparkTest.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.engine.spark;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.List;
+import java.util.TimeZone;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.AbstractKylinTestCase;
+import org.apache.kylin.common.util.ClassUtil;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.CubeUpdate;
+import org.apache.kylin.job.DeployUtil;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.execution.DefaultChainedExecutable;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
+import org.apache.kylin.job.lock.MockJobLock;
+import org.apache.kylin.job.manager.ExecutableManager;
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class BuildCubeWithSparkTest {
+
+    private CubeManager cubeManager;
+    private DefaultScheduler scheduler;
+    protected ExecutableManager jobService;
+
+    private static final Log logger = LogFactory.getLog(BuildCubeWithSparkTest.class);
+
+    protected void waitForJob(String jobId) {
+        while (true) {
+            AbstractExecutable job = jobService.getJob(jobId);
+            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) {
+                break;
+            } else {
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+
+    @BeforeClass
+    public static void beforeClass() throws Exception {
+        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
+        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
+    }
+
+    @Before
+    public void before() throws Exception {
+        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
+
+        DeployUtil.initCliWorkDir();
+        DeployUtil.deployMetadata();
+        DeployUtil.overrideJobJarLocations();
+
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        jobService = ExecutableManager.getInstance(kylinConfig);
+        for (String jobId : jobService.getAllJobIds()) {
+            jobService.deleteJob(jobId);
+        }
+        scheduler = DefaultScheduler.getInstance();
+        scheduler.init(new JobEngineConfig(kylinConfig), new MockJobLock());
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+        cubeManager = CubeManager.getInstance(kylinConfig);
+
+    }
+
+    @After
+    public void after() {
+        HBaseMetadataTestCase.staticCleanupTestMetadata();
+    }
+
+    @Test
+    public void test() throws Exception {
+        final CubeSegment segment = createSegment();
+        String confPath = new File(AbstractKylinTestCase.SANDBOX_TEST_DATA).getAbsolutePath();
+        String coprocessor = KylinConfig.getInstanceFromEnv().getCoprocessorLocalJar();
+        logger.info("confPath location:" + confPath);
+        logger.info("coprocessor location:" + coprocessor);
+        final DefaultChainedExecutable cubingJob = new SparkBatchCubingEngine(confPath, coprocessor).createBatchCubingJob(segment, "BuildCubeWithSpark");
+        jobService.addJob(cubingJob);
+        waitForJob(cubingJob.getId());
+        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(cubingJob.getId()).getState());
+    }
+
+    private void clearSegment(String cubeName) throws Exception {
+        CubeInstance cube = cubeManager.getCube(cubeName);
+        // remove all existing segments
+        CubeUpdate cubeBuilder = new CubeUpdate(cube);
+        cubeBuilder.setToRemoveSegs(cube.getSegments().toArray(new CubeSegment[cube.getSegments().size()]));
+        cubeManager.updateCube(cubeBuilder);
+    }
+
+    private CubeSegment createSegment() throws Exception {
+        String cubeName = "test_kylin_cube_with_slr_left_join_empty";
+        clearSegment(cubeName);
+
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        f.setTimeZone(TimeZone.getTimeZone("GMT"));
+
+        // this cube's start date is 0, end date is 20501112000000
+        long dateEnd = f.parse("2050-11-12").getTime();
+
+        return cubeManager.appendSegments(cubeManager.getCube(cubeName), dateEnd);
+
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java b/assembly/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java
new file mode 100644
index 0000000..d7eb3cf
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java
@@ -0,0 +1,283 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.lang.reflect.Method;
+import java.text.SimpleDateFormat;
+import java.util.List;
+import java.util.TimeZone;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.AbstractKylinTestCase;
+import org.apache.kylin.common.util.ClassUtil;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.CubeUpdate;
+import org.apache.kylin.engine.EngineFactory;
+import org.apache.kylin.engine.mr.CubingJob;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.execution.DefaultChainedExecutable;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
+import org.apache.kylin.job.manager.ExecutableManager;
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class BuildCubeWithEngineTest {
+
+    private CubeManager cubeManager;
+    private DefaultScheduler scheduler;
+    protected ExecutableManager jobService;
+
+    private static final Log logger = LogFactory.getLog(BuildCubeWithEngineTest.class);
+
+    protected void waitForJob(String jobId) {
+        while (true) {
+            AbstractExecutable job = jobService.getJob(jobId);
+            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) {
+                break;
+            } else {
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+
+    @BeforeClass
+    public static void beforeClass() throws Exception {
+        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
+        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
+    }
+
+    @Before
+    public void before() throws Exception {
+        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
+
+        DeployUtil.initCliWorkDir();
+        DeployUtil.deployMetadata();
+        DeployUtil.overrideJobJarLocations();
+
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        jobService = ExecutableManager.getInstance(kylinConfig);
+        scheduler = DefaultScheduler.getInstance();
+        scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+        cubeManager = CubeManager.getInstance(kylinConfig);
+        for (String jobId : jobService.getAllJobIds()) {
+            if (jobService.getJob(jobId) instanceof CubingJob) {
+                jobService.deleteJob(jobId);
+            }
+        }
+
+    }
+
+    @After
+    public void after() {
+        HBaseMetadataTestCase.staticCleanupTestMetadata();
+    }
+
+    @Test
+    public void test() throws Exception {
+        DeployUtil.prepareTestDataForNormalCubes("test_kylin_cube_with_slr_left_join_empty");
+        testInner();
+        testLeft();
+    }
+
+    private void testInner() throws Exception {
+        String[] testCase = new String[] { "testInnerJoinCube", "testInnerJoinCube2", };
+        runTestAndAssertSucceed(testCase);
+    }
+
+    private void testLeft() throws Exception {
+        String[] testCase = new String[] { "testLeftJoinCube", "testLeftJoinCube2", };
+        runTestAndAssertSucceed(testCase);
+    }
+
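+    // Runs the named test methods concurrently, one thread per method, then asserts every job they submitted succeeded.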
+    private void runTestAndAssertSucceed(String[] testCase) throws Exception {
+        ExecutorService executorService = Executors.newFixedThreadPool(testCase.length);
+        final CountDownLatch countDownLatch = new CountDownLatch(testCase.length);
+        List<Future<List<String>>> tasks = Lists.newArrayListWithExpectedSize(testCase.length);
+        for (int i = 0; i < testCase.length; i++) {
+            tasks.add(executorService.submit(new TestCallable(testCase[i], countDownLatch)));
+        }
+        countDownLatch.await();
+        try {
+            for (int i = 0; i < tasks.size(); ++i) {
+                Future<List<String>> task = tasks.get(i);
+                final List<String> jobIds = task.get();
+                for (String jobId : jobIds) {
+                    assertJobSucceed(jobId);
+                }
+            }
+        } catch (Exception ex) {
+            logger.error(ex);
+            throw ex;
+        }
+    }
+
+    private void assertJobSucceed(String jobId) {
+        assertEquals("The job '" + jobId + "' is failed.", ExecutableState.SUCCEED, jobService.getOutput(jobId).getState());
+    }
+
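+    // Wraps a private test method so it can be invoked by reflection from the thread pool; counts down the latch when done.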
+    private class TestCallable implements Callable<List<String>> {
+
+        private final String methodName;
+        private final CountDownLatch countDownLatch;
+
+        public TestCallable(String methodName, CountDownLatch countDownLatch) {
+            this.methodName = methodName;
+            this.countDownLatch = countDownLatch;
+        }
+
+        @SuppressWarnings("unchecked")
+        @Override
+        public List<String> call() throws Exception {
+            try {
+                final Method method = BuildCubeWithEngineTest.class.getDeclaredMethod(methodName);
+                method.setAccessible(true);
+                return (List<String>) method.invoke(BuildCubeWithEngineTest.this);
+            } finally {
+                countDownLatch.countDown();
+            }
+        }
+    }
+
+    @SuppressWarnings("unused")
+    // called by reflection
+    private List<String> testInnerJoinCube2() throws Exception {
+        clearSegment("test_kylin_cube_with_slr_empty");
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        f.setTimeZone(TimeZone.getTimeZone("GMT"));
+        long date1 = 0;
+        long date2 = f.parse("2013-01-01").getTime();
+        long date3 = f.parse("2022-01-01").getTime();
+        List<String> result = Lists.newArrayList();
+        result.add(buildSegment("test_kylin_cube_with_slr_empty", date1, date2));
+        result.add(buildSegment("test_kylin_cube_with_slr_empty", date2, date3));
+        return result;
+    }
+
+    @SuppressWarnings("unused")
+    // called by reflection
+    private List<String> testInnerJoinCube() throws Exception {
+        clearSegment("test_kylin_cube_without_slr_empty");
+
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        f.setTimeZone(TimeZone.getTimeZone("GMT"));
+
+        // this cube's start date is 0, end date is 20501112000000
+        long date1 = 0;
+        long date2 = f.parse("2050-01-11").getTime();
+
+        // this cube doesn't support incremental build, always do full build
+
+        List<String> result = Lists.newArrayList();
+        result.add(buildSegment("test_kylin_cube_without_slr_empty", date1, date2));
+        return result;
+    }
+
+    @SuppressWarnings("unused")
+    // called by reflection
+    private List<String> testLeftJoinCube2() throws Exception {
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        f.setTimeZone(TimeZone.getTimeZone("GMT"));
+        List<String> result = Lists.newArrayList();
+        final String cubeName = "test_kylin_cube_without_slr_left_join_empty";
+        // this cube's start date is 0, end date is 20120601000000
+        long dateStart = cubeManager.getCube(cubeName).getDescriptor().getModel().getPartitionDesc().getPartitionDateStart();
+        long dateEnd = f.parse("2012-06-01").getTime();
+
+        clearSegment(cubeName);
+        result.add(buildSegment(cubeName, dateStart, dateEnd));
+
+        // then submit an append job, start date is 20120601000000, end
+        // date is 20220101000000
+        dateStart = f.parse("2012-06-01").getTime();
+        dateEnd = f.parse("2022-01-01").getTime();
+        result.add(buildSegment(cubeName, dateStart, dateEnd));
+
+        // build an empty segment which doesn't have data
+        dateStart = f.parse("2022-01-01").getTime();
+        dateEnd = f.parse("2023-01-01").getTime();
+        result.add(buildSegment(cubeName, dateStart, dateEnd));
+
+        return result;
+
+    }
+
+    @SuppressWarnings("unused")
+    // called by reflection
+    private List<String> testLeftJoinCube() throws Exception {
+        String cubeName = "test_kylin_cube_with_slr_left_join_empty";
+        clearSegment(cubeName);
+
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        f.setTimeZone(TimeZone.getTimeZone("GMT"));
+        long dateStart = cubeManager.getCube(cubeName).getDescriptor().getModel().getPartitionDesc().getPartitionDateStart();
+        long dateEnd = f.parse("2050-11-12").getTime();
+
+        // this cube's start date is 0, end date is 20501112000000
+        List<String> result = Lists.newArrayList();
+        result.add(buildSegment(cubeName, dateStart, dateEnd));
+        return result;
+
+    }
+
+    private void clearSegment(String cubeName) throws Exception {
+        CubeInstance cube = cubeManager.getCube(cubeName);
+        // remove all existing segments
+        CubeUpdate cubeBuilder = new CubeUpdate(cube);
+        cubeBuilder.setToRemoveSegs(cube.getSegments().toArray(new CubeSegment[cube.getSegments().size()]));
+        cubeManager.updateCube(cubeBuilder);
+    }
+
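+    // Appends a segment ending at endDate, submits the batch cubing job for it, and blocks until the job finishes.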
+    private String buildSegment(String cubeName, long startDate, long endDate) throws Exception {
+        CubeSegment segment = cubeManager.appendSegments(cubeManager.getCube(cubeName), endDate);
+        DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
+        jobService.addJob(job);
+        waitForJob(job.getId());
+        return job.getId();
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/BuildCubeWithStreamTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/BuildCubeWithStreamTest.java b/assembly/src/test/java/org/apache/kylin/job/BuildCubeWithStreamTest.java
new file mode 100644
index 0000000..b02b2f2
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/BuildCubeWithStreamTest.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.UUID;
+
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.AbstractKylinTestCase;
+import org.apache.kylin.common.util.ClassUtil;
+import org.apache.kylin.common.util.DateFormat;
+import org.apache.kylin.job.streaming.BootstrapConfig;
+import org.apache.kylin.job.streaming.StreamingBootstrap;
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.apache.kylin.storage.hbase.util.StorageCleanupJob;
+import org.apache.kylin.streaming.StreamingConfig;
+import org.apache.kylin.streaming.StreamingManager;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Test for the streaming cubing case "test_streaming_table".
+ */
+public class BuildCubeWithStreamTest {
+
+    private static final Logger logger = LoggerFactory.getLogger(BuildCubeWithStreamTest.class);
+    private static final String streamingName = "test_streaming_table_cube";
+    private static final long startTime = DateFormat.stringToMillis("2015-01-01 00:00:00");
+    private static final long endTime = DateFormat.stringToMillis("2015-01-03 00:00:00");
+    private static final long batchInterval = 16 * 60 * 60 * 1000; // 16 hours
+
+    private KylinConfig kylinConfig;
+
+    @BeforeClass
+    public static void beforeClass() throws Exception {
+        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
+        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
+    }
+
+    @Before
+    public void before() throws Exception {
+        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
+        DeployUtil.overrideJobJarLocations();
+
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+
+        // Use a random topic for the Kafka data stream
+        StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingName);
+        streamingConfig.setTopic(UUID.randomUUID().toString());
+        StreamingManager.getInstance(kylinConfig).saveStreamingConfig(streamingConfig);
+
+        DeployUtil.prepareTestDataForStreamingCube(startTime, endTime, streamingConfig);
+    }
+
+    @AfterClass
+    public static void afterClass() throws Exception {
+        backup();
+        HBaseMetadataTestCase.staticCleanupTestMetadata();
+    }
+
+    private static int cleanupOldStorage() throws Exception {
+        String[] args = { "--delete", "true" };
+        int exitCode = ToolRunner.run(new StorageCleanupJob(), args);
+        return exitCode;
+    }
+
+    private static void backup() throws Exception {
+        int exitCode = cleanupOldStorage();
+        if (exitCode == 0) {
+            exportHBaseData();
+        }
+    }
+
+    private static void exportHBaseData() throws IOException {
+        ExportHBaseData export = new ExportHBaseData();
+        export.exportTables();
+        export.tearDown();
+    }
+
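+    // Builds the streaming cube in 16-hour batch windows, launching a one-off bootstrap run per window.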
+    @Test
+    public void test() throws Exception {
+        for (long start = startTime; start < endTime; start += batchInterval) {
+            BootstrapConfig bootstrapConfig = new BootstrapConfig();
+            bootstrapConfig.setStart(start);
+            bootstrapConfig.setEnd(start + batchInterval);
+            bootstrapConfig.setOneOff(true);
+            bootstrapConfig.setPartitionId(0);
+            bootstrapConfig.setStreaming(streamingName);
+            StreamingBootstrap.getInstance(KylinConfig.getInstanceFromEnv()).start(bootstrapConfig);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/BuildIIWithEngineTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/BuildIIWithEngineTest.java b/assembly/src/test/java/org/apache/kylin/job/BuildIIWithEngineTest.java
new file mode 100644
index 0000000..fecb106
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/BuildIIWithEngineTest.java
@@ -0,0 +1,250 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.lang.reflect.Method;
+import java.text.SimpleDateFormat;
+import java.util.List;
+import java.util.TimeZone;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.AbstractKylinTestCase;
+import org.apache.kylin.common.util.ClassUtil;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.IISegment;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
+import org.apache.kylin.job.invertedindex.IIJob;
+import org.apache.kylin.job.invertedindex.IIJobBuilder;
+import org.apache.kylin.job.manager.ExecutableManager;
+import org.apache.kylin.metadata.realization.RealizationStatusEnum;
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.apache.kylin.storage.hbase.util.StorageCleanupJob;
+import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+/**
+ * @author shaoshi
+ */
+public class BuildIIWithEngineTest {
+
+    private JobEngineConfig jobEngineConfig;
+    private IIManager iiManager;
+
+    private DefaultScheduler scheduler;
+    protected ExecutableManager jobService;
+
+    protected static final String[] TEST_II_INSTANCES = new String[] { "test_kylin_ii_inner_join", "test_kylin_ii_left_join" };
+
+    private static final Log logger = LogFactory.getLog(BuildIIWithEngineTest.class);
+
+    protected void waitForJob(String jobId) {
+        while (true) {
+            AbstractExecutable job = jobService.getJob(jobId);
+            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) {
+                break;
+            } else {
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+
+    @BeforeClass
+    public static void beforeClass() throws Exception {
+        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
+    }
+
+    @Before
+    public void before() throws Exception {
+        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
+
+        // DeployUtil.initCliWorkDir();
+        // DeployUtil.deployMetadata();
+        DeployUtil.overrideJobJarLocations();
+
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        jobService = ExecutableManager.getInstance(kylinConfig);
+        scheduler = DefaultScheduler.getInstance();
+        scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+        jobEngineConfig = new JobEngineConfig(kylinConfig);
+        for (String jobId : jobService.getAllJobIds()) {
+            if (jobService.getJob(jobId) instanceof IIJob) {
+                jobService.deleteJob(jobId);
+            }
+        }
+
+        iiManager = IIManager.getInstance(kylinConfig);
+        for (String iiInstance : TEST_II_INSTANCES) {
+
+            IIInstance ii = iiManager.getII(iiInstance);
+            if (ii.getStatus() != RealizationStatusEnum.DISABLED) {
+                ii.setStatus(RealizationStatusEnum.DISABLED);
+                iiManager.updateII(ii);
+            }
+        }
+    }
+
+    @After
+    public void after() throws Exception {
+
+        for (String iiInstance : TEST_II_INSTANCES) {
+            IIInstance ii = iiManager.getII(iiInstance);
+            if (ii.getStatus() != RealizationStatusEnum.READY) {
+                ii.setStatus(RealizationStatusEnum.READY);
+                iiManager.updateII(ii);
+            }
+        }
+    }
+
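+    // Builds the inner-join and left-join II instances concurrently and asserts that every build job succeeds.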
+    @Test
+    @Ignore
+    public void testBuildII() throws Exception {
+
+        String[] testCase = new String[] { "buildIIInnerJoin", "buildIILeftJoin" };
+        ExecutorService executorService = Executors.newFixedThreadPool(testCase.length);
+        final CountDownLatch countDownLatch = new CountDownLatch(testCase.length);
+        List<Future<List<String>>> tasks = Lists.newArrayListWithExpectedSize(testCase.length);
+        for (int i = 0; i < testCase.length; i++) {
+            tasks.add(executorService.submit(new TestCallable(testCase[i], countDownLatch)));
+        }
+        countDownLatch.await();
+        for (int i = 0; i < tasks.size(); ++i) {
+            Future<List<String>> task = tasks.get(i);
+            final List<String> jobIds = task.get();
+            for (String jobId : jobIds) {
+                assertJobSucceed(jobId);
+            }
+        }
+
+    }
+
+    private void assertJobSucceed(String jobId) {
+        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(jobId).getState());
+    }
+
+    private class TestCallable implements Callable<List<String>> {
+
+        private final String methodName;
+        private final CountDownLatch countDownLatch;
+
+        public TestCallable(String methodName, CountDownLatch countDownLatch) {
+            this.methodName = methodName;
+            this.countDownLatch = countDownLatch;
+        }
+
+        @SuppressWarnings("unchecked")
+        @Override
+        public List<String> call() throws Exception {
+            try {
+                final Method method = BuildIIWithEngineTest.class.getDeclaredMethod(methodName);
+                method.setAccessible(true);
+                return (List<String>) method.invoke(BuildIIWithEngineTest.this);
+            } finally {
+                countDownLatch.countDown();
+            }
+        }
+    }
+
+    protected List<String> buildIIInnerJoin() throws Exception {
+        return buildII(TEST_II_INSTANCES[0]);
+    }
+
+    protected List<String> buildIILeftJoin() throws Exception {
+        return buildII(TEST_II_INSTANCES[1]);
+    }
+
+    protected List<String> buildII(String iiName) throws Exception {
+        clearSegment(iiName);
+
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        f.setTimeZone(TimeZone.getTimeZone("GMT"));
+
+        long date1 = 0;
+        long date2 = f.parse("2015-01-01").getTime();
+
+        List<String> result = Lists.newArrayList();
+        result.add(buildSegment(iiName, date1, date2));
+        return result;
+    }
+
+    private void clearSegment(String iiName) throws Exception {
+        IIInstance ii = iiManager.getII(iiName);
+        ii.getSegments().clear();
+        iiManager.updateII(ii);
+    }
+
+    private String buildSegment(String iiName, long startDate, long endDate) throws Exception {
+        IIInstance iiInstance = iiManager.getII(iiName);
+        IISegment segment = iiManager.buildSegment(iiInstance, startDate, endDate);
+        iiInstance.getSegments().add(segment);
+        iiManager.updateII(iiInstance);
+        IIJobBuilder iiJobBuilder = new IIJobBuilder(jobEngineConfig);
+        IIJob job = iiJobBuilder.buildJob(segment, "TEST");
+        jobService.addJob(job);
+        waitForJob(job.getId());
+        return job.getId();
+    }
+
+    private int cleanupOldStorage() throws Exception {
+        String[] args = { "--delete", "true" };
+
+        int exitCode = ToolRunner.run(new StorageCleanupJob(), args);
+        return exitCode;
+    }
+
+    public static void main(String[] args) throws Exception {
+        BuildIIWithEngineTest instance = new BuildIIWithEngineTest();
+
+        BuildIIWithEngineTest.beforeClass();
+        instance.before();
+        instance.testBuildII();
+        instance.after();
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/BuildIIWithStreamTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/BuildIIWithStreamTest.java b/assembly/src/test/java/org/apache/kylin/job/BuildIIWithStreamTest.java
new file mode 100644
index 0000000..5ca3b29
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/BuildIIWithStreamTest.java
@@ -0,0 +1,248 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job;
+
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.TimeZone;
+import java.util.UUID;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingDeque;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.AbstractKylinTestCase;
+import org.apache.kylin.common.util.ClassUtil;
+import org.apache.kylin.common.util.DateFormat;
+import org.apache.kylin.engine.mr.JobBuilderSupport;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.IISegment;
+import org.apache.kylin.invertedindex.model.IIDesc;
+import org.apache.kylin.invertedindex.model.IIJoinedFlatTableDesc;
+import org.apache.kylin.job.common.ShellExecutable;
+import org.apache.kylin.job.constant.ExecutableConstants;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.hadoop.invertedindex.IICreateHTableJob;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.metadata.realization.RealizationStatusEnum;
+import org.apache.kylin.source.hive.HiveTableReader;
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.apache.kylin.streaming.StreamBuilder;
+import org.apache.kylin.streaming.StreamMessage;
+import org.apache.kylin.streaming.invertedindex.IIStreamConsumer;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Builds inverted index segments by replaying rows from an intermediate Hive table as a stream.
+ */
+public class BuildIIWithStreamTest {
+
+    private static final Logger logger = LoggerFactory.getLogger(BuildIIWithStreamTest.class);
+
+    private static final String[] II_NAME = new String[] { "test_kylin_ii_left_join", "test_kylin_ii_inner_join" };
+    private IIManager iiManager;
+    private KylinConfig kylinConfig;
+
+    @BeforeClass
+    public static void beforeClass() throws Exception {
+        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
+    }
+
+    @Before
+    public void before() throws Exception {
+        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
+        DeployUtil.overrideJobJarLocations();
+
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        iiManager = IIManager.getInstance(kylinConfig);
+        for (String iiInstance : II_NAME) {
+
+            IIInstance ii = iiManager.getII(iiInstance);
+            if (ii.getStatus() != RealizationStatusEnum.DISABLED) {
+                ii.setStatus(RealizationStatusEnum.DISABLED);
+                iiManager.updateII(ii);
+            }
+        }
+    }
+
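+    // Generates the Hive statements (use/drop/create/insert) for the II flat table and executes them via the CLI.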
+    private String createIntermediateTable(IIDesc desc, KylinConfig kylinConfig) throws IOException {
+        IIJoinedFlatTableDesc intermediateTableDesc = new IIJoinedFlatTableDesc(desc);
+        JobEngineConfig jobEngineConfig = new JobEngineConfig(kylinConfig);
+        final String uuid = UUID.randomUUID().toString();
+        final String useDatabaseHql = "USE " + kylinConfig.getHiveDatabaseForIntermediateTable() + ";";
+        final String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc);
+        final String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, JobBuilderSupport.getJobWorkingDir(jobEngineConfig, uuid));
+        String insertDataHqls;
+        try {
+            insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobEngineConfig);
+        } catch (IOException e1) {
+            throw new RuntimeException("Failed to generate insert data SQL for intermediate table.", e1);
+        }
+
+        ShellExecutable step = new ShellExecutable();
+        StringBuilder buf = new StringBuilder();
+        buf.append("hive -e \"");
+        buf.append(useDatabaseHql + "\n");
+        buf.append(dropTableHql + "\n");
+        buf.append(createTableHql + "\n");
+        buf.append(insertDataHqls + "\n");
+        buf.append("\"");
+
+        step.setCmd(buf.toString());
+        logger.info(step.getCmd());
+        step.setName(ExecutableConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE);
+        kylinConfig.getCliCommandExecutor().execute(step.getCmd(), null);
+        return intermediateTableDesc.getTableName();
+    }
+
+    private void clearSegment(String iiName) throws Exception {
+        IIInstance ii = iiManager.getII(iiName);
+        ii.getSegments().clear();
+        iiManager.updateII(ii);
+    }
+
+    private IISegment createSegment(String iiName) throws Exception {
+        clearSegment(iiName);
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        f.setTimeZone(TimeZone.getTimeZone("GMT"));
+
+        long date1 = 0;
+        long date2 = f.parse("2015-01-01").getTime();
+        return buildSegment(iiName, date1, date2);
+    }
+
+    private IISegment buildSegment(String iiName, long startDate, long endDate) throws Exception {
+        IIInstance iiInstance = iiManager.getII(iiName);
+        IISegment segment = iiManager.buildSegment(iiInstance, startDate, endDate);
+        iiInstance.getSegments().add(segment);
+        iiManager.updateII(iiInstance);
+        return segment;
+    }
+
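+    // End-to-end streaming II build: materialize the flat table, create the HTable, then replay time-sorted rows through the stream builder.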
+    private void buildII(String iiName) throws Exception {
+        final IIDesc desc = iiManager.getII(iiName).getDescriptor();
+        final String tableName = createIntermediateTable(desc, kylinConfig);
+        logger.info("intermediate table name:" + tableName);
+
+        HiveTableReader reader = new HiveTableReader("default", tableName);
+        final List<TblColRef> tblColRefs = desc.listAllColumns();
+        for (TblColRef tblColRef : tblColRefs) {
+            if (desc.isMetricsCol(tblColRef)) {
+                logger.info("metrics:" + tblColRef.getName());
+            } else {
+                logger.info("dimension:" + tblColRef.getName());
+            }
+        }
+        LinkedBlockingDeque<StreamMessage> queue = new LinkedBlockingDeque<StreamMessage>();
+        final IISegment segment = createSegment(iiName);
+        String[] args = new String[] { "-iiname", iiName, "-htablename", segment.getStorageLocationIdentifier() };
+        ToolRunner.run(new IICreateHTableJob(), args);
+
+        ExecutorService executorService = Executors.newSingleThreadExecutor();
+        final StreamBuilder streamBuilder = StreamBuilder.newLimitedSizeStreamBuilder(iiName, queue, new IIStreamConsumer(iiName, segment.getStorageLocationIdentifier(), segment.getIIDesc(), 0), 0, segment.getIIDesc().getSliceSize());
+
+        List<String[]> sorted = getSortedRows(reader, desc.getTimestampColumn());
+        int count = sorted.size();
+        for (String[] row : sorted) {
+            logger.info("another row: " + StringUtils.join(row, ","));
+            queue.put(parse(row));
+        }
+
+        reader.close();
+        logger.info("total record count:" + count + " htable:" + segment.getStorageLocationIdentifier());
+        queue.put(StreamMessage.EOF);
+        final Future<?> future = executorService.submit(streamBuilder);
+        try {
+            future.get();
+        } catch (Exception e) {
+            logger.error("stream build failed", e);
+            fail("stream build failed");
+        }
+
+        logger.info("stream build finished, htable name:" + segment.getStorageLocationIdentifier());
+    }
+
+    @Test
+    public void test() throws Exception {
+        for (String iiName : II_NAME) {
+            buildII(iiName);
+            IIInstance ii = iiManager.getII(iiName);
+            if (ii.getStatus() != RealizationStatusEnum.READY) {
+                ii.setStatus(RealizationStatusEnum.READY);
+                iiManager.updateII(ii);
+            }
+        }
+    }
+
+    private StreamMessage parse(String[] row) {
+        return new StreamMessage(System.currentTimeMillis(), StringUtils.join(row, ",").getBytes());
+    }
+
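+    // Reads all rows from the intermediate table and sorts them by the timestamp column, since the stream builder expects time-ordered input.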
+    private List<String[]> getSortedRows(HiveTableReader reader, final int tsCol) throws IOException {
+        List<String[]> unsorted = Lists.newArrayList();
+        while (reader.next()) {
+            unsorted.add(reader.getRow());
+        }
+        Collections.sort(unsorted, new Comparator<String[]>() {
+            @Override
+            public int compare(String[] o1, String[] o2) {
+                long t1 = DateFormat.stringToMillis(o1[tsCol]);
+                long t2 = DateFormat.stringToMillis(o2[tsCol]);
+                return Long.compare(t1, t2);
+            }
+        });
+        return unsorted;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/DataGenTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/DataGenTest.java b/assembly/src/test/java/org/apache/kylin/job/DataGenTest.java
new file mode 100644
index 0000000..5c01305
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/DataGenTest.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job;
+
+import static org.junit.Assert.assertTrue;
+
+import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.apache.kylin.job.dataGen.FactTableGenerator;
+import org.apache.kylin.metadata.MetadataManager;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Smoke test for the fact table data generator.
+ */
+public class DataGenTest extends LocalFileMetadataTestCase {
+
+    @Before
+    public void before() throws Exception {
+        this.createTestMetadata();
+        MetadataManager.clearCache();
+    }
+
+    @After
+    public void after() throws Exception {
+        this.cleanupTestMetadata();
+    }
+
+    @Test
+    public void testBasics() throws Exception {
+        String content = FactTableGenerator.generate("test_kylin_cube_with_slr_ready", "10000", "1", null); // default settings
+        System.out.println(content);
+        assertTrue(content.contains("FP-non GTC"));
+        assertTrue(content.contains("ABIN"));
+
+        DeployUtil.overrideFactTableData(content, "default.test_kylin_fact");
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java b/assembly/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java
new file mode 100644
index 0000000..7f12069
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job;
+
+import java.io.File;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.AbstractKylinTestCase;
+import org.apache.kylin.common.util.ClassUtil;
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * This test case is ONLY for dev use, it deploys local meta to sandbox
+ */
+@Ignore("dev use only")
+public class DeployLocalMetaToRemoteTest {
+
+    private static final Log logger = LogFactory.getLog(DeployLocalMetaToRemoteTest.class);
+
+    @BeforeClass
+    public static void beforeClass() throws Exception {
+        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
+        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
+    }
+
+    @Before
+    public void before() throws Exception {
+        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
+
+        DeployUtil.initCliWorkDir();
+        DeployUtil.deployMetadata();
+        DeployUtil.overrideJobJarLocations();
+
+    }
+
+    @After
+    public void after() {
+        HBaseMetadataTestCase.staticCleanupTestMetadata();
+    }
+
+    @Test
+    public void test() throws Exception {
+        System.out.println("blank");
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
new file mode 100644
index 0000000..045f608
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
@@ -0,0 +1,261 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.persistence.ResourceTool;
+import org.apache.kylin.common.util.AbstractKylinTestCase;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeUpdate;
+import org.apache.kylin.job.dataGen.FactTableGenerator;
+import org.apache.kylin.job.streaming.KafkaDataLoader;
+import org.apache.kylin.job.streaming.StreamingTableDataGenerator;
+import org.apache.kylin.metadata.MetadataManager;
+import org.apache.kylin.metadata.model.ColumnDesc;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.source.hive.HiveClient;
+import org.apache.kylin.streaming.StreamMessage;
+import org.apache.kylin.streaming.StreamingConfig;
+import org.apache.kylin.streaming.TimedJsonStreamParser;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+public class DeployUtil {
+    private static final Logger logger = LoggerFactory.getLogger(DeployUtil.class);
+
+    public static void initCliWorkDir() throws IOException {
+        execCliCommand("rm -rf " + getHadoopCliWorkingDir());
+        execCliCommand("mkdir -p " + config().getKylinJobLogDir());
+    }
+
+    public static void deployMetadata() throws IOException {
+        // install metadata to hbase
+        ResourceTool.reset(config());
+        ResourceTool.copy(KylinConfig.createInstanceFromUri(AbstractKylinTestCase.LOCALMETA_TEST_DATA), config());
+
+        // update cube desc signature.
+        for (CubeInstance cube : CubeManager.getInstance(config()).listAllCubes()) {
+            cube.getDescriptor().setSignature(cube.getDescriptor().calculateSignature());
+            CubeUpdate cubeBuilder = new CubeUpdate(cube);
+            CubeManager.getInstance(config()).updateCube(cubeBuilder);
+        }
+    }
+
+    public static void overrideJobJarLocations() {
+        File jobJar = getJobJarFile();
+        File coprocessorJar = getCoprocessorJarFile();
+
+        config().overrideMRJobJarPath(jobJar.getAbsolutePath());
+        config().overrideCoprocessorLocalJar(coprocessorJar.getAbsolutePath());
+        config().overrideSparkJobJarPath(getSparkJobJarFile().getAbsolutePath());
+    }
+
+    private static String getPomVersion() {
+        try {
+            MavenXpp3Reader pomReader = new MavenXpp3Reader();
+            Model model = pomReader.read(new FileReader("../pom.xml"));
+            return model.getVersion();
+        } catch (Exception e) {
+            throw new RuntimeException(e.getMessage(), e);
+        }
+    }
+
+    private static File getJobJarFile() {
+        return new File("../assembly/target", "kylin-job-" + getPomVersion() + "-job.jar");
+    }
+
+    private static File getCoprocessorJarFile() {
+        return new File("../storage-hbase/target", "kylin-storage-hbase-" + getPomVersion() + "-coprocessor.jar");
+    }
+
+    private static File getSparkJobJarFile() {
+        return new File("../engine-spark/target", "kylin-engine-spark-" + getPomVersion() + "-job.jar");
+    }
+
+    private static void execCliCommand(String cmd) throws IOException {
+        config().getCliCommandExecutor().execute(cmd);
+    }
+
+    private static String getHadoopCliWorkingDir() {
+        return config().getCliWorkingDir();
+    }
+
+    private static KylinConfig config() {
+        return KylinConfig.getInstanceFromEnv();
+    }
+
+    // ============================================================================
+
+    static final String TABLE_CAL_DT = "edw.test_cal_dt";
+    static final String TABLE_CATEGORY_GROUPINGS = "default.test_category_groupings";
+    static final String TABLE_KYLIN_FACT = "default.test_kylin_fact";
+    static final String TABLE_SELLER_TYPE_DIM = "edw.test_seller_type_dim";
+    static final String TABLE_SITES = "edw.test_sites";
+
+    static final String[] TABLE_NAMES = new String[] { TABLE_CAL_DT, TABLE_CATEGORY_GROUPINGS, TABLE_KYLIN_FACT, TABLE_SELLER_TYPE_DIM, TABLE_SITES };
+
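+    // Generates random fact table data (unless provided data is requested), stores it, and deploys all test tables to Hive.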
+    public static void prepareTestDataForNormalCubes(String cubeName) throws Exception {
+
+        String factTableName = TABLE_KYLIN_FACT.toUpperCase();
+        String content = null;
+
+        boolean buildCubeUsingProvidedData = Boolean.parseBoolean(System.getProperty("buildCubeUsingProvidedData"));
+        if (!buildCubeUsingProvidedData) {
+            System.out.println("build cube with random dataset");
+            // data is generated according to cube descriptor and saved in resource store
+            content = FactTableGenerator.generate(cubeName, "10000", "0.6", null);
+            assert content != null;
+            overrideFactTableData(content, factTableName);
+        } else {
+            System.out.println("build normal cubes with provided dataset");
+        }
+
+        deployHiveTables();
+    }
+
+    public static void prepareTestDataForStreamingCube(long startTime, long endTime, StreamingConfig streamingConfig) throws IOException {
+        CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(streamingConfig.getCubeName());
+        List<String> data = StreamingTableDataGenerator.generate(10000, startTime, endTime, cubeInstance.getFactTable());
+        TableDesc tableDesc = cubeInstance.getFactTableDesc();
+
+        //load into kafka
+        KafkaDataLoader.loadIntoKafka(streamingConfig, data);
+        logger.info("Write {} messages into topic {}", data.size(), streamingConfig.getTopic());
+
+        //csv data for H2 use
+        List<TblColRef> tableColumns = Lists.newArrayList();
+        for (ColumnDesc columnDesc : tableDesc.getColumns()) {
+            tableColumns.add(new TblColRef(columnDesc));
+        }
+        TimedJsonStreamParser timedJsonStreamParser = new TimedJsonStreamParser(tableColumns, "formatTs=true");
+        StringBuilder sb = new StringBuilder();
+        for (String json : data) {
+            List<String> rowColumns = timedJsonStreamParser.parse(new StreamMessage(0, json.getBytes())).getStreamMessage();
+            sb.append(StringUtils.join(rowColumns, ","));
+            sb.append(System.getProperty("line.separator"));
+        }
+        overrideFactTableData(sb.toString(), cubeInstance.getFactTable());
+    }
+
+    public static void overrideFactTableData(String factTableContent, String factTableName) throws IOException {
+        // Write to resource store
+        ResourceStore store = ResourceStore.getStore(config());
+
+        InputStream in = new ByteArrayInputStream(factTableContent.getBytes("UTF-8"));
+        String factTablePath = "/data/" + factTableName + ".csv";
+        store.deleteResource(factTablePath);
+        store.putResource(factTablePath, in, System.currentTimeMillis());
+        in.close();
+    }
+
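+    // Copies each table's CSV from the resource store to a local temp file, then (re)creates and loads the Hive tables from those files.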
+    private static void deployHiveTables() throws Exception {
+
+        MetadataManager metaMgr = MetadataManager.getInstance(config());
+
+        // copy data files from the resource store (HBase) rather than from local files
+        File temp = File.createTempFile("temp", ".csv");
+        for (String tablename : TABLE_NAMES) {
+            tablename = tablename.toUpperCase();
+
+            File localBufferFile = new File(temp.getParent() + "/" + tablename + ".csv");
+            localBufferFile.createNewFile();
+
+            InputStream hbaseDataStream = metaMgr.getStore().getResource("/data/" + tablename + ".csv");
+            FileOutputStream localFileStream = new FileOutputStream(localBufferFile);
+            IOUtils.copy(hbaseDataStream, localFileStream);
+
+            hbaseDataStream.close();
+            localFileStream.close();
+
+            localBufferFile.deleteOnExit();
+        }
+        String tableFileDir = temp.getParent();
+        temp.delete();
+
+        HiveClient hiveClient = new HiveClient();
+
+        // create hive tables
+        hiveClient.executeHQL("CREATE DATABASE IF NOT EXISTS EDW");
+        hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CAL_DT.toUpperCase())));
+        hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CATEGORY_GROUPINGS.toUpperCase())));
+        hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_KYLIN_FACT.toUpperCase())));
+        hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_SELLER_TYPE_DIM.toUpperCase())));
+        hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_SITES.toUpperCase())));
+
+        // load data to hive tables
+        // LOAD DATA LOCAL INPATH 'filepath' [OVERWRITE] INTO TABLE tablename
+        hiveClient.executeHQL(generateLoadDataHql(TABLE_CAL_DT, tableFileDir));
+        hiveClient.executeHQL(generateLoadDataHql(TABLE_CATEGORY_GROUPINGS, tableFileDir));
+        hiveClient.executeHQL(generateLoadDataHql(TABLE_KYLIN_FACT, tableFileDir));
+        hiveClient.executeHQL(generateLoadDataHql(TABLE_SELLER_TYPE_DIM, tableFileDir));
+        hiveClient.executeHQL(generateLoadDataHql(TABLE_SITES, tableFileDir));
+    }
+
+    private static String generateLoadDataHql(String tableName, String tableFileDir) {
+        return "LOAD DATA LOCAL INPATH '" + tableFileDir + "/" + tableName.toUpperCase() + ".csv' OVERWRITE INTO TABLE " + tableName.toUpperCase();
+    }
+
+    private static String[] generateCreateTableHql(TableDesc tableDesc) {
+
+        String dropsql = "DROP TABLE IF EXISTS " + tableDesc.getIdentity();
+        StringBuilder ddl = new StringBuilder();
+
+        ddl.append("CREATE TABLE " + tableDesc.getIdentity() + "\n");
+        ddl.append("(" + "\n");
+
+        for (int i = 0; i < tableDesc.getColumns().length; i++) {
+            ColumnDesc col = tableDesc.getColumns()[i];
+            if (i > 0) {
+                ddl.append(",");
+            }
+            ddl.append(col.getName() + " " + getHiveDataType((col.getDatatype())) + "\n");
+        }
+
+        ddl.append(")" + "\n");
+        ddl.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY ','" + "\n");
+        ddl.append("STORED AS TEXTFILE");
+
+        return new String[] { dropsql, ddl.toString() };
+    }
+
+    private static String getHiveDataType(String javaDataType) {
+        // map VARCHAR(n) -> string and INTEGER -> int; any other type passes through lower-cased
+        String hiveDataType = javaDataType.toLowerCase().startsWith("varchar") ? "string" : javaDataType;
+        hiveDataType = javaDataType.toLowerCase().startsWith("integer") ? "int" : hiveDataType;
+
+        return hiveDataType.toLowerCase();
+    }
+}
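
For reference, a minimal standalone sketch (not part of the commit) of the HQL the
two generators above emit. The table, columns, and file path are made up for
illustration; the type mapping mirrors getHiveDataType():

    public class HqlGeneratorSketch {
        public static void main(String[] args) {
            String identity = "EDW.TEST_SITES"; // hypothetical TableDesc.getIdentity()
            String[][] cols = { { "SITE_ID", "integer" },  // -> int
                    { "SITE_NAME", "varchar(256)" } };     // -> string

            StringBuilder ddl = new StringBuilder();
            ddl.append("CREATE TABLE ").append(identity).append("\n(\n");
            for (int i = 0; i < cols.length; i++) {
                if (i > 0)
                    ddl.append(",");
                String t = cols[i][1].toLowerCase();
                String hiveType = t.startsWith("varchar") ? "string" : t.startsWith("integer") ? "int" : t;
                ddl.append(cols[i][0]).append(" ").append(hiveType).append("\n");
            }
            ddl.append(")\nROW FORMAT DELIMITED FIELDS TERMINATED BY ','\nSTORED AS TEXTFILE");

            System.out.println("DROP TABLE IF EXISTS " + identity);
            System.out.println(ddl);
            System.out.println("LOAD DATA LOCAL INPATH '/tmp/TEST_SITES.csv' OVERWRITE INTO TABLE TEST_SITES");
        }
    }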

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/ExportHBaseData.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/ExportHBaseData.java b/assembly/src/test/java/org/apache/kylin/job/ExportHBaseData.java
new file mode 100644
index 0000000..5eb7485
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/ExportHBaseData.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.AbstractKylinTestCase;
+import org.apache.kylin.common.util.CliCommandExecutor;
+import org.apache.kylin.common.util.SSHClient;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.storage.hbase.HBaseConnection;
+import org.apache.kylin.storage.hbase.steps.HBaseMiniclusterHelper;
+
+public class ExportHBaseData {
+
+    KylinConfig kylinConfig;
+    HTableDescriptor[] allTables;
+    Configuration config;
+    HBaseAdmin hbase;
+    CliCommandExecutor cli;
+    String exportHdfsFolder;
+    String exportLocalFolderParent;
+    String exportLocalFolder;
+    String backupArchive;
+    String tableNameBase;
+    long currentTIME;
+
+    public ExportHBaseData() {
+        try {
+            setup();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private void setup() throws IOException {
+
+        KylinConfig.destoryInstance();
+        System.setProperty(KylinConfig.KYLIN_CONF, AbstractKylinTestCase.SANDBOX_TEST_DATA);
+
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        cli = kylinConfig.getCliCommandExecutor();
+
+        currentTIME = System.currentTimeMillis();
+        exportHdfsFolder = kylinConfig.getHdfsWorkingDirectory() + "hbase-export/" + currentTIME + "/";
+        exportLocalFolderParent = BatchConstants.CFG_KYLIN_LOCAL_TEMP_DIR + "hbase-export/";
+        exportLocalFolder = exportLocalFolderParent + currentTIME + "/";
+        backupArchive = exportLocalFolderParent + "hbase-export-at-" + currentTIME + ".tar.gz";
+
+        String metadataUrl = kylinConfig.getMetadataUrl();
+        // split TABLE@HBASE_URL; guard both parts so a URL without '@' cannot throw
+        int cut = metadataUrl.indexOf('@');
+        tableNameBase = cut < 0 ? metadataUrl : metadataUrl.substring(0, cut);
+        String hbaseUrl = cut < 0 ? metadataUrl : metadataUrl.substring(cut + 1);
+
+        HConnection conn = HBaseConnection.get(hbaseUrl);
+        try {
+            hbase = new HBaseAdmin(conn);
+            config = hbase.getConfiguration();
+            allTables = hbase.listTables();
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw e;
+        }
+    }
+
+    public void tearDown() {
+
+        // cleanup hdfs
+        try {
+            if (cli != null && exportHdfsFolder != null) {
+                cli.execute("hadoop fs -rm -r " + exportHdfsFolder);
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        // cleanup sandbox disk
+        try {
+            if (cli != null && exportLocalFolder != null) {
+                cli.execute("rm -r " + exportLocalFolder);
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+
+        // delete archive file on sandbox
+        try {
+            if (cli != null && backupArchive != null) {
+                cli.execute("rm " + backupArchive);
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
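+    // The export pipeline: run the HBase Export MR job per table into HDFS, copy
+    // that HDFS folder to the sandbox disk, tar it, then pull the archive to the
+    // developer machine (scp when running remotely, plain file copy otherwise).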
+    public void exportTables() throws IOException {
+        cli.execute("mkdir -p " + exportLocalFolderParent);
+
+        for (HTableDescriptor table : allTables) {
+            String tName = table.getNameAsString();
+            if (!tName.equals(tableNameBase) && !tName.startsWith(HBaseMiniclusterHelper.SHARED_STORAGE_PREFIX))
+                continue;
+
+            cli.execute("hbase org.apache.hadoop.hbase.mapreduce.Export " + tName + " " + exportHdfsFolder + tName);
+        }
+
+        cli.execute("hadoop fs -copyToLocal " + exportHdfsFolder + " " + exportLocalFolderParent);
+        cli.execute("tar -zcvf " + backupArchive + " --directory=" + exportLocalFolderParent + " " + currentTIME);
+        downloadToLocal();
+    }
+
+    public void downloadToLocal() throws IOException {
+        String localArchive = "../examples/test_case_data/minicluster/hbase-export.tar.gz";
+
+        if (kylinConfig.getRunAsRemoteCommand()) {
+            SSHClient ssh = new SSHClient(kylinConfig.getRemoteHadoopCliHostname(), kylinConfig.getRemoteHadoopCliPort(), kylinConfig.getRemoteHadoopCliUsername(), kylinConfig.getRemoteHadoopCliPassword());
+            try {
+                ssh.scpFileToLocal(backupArchive, localArchive);
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        } else {
+            FileUtils.copyFile(new File(backupArchive), new File(localArchive));
+        }
+    }
+
+    public static void main(String[] args) {
+        ExportHBaseData export = new ExportHBaseData();
+        try {
+            export.exportTables();
+        } catch (IOException e) {
+            e.printStackTrace();
+        } finally {
+            export.tearDown();
+        }
+    }
+}
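
As a side note on the TABLE@HBASE_URL convention parsed in setup() above, a minimal
standalone sketch of the split (class and method names here are hypothetical; the
guard keeps a URL without '@' from throwing):

    public class MetadataUrlSplitSketch {
        static String[] split(String metadataUrl) {
            int cut = metadataUrl.indexOf('@');
            String table = cut < 0 ? metadataUrl : metadataUrl.substring(0, cut);
            String hbaseUrl = cut < 0 ? metadataUrl : metadataUrl.substring(cut + 1);
            return new String[] { table, hbaseUrl };
        }

        public static void main(String[] args) {
            for (String url : new String[] { "kylin_metadata@hbase", "kylin_metadata" }) {
                String[] parts = split(url);
                System.out.println(url + " -> table=" + parts[0] + ", hbaseUrl=" + parts[1]);
            }
        }
    }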

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/assembly/src/test/java/org/apache/kylin/job/ITKafkaBasedIIStreamBuilderTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/ITKafkaBasedIIStreamBuilderTest.java b/assembly/src/test/java/org/apache/kylin/job/ITKafkaBasedIIStreamBuilderTest.java
new file mode 100644
index 0000000..6a615cb
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/ITKafkaBasedIIStreamBuilderTest.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.job;
+
+import java.io.File;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.AbstractKylinTestCase;
+import org.apache.kylin.common.util.ClassUtil;
+import org.apache.kylin.job.streaming.StreamingBootstrap;
+import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Long-running integration test that builds an inverted-index (II) stream from Kafka.
+ */
+@Ignore("this test case will break existing metadata store")
+public class ITKafkaBasedIIStreamBuilderTest {
+
+    private static final Logger logger = LoggerFactory.getLogger(ITKafkaBasedIIStreamBuilderTest.class);
+
+    private KylinConfig kylinConfig;
+
+    @BeforeClass
+    public static void beforeClass() throws Exception {
+        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml references this property
+    }
+
+    @Before
+    public void before() throws Exception {
+        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
+
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        DeployUtil.initCliWorkDir();
+        DeployUtil.deployMetadata();
+        DeployUtil.overrideJobJarLocations();
+    }
+
+    @Test
+    public void test() throws Exception {
+        final StreamingBootstrap bootstrap = StreamingBootstrap.getInstance(kylinConfig);
+        bootstrap.start("eagle", 0);
+        Thread.sleep(30 * 60 * 1000);
+        logger.info("time is up, stop streaming");
+        bootstrap.stop();
+        Thread.sleep(5 * 1000);
+    }
+}
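
One possible refinement (a sketch only, not in the commit): the fixed 30-minute
window in test() could be read from a system property so CI runs can shorten it;
the property name below is hypothetical:

    public class StreamingDurationSketch {
        public static void main(String[] args) throws InterruptedException {
            // Default to 30 minutes, matching the test above.
            long minutes = Long.getLong("kylin.test.streaming.duration.minutes", 30L);
            System.out.println("streaming for " + minutes + " minute(s)");
            Thread.sleep(minutes * 60 * 1000);
        }
    }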



[2/9] incubator-kylin git commit: KYLIN-1010 Decompose project job

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/resources/data/test_cal_dt/part-r-00000
----------------------------------------------------------------------
diff --git a/job/src/test/resources/data/test_cal_dt/part-r-00000 b/job/src/test/resources/data/test_cal_dt/part-r-00000
deleted file mode 100644
index 206c349..0000000
--- a/job/src/test/resources/data/test_cal_dt/part-r-00000
+++ /dev/null
@@ -1,366 +0,0 @@
-[... 366 rows of TEST_CAL_DT calendar-dimension CSV data (one row per 2013 calendar date) deleted with the file; rows elided here ...]
-2013-02-08,2013-01-01,2013-01-01,2013-02-01,2013-02-03,0,-3,-9,-42,-292,0,-3,-10,-42,-42,41312,39,39,8,6,6,5901,2,2,1358,1,453,114,2013-12-31,2013-03-31,2013-02-28,2013-02-09,08-Feb-2013,Feb 8th 2013,Fri 02-08-13,1,0,0,0,2012-12-21,365,90,28,7,2012-12-30,2012-12-30,2013-01-27,2013-02-03,2013-02-04,2013-02-08,Fri       ,2013M02,Feb-2013,N,2013M02   ,N,Year 2013 - Quarter 01,2013Q01   ,N,6,2013,2013-02-03,2013-02-09,N,Wk.06 - 13,2013-02-03 00:00:00,2013-02-09 00:00:00,2013W06   ,2013W06   ,02/03/13 - 02/09/13,02/03 - 02/09,2013,N,2012-02-08,2011-02-08,2012-11-08,2012-08-08,2013-01-08,2012-12-08,2013-02-01,2013-01-25,0,0,0,0,0,0,0,0,2,1,6,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-02-14,2013-01-01,2013-01-01,2013-02-01,2013-02-10,0,-3,-9,-41,-286,0,-3,-10,-41,-41,41318,45,45,14,5,7,5902,2,2,1358,1,453,114,2013-12-31,2013-03-31,2013-02-28,2013-02-16,14-Feb-2013,Feb 14th 2013,Thu 02-14-13,1,0,0,0,2012-12-21,365,90,28,7,2012-12-30,2012-12-30,2013-01-27,2013-02-10,2013-02-11,2013-02-14,Thu       ,2013M02,Feb-2013,N,2013M02   ,N,Year 2013 - Quarter 01,2013Q01   ,N,7,2013,2013-02-10,2013-02-16,N,Wk.07 - 13,2013-02-10 00:00:00,2013-02-16 00:00:00,2013W07   ,2013W07   ,02/10/13 - 02/16/13,02/10 - 02/16,2013,N,2012-02-14,2011-02-14,2012-11-14,2012-08-14,2013-01-14,2012-12-14,2013-02-07,2013-01-31,0,0,0,0,0,0,0,0,2,1,7,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-09-21,2013-01-01,2013-07-01,2013-09-01,2013-09-15,0,-1,-2,-10,-67,0,-1,-3,-10,-10,41537,264,83,21,7,38,5933,3,9,1365,3,455,114,2013-12-31,2013-09-30,2013-09-30,2013-09-21,21-Sep-2013,Sep 21st 2013,Sat 09-21-13,1,0,0,0,2013-09-21,365,92,30,7,2012-12-30,2013-06-30,2013-08-25,2013-09-15,2013-09-16,2013-09-21,Sat       ,2013M09,Sep-2013,N,2013M09   ,N,Year 2013 - Quarter 03,2013Q03   ,N,38,2013,2013-09-15,2013-09-21,N,Wk.38 - 13,2013-09-15 00:00:00,2013-09-21 00:00:00,2013W38   ,2013W38   ,09/15/13 - 09/21/13,09/15 - 09/21,2013,N,2012-09-21,2011-09-21,2013-06-21,2013-03-21,2013-08-21,2013-07-21,2013-09-14,2013-09-07,0,0,0,0,0,0,0,0,9,3,38,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-03-19,2013-01-01,2013-01-01,2013-03-01,2013-03-17,0,-3,-8,-36,-253,0,-3,-9,-36,-36,41351,78,78,19,3,12,5907,3,3,1359,1,453,114,2013-12-31,2013-03-31,2013-03-31,2013-03-23,19-Mar-2013,Mar 19th 2013,Tue 03-19-13,1,0,0,0,2012-12-21,365,90,31,7,2012-12-30,2012-12-30,2013-02-24,2013-03-17,2013-03-18,2013-03-19,Tue       ,2013M03,Mar-2013,N,2013M03   ,N,Year 2013 - Quarter 01,2013Q01   ,N,12,2013,2013-03-17,2013-03-23,N,Wk.12 - 13,2013-03-17 00:00:00,2013-03-23 00:00:00,2013W12   ,2013W12   ,03/17/13 - 03/23/13,03/17 - 03/23,2013,N,2012-03-19,2011-03-19,2012-12-19,2012-09-19,2013-02-19,2013-01-19,2013-03-12,2013-03-05,0,0,0,0,0,0,0,0,3,1,12,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-01-28,2013-01-01,2013-01-01,2013-01-01,2013-01-27,0,-3,-10,-43,-303,0,-3,-10,-43,-43,41301,28,28,28,2,5,5900,1,1,1357,1,453,114,2013-12-31,2013-03-31,2013-01-31,2013-02-02,28-Jan-2013,Jan 28th 2013,Mon 01-28-13,1,0,0,0,2012-12-21,365,90,31,7,2012-12-30,2012-12-30,2013-01-27,2013-01-27,2013-01-28,2013-01-28,Mon       ,2013M01,Jan-2013,N,2013M01   ,N,Year 2013 - Quarter 01,2013Q01   ,N,5,2013,2013-01-27,2013-02-02,N,Wk.05 - 13,2013-01-27 00:00:00,2013-02-02 00:00:00,2013W05   ,2013W05   ,01/27/13 - 02/02/13,01/27 - 02/02,2013,N,2012-01-28,2011-01-28,2012-10-28,2012-07-28,2012-12-28,2012-11-28,2013-01-21,2013-01-14,0,0,0,0,0,0,0,0,2,1,5,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-08-03,2013-01-01,2013-07-01,2013-08-01,2013-07-28,0,-1,-3,-17,-116,0,-1,-4,-17,-17,41488,215,34,3,7,31,5926,2,8,1364,3,455,114,2013-12-31,2013-09-30,2013-08-31,2013-08-03,03-Aug-2013,Aug 3rd 2013,Sat 08-03-13,1,0,0,0,2013-06-21,365,92,31,7,2012-12-30,2013-06-30,2013-07-28,2013-07-28,2013-07-29,2013-08-03,Sat       ,2013M08,Aug-2013,N,2013M08   ,N,Year 2013 - Quarter 03,2013Q03   ,N,31,2013,2013-07-28,2013-08-03,N,Wk.31 - 13,2013-07-28 00:00:00,2013-08-03 00:00:00,2013W31   ,2013W31   ,07/28/13 - 08/03/13,07/28 - 08/03,2013,N,2012-08-03,2011-08-03,2013-05-03,2013-02-03,2013-07-03,2013-06-03,2013-07-27,2013-07-20,0,0,0,0,0,0,0,0,8,3,31,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-06-14,2013-01-01,2013-04-01,2013-06-01,2013-06-09,0,-2,-5,-24,-166,0,-2,-6,-24,-24,41438,165,75,14,6,24,5919,3,6,1362,2,454,114,2013-12-31,2013-06-30,2013-06-30,2013-06-15,14-Jun-2013,Jun 14th 2013,Fri 06-14-13,1,0,0,0,2013-03-21,365,91,30,7,2012-12-30,2013-03-31,2013-05-26,2013-06-09,2013-06-10,2013-06-14,Fri       ,2013M06,Jun-2013,N,2013M06   ,N,Year 2013 - Quarter 02,2013Q02   ,N,24,2013,2013-06-09,2013-06-15,N,Wk.24 - 13,2013-06-09 00:00:00,2013-06-15 00:00:00,2013W24   ,2013W24   ,06/09/13 - 06/15/13,06/09 - 06/15,2013,N,2012-06-14,2011-06-14,2013-03-14,2012-12-14,2013-05-14,2013-04-14,2013-06-07,2013-05-31,0,0,0,0,0,0,0,0,6,2,24,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-07-08,2013-01-01,2013-07-01,2013-07-01,2013-07-07,0,-1,-4,-20,-142,0,-1,-5,-20,-20,41462,189,8,8,2,28,5923,1,7,1363,3,455,114,2013-12-31,2013-09-30,2013-07-31,2013-07-13,08-Jul-2013,Jul 8th 2013,Mon 07-08-13,1,0,0,0,2013-06-21,365,92,31,7,2012-12-30,2013-06-30,2013-06-30,2013-07-07,2013-07-08,2013-07-08,Mon       ,2013M07,Jul-2013,N,2013M07   ,N,Year 2013 - Quarter 03,2013Q03   ,N,28,2013,2013-07-07,2013-07-13,N,Wk.28 - 13,2013-07-07 00:00:00,2013-07-13 00:00:00,2013W28   ,2013W28   ,07/07/13 - 07/13/13,07/07 - 07/13,2013,N,2012-07-08,2011-07-08,2013-04-08,2013-01-08,2013-06-08,2013-05-08,2013-07-01,2013-06-24,0,0,0,0,0,0,0,0,7,3,28,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-11-30,2013-01-01,2013-10-01,2013-11-01,2013-11-24,0,0,0,0,3,0,0,0,0,0,41607,334,61,30,7,48,5943,2,11,1367,4,456,114,2013-12-31,2013-12-31,2013-11-30,2013-11-30,30-Nov-2013,Nov 30th 2013,Sat 11-30-13,0,0,0,0,2013-09-21,365,92,30,7,2012-12-30,2013-09-29,2013-11-24,2013-11-24,2013-11-25,2013-11-30,Sat       ,2013M11,Nov-2013,N,2013M11   ,N,Year 2013 - Quarter 04,2013Q04   ,N,48,2013,2013-11-24,2013-11-30,N,Wk.48 - 13,2013-11-24 00:00:00,2013-11-30 00:00:00,2013W48   ,2013W48   ,11/24/13 - 11/30/13,11/24 - 11/30,2013,N,2012-11-30,2011-11-30,2013-08-30,2013-05-30,2013-10-30,2013-09-30,2013-11-23,2013-11-16,0,0,0,0,0,0,0,0,12,4,48,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-01-10,2013-01-01,2013-01-01,2013-01-01,2013-01-06,0,-3,-10,-46,-321,0,-3,-11,-46,-46,41283,10,10,10,5,2,5897,1,1,1357,1,453,114,2013-12-31,2013-03-31,2013-01-31,2013-01-12,10-Jan-2013,Jan 10th 2013,Thu 01-10-13,1,0,0,0,2012-12-21,365,90,31,7,2012-12-30,2012-12-30,2012-12-30,2013-01-06,2013-01-07,2013-01-10,Thu       ,2013M01,Jan-2013,N,2013M01   ,N,Year 2013 - Quarter 01,2013Q01   ,N,2,2013,2013-01-06,2013-01-12,N,Wk.02 - 13,2013-01-06 00:00:00,2013-01-12 00:00:00,2013W02   ,2013W02   ,01/06/13 - 01/12/13,01/06 - 01/12,2013,N,2012-01-10,2011-01-10,2012-10-10,2012-07-10,2012-12-10,2012-11-10,2013-01-03,2012-12-27,0,0,0,0,0,0,0,0,1,1,2,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-05-18,2013-01-01,2013-04-01,2013-05-01,2013-05-12,0,-2,-6,-28,-193,0,-2,-7,-28,-28,41411,138,48,18,7,20,5915,2,5,1361,2,454,114,2013-12-31,2013-06-30,2013-05-31,2013-05-18,18-May-2013,May 18th 2013,Sat 05-18-13,1,0,0,0,2013-03-21,365,91,31,7,2012-12-30,2013-03-31,2013-04-28,2013-05-12,2013-05-13,2013-05-18,Sat       ,2013M05,May-2013,N,2013M05   ,N,Year 2013 - Quarter 02,2013Q02   ,N,20,2013,2013-05-12,2013-05-18,N,Wk.20 - 13,2013-05-12 00:00:00,2013-05-18 00:00:00,2013W20   ,2013W20   ,05/12/13 - 05/18/13,05/12 - 05/18,2013,N,2012-05-18,2011-05-18,2013-02-18,2012-11-18,2013-04-18,2013-03-18,2013-05-11,2013-05-04,0,0,0,0,0,0,0,0,5,2,20,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-04-15,2013-01-01,2013-04-01,2013-04-01,2013-04-14,0,-2,-7,-32,-226,0,-2,-8,-32,-32,41378,105,15,15,2,16,5911,1,4,1360,2,454,114,2013-12-31,2013-06-30,2013-04-30,2013-04-20,15-Apr-2013,Apr 15th 2013,Mon 04-15-13,1,0,0,0,2013-03-21,365,91,30,7,2012-12-30,2013-03-31,2013-03-31,2013-04-14,2013-04-15,2013-04-15,Mon       ,2013M04,Apr-2013,N,2013M04   ,N,Year 2013 - Quarter 02,2013Q02   ,N,16,2013,2013-04-14,2013-04-20,N,Wk.16 - 13,2013-04-14 00:00:00,2013-04-20 00:00:00,2013W16   ,2013W16   ,04/14/13 - 04/20/13,04/14 - 04/20,2013,N,2012-04-15,2011-04-15,2013-01-15,2012-10-15,2013-03-15,2013-02-15,2013-04-08,2013-04-01,0,0,0,0,0,0,0,0,4,2,16,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-03-12,2013-01-01,2013-01-01,2013-03-01,2013-03-10,0,-3,-8,-37,-260,0,-3,-9,-37,-37,41344,71,71,12,3,11,5906,3,3,1359,1,453,114,2013-12-31,2013-03-31,2013-03-31,2013-03-16,12-Mar-2013,Mar 12th 2013,Tue 03-12-13,1,0,0,0,2012-12-21,365,90,31,7,2012-12-30,2012-12-30,2013-02-24,2013-03-10,2013-03-11,2013-03-12,Tue       ,2013M03,Mar-2013,N,2013M03   ,N,Year 2013 - Quarter 01,2013Q01   ,N,11,2013,2013-03-10,2013-03-16,N,Wk.11 - 13,2013-03-10 00:00:00,2013-03-16 00:00:00,2013W11   ,2013W11   ,03/10/13 - 03/16/13,03/10 - 03/16,2013,N,2012-03-12,2011-03-12,2012-12-12,2012-09-12,2013-02-12,2013-01-12,2013-03-05,2013-02-26,0,0,0,0,0,0,0,0,3,1,11,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-11-09,2013-01-01,2013-10-01,2013-11-01,2013-11-03,0,0,0,-3,-18,0,0,-1,-3,-3,41586,313,40,9,7,45,5940,2,11,1367,4,456,114,2013-12-31,2013-12-31,2013-11-30,2013-11-09,09-Nov-2013,Nov 9th 2013,Sat 11-09-13,1,1,1,0,2013-09-21,365,92,30,7,2012-12-30,2013-09-29,2013-10-27,2013-11-03,2013-11-04,2013-11-09,Sat       ,2013M11,Nov-2013,N,2013M11   ,N,Year 2013 - Quarter 04,2013Q04   ,N,45,2013,2013-11-03,2013-11-09,N,Wk.45 - 13,2013-11-03 00:00:00,2013-11-09 00:00:00,2013W45   ,2013W45   ,11/03/13 - 11/09/13,11/03 - 11/09,2013,N,2012-11-09,2011-11-09,2013-08-09,2013-05-09,2013-10-09,2013-09-09,2013-11-02,2013-10-26,0,0,0,0,0,0,0,0,11,4,45,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-03-14,2013-01-01,2013-01-01,2013-03-01,2013-03-10,0,-3,-8,-37,-258,0,-3,-9,-37,-37,41346,73,73,14,5,11,5906,3,3,1359,1,453,114,2013-12-31,2013-03-31,2013-03-31,2013-03-16,14-Mar-2013,Mar 14th 2013,Thu 03-14-13,1,0,0,0,2012-12-21,365,90,31,7,2012-12-30,2012-12-30,2013-02-24,2013-03-10,2013-03-11,2013-03-14,Thu       ,2013M03,Mar-2013,N,2013M03   ,N,Year 2013 - Quarter 01,2013Q01   ,N,11,2013,2013-03-10,2013-03-16,N,Wk.11 - 13,2013-03-10 00:00:00,2013-03-16 00:00:00,2013W11   ,2013W11   ,03/10/13 - 03/16/13,03/10 - 03/16,2013,N,2012-03-14,2011-03-14,2012-12-14,2012-09-14,2013-02-14,2013-01-14,2013-03-07,2013-02-28,0,0,0,0,0,0,0,0,3,1,11,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-09-03,2013-01-01,2013-07-01,2013-09-01,2013-09-01,0,-1,-2,-12,-85,0,-1,-3,-12,-12,41519,246,65,3,3,36,5931,3,9,1365,3,455,114,2013-12-31,2013-09-30,2013-09-30,2013-09-07,03-Sep-2013,Sep 3rd 2013,Tue 09-03-13,1,0,0,0,2013-06-21,365,92,30,7,2012-12-30,2013-06-30,2013-08-25,2013-09-01,2013-09-02,2013-09-03,Tue       ,2013M09,Sep-2013,N,2013M09   ,N,Year 2013 - Quarter 03,2013Q03   ,N,36,2013,2013-09-01,2013-09-07,N,Wk.36 - 13,2013-09-01 00:00:00,2013-09-07 00:00:00,2013W36   ,2013W36   ,09/01/13 - 09/07/13,09/01 - 09/07,2013,N,2012-09-03,2011-09-03,2013-06-03,2013-03-03,2013-08-03,2013-07-03,2013-08-27,2013-08-20,0,0,0,0,0,0,0,0,9,3,36,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-05-16,2013-01-01,2013-04-01,2013-05-01,2013-05-12,0,-2,-6,-28,-195,0,-2,-7,-28,-28,41409,136,46,16,5,20,5915,2,5,1361,2,454,114,2013-12-31,2013-06-30,2013-05-31,2013-05-18,16-May-2013,May 16th 2013,Thu 05-16-13,1,0,0,0,2013-03-21,365,91,31,7,2012-12-30,2013-03-31,2013-04-28,2013-05-12,2013-05-13,2013-05-16,Thu       ,2013M05,May-2013,N,2013M05   ,N,Year 2013 - Quarter 02,2013Q02   ,N,20,2013,2013-05-12,2013-05-18,N,Wk.20 - 13,2013-05-12 00:00:00,2013-05-18 00:00:00,2013W20   ,2013W20   ,05/12/13 - 05/18/13,05/12 - 05/18,2013,N,2012-05-16,2011-05-16,2013-02-16,2012-11-16,2013-04-16,2013-03-16,2013-05-09,2013-05-02,0,0,0,0,0,0,0,0,5,2,20,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-10-27,2013-01-01,2013-10-01,2013-10-01,2013-10-27,0,0,-1,-4,-31,0,0,-1,-4,-5,41573,300,27,27,1,44,5939,1,10,1366,4,456,114,2013-12-31,2013-12-31,2013-10-31,2013-11-02,27-Oct-2013,Oct 27th 2013,Sun 10-27-13,1,1,0,0,2013-09-21,365,92,31,7,2012-12-30,2013-09-29,2013-10-27,2013-10-27,2013-10-21,2013-10-27,Sun       ,2013M10,Oct-2013,N,2013M10   ,N,Year 2013 - Quarter 04,2013Q04   ,N,44,2013,2013-10-27,2013-11-02,Y,Wk.44 - 13,2013-10-27 00:00:00,2013-11-02 00:00:00,2013W44   ,2013W44   ,10/27/13 - 11/02/13,10/27 - 11/02,2013,N,2012-10-27,2011-10-27,2013-07-27,2013-04-27,2013-09-27,2013-08-27,2013-10-20,2013-10-13,0,0,0,0,1,0,0,0,11,4,44,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-08-21,2013-01-01,2013-07-01,2013-08-01,2013-08-18,0,-1,-3,-14,-98,0,-1,-4,-14,-14,41506,233,52,21,4,34,5929,2,8,1364,3,455,114,2013-12-31,2013-09-30,2013-08-31,2013-08-24,21-Aug-2013,Aug 21st 2013,Wed 08-21-13,1,0,0,0,2013-06-21,365,92,31,7,2012-12-30,2013-06-30,2013-07-28,2013-08-18,2013-08-19,2013-08-21,Wed       ,2013M08,Aug-2013,N,2013M08   ,N,Year 2013 - Quarter 03,2013Q03   ,N,34,2013,2013-08-18,2013-08-24,N,Wk.34 - 13,2013-08-18 00:00:00,2013-08-24 00:00:00,2013W34   ,2013W34   ,08/18/13 - 08/24/13,08/18 - 08/24,2013,N,2012-08-21,2011-08-21,2013-05-21,2013-02-21,2013-07-21,2013-06-21,2013-08-14,2013-08-07,0,0,0,0,0,0,0,0,8,3,34,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-10-25,2013-01-01,2013-10-01,2013-10-01,2013-10-20,0,0,-1,-5,-33,0,0,-2,-5,-5,41571,298,25,25,6,43,5938,1,10,1366,4,456,114,2013-12-31,2013-12-31,2013-10-31,2013-10-26,25-Oct-2013,Oct 25th 2013,Fri 10-25-13,1,1,0,0,2013-09-21,365,92,31,7,2012-12-30,2013-09-29,2013-09-29,2013-10-20,2013-10-21,2013-10-25,Fri       ,2013M10,Oct-2013,N,2013M10   ,N,Year 2013 - Quarter 04,2013Q04   ,N,43,2013,2013-10-20,2013-10-26,N,Wk.43 - 13,2013-10-20 00:00:00,2013-10-26 00:00:00,2013W43   ,2013W43   ,10/20/13 - 10/26/13,10/20 - 10/26,2013,N,2012-10-25,2011-10-25,2013-07-25,2013-04-25,2013-09-25,2013-08-25,2013-10-18,2013-10-11,0,0,0,0,0,0,0,0,10,4,43,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-09-24,2013-01-01,2013-07-01,2013-09-01,2013-09-22,0,-1,-2,-9,-64,0,-1,-3,-9,-9,41540,267,86,24,3,39,5934,3,9,1365,3,455,114,2013-12-31,2013-09-30,2013-09-30,2013-09-28,24-Sep-2013,Sep 24th 2013,Tue 09-24-13,1,0,0,0,2013-09-21,365,92,30,7,2012-12-30,2013-06-30,2013-08-25,2013-09-22,2013-09-23,2013-09-24,Tue       ,2013M09,Sep-2013,N,2013M09   ,N,Year 2013 - Quarter 03,2013Q03   ,N,39,2013,2013-09-22,2013-09-28,N,Wk.39 - 13,2013-09-22 00:00:00,2013-09-28 00:00:00,2013W39   ,2013W39   ,09/22/13 - 09/28/13,09/22 - 09/28,2013,N,2012-09-24,2011-09-24,2013-06-24,2013-03-24,2013-08-24,2013-07-24,2013-09-17,2013-09-10,0,0,0,0,0,0,0,0,9,3,39,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-01-27,2013-01-01,2013-01-01,2013-01-01,2013-01-27,0,-3,-10,-43,-304,0,-3,-10,-43,-44,41300,27,27,27,1,5,5900,1,1,1357,1,453,114,2013-12-31,2013-03-31,2013-01-31,2013-02-02,27-Jan-2013,Jan 27th 2013,Sun 01-27-13,1,0,0,0,2012-12-21,365,90,31,7,2012-12-30,2012-12-30,2013-01-27,2013-01-27,2013-01-21,2013-01-27,Sun       ,2013M01,Jan-2013,N,2013M01   ,N,Year 2013 - Quarter 01,2013Q01   ,N,5,2013,2013-01-27,2013-02-02,Y,Wk.05 - 13,2013-01-27 00:00:00,2013-02-02 00:00:00,2013W05   ,2013W05   ,01/27/13 - 02/02/13,01/27 - 02/02,2013,N,2012-01-27,2011-01-27,2012-10-27,2012-07-27,2012-12-27,2012-11-27,2013-01-20,2013-01-13,0,0,0,0,0,0,0,0,2,1,5,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-03-10,2013-01-01,2013-01-01,2013-03-01,2013-03-10,0,-3,-8,-37,-262,0,-3,-9,-37,-38,41342,69,69,10,1,11,5906,3,3,1359,1,453,114,2013-12-31,2013-03-31,2013-03-31,2013-03-16,10-Mar-2013,Mar 10th 2013,Sun 03-10-13,1,0,0,0,2012-12-21,365,90,31,7,2012-12-30,2012-12-30,2013-02-24,2013-03-10,2013-03-04,2013-03-10,Sun       ,2013M03,Mar-2013,N,2013M03   ,N,Year 2013 - Quarter 01,2013Q01   ,N,11,2013,2013-03-10,2013-03-16,Y,Wk.11 - 13,2013-03-10 00:00:00,2013-03-16 00:00:00,2013W11   ,2013W11   ,03/10/13 - 03/16/13,03/10 - 03/16,2013,N,2012-03-10,2011-03-10,2012-12-10,2012-09-10,2013-02-10,2013-01-10,2013-03-03,2013-02-24,0,0,0,0,0,0,0,0,3,1,11,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-05-14,2013-01-01,2013-04-01,2013-05-01,2013-05-12,0,-2,-6,-28,-197,0,-2,-7,-28,-28,41407,134,44,14,3,20,5915,2,5,1361,2,454,114,2013-12-31,2013-06-30,2013-05-31,2013-05-18,14-May-2013,May 14th 2013,Tue 05-14-13,1,0,0,0,2013-03-21,365,91,31,7,2012-12-30,2013-03-31,2013-04-28,2013-05-12,2013-05-13,2013-05-14,Tue       ,2013M05,May-2013,N,2013M05   ,N,Year 2013 - Quarter 02,2013Q02   ,N,20,2013,2013-05-12,2013-05-18,N,Wk.20 - 13,2013-05-12 00:00:00,2013-05-18 00:00:00,2013W20   ,2013W20   ,05/12/13 - 05/18/13,05/12 - 05/18,2013,N,2012-05-14,2011-05-14,2013-02-14,2012-11-14,2013-04-14,2013-03-14,2013-05-07,2013-04-30,0,0,0,0,0,0,0,0,5,2,20,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-12-31,2013-01-01,2013-10-01,2013-12-01,2013-12-29,0,0,1,5,34,1,1,1,5,5,41638,365,92,31,3,53,5948,3,12,1368,4,456,114,2013-12-31,2013-12-31,2013-12-31,2013-12-31,31-Dec-2013,Dec 31st 2013,Tue 12-31-13,0,0,0,0,2013-12-21,365,92,31,3,2013-12-29,2013-12-29,2013-12-29,2013-12-29,2013-12-30,2013-12-31,Tue       ,2013M12,Dec-2013,N,2013M12   ,N,Year 2013 - Quarter 04,2013Q04   ,N,1,2014,2013-12-29,2014-01-04,N,Wk.53 - 13,2013-12-29 00:00:00,2013-12-31 00:00:00,2013W53   ,2013W53   ,12/29/13 - 12/31/13,12/29 - 12/31,2013,N,2012-12-31,2011-12-31,2013-09-30,2013-06-30,2013-11-30,2013-10-31,2013-12-24,2013-12-17,0,0,0,0,0,0,0,0,1,1,1,1,0,1,0,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-09-01,2013-01-01,2013-07-01,2013-09-01,2013-09-01,0,-1,-2,-12,-87,0,-1,-3,-12,-13,41517,244,63,1,1,36,5931,3,9,1365,3,455,114,2013-12-31,2013-09-30,2013-09-30,2013-09-07,01-Sep-2013,Sep 1st 2013,Sun 09-01-13,1,0,0,0,2013-06-21,365,92,30,7,2012-12-30,2013-06-30,2013-08-25,2013-09-01,2013-08-26,2013-09-01,Sun       ,2013M09,Sep-2013,Y,2013M09   ,Y,Year 2013 - Quarter 03,2013Q03   ,N,36,2013,2013-09-01,2013-09-07,Y,Wk.36 - 13,2013-09-01 00:00:00,2013-09-07 00:00:00,2013W36   ,2013W36   ,09/01/13 - 09/07/13,09/01 - 09/07,2013,N,2012-09-01,2011-09-01,2013-06-01,2013-03-01,2013-08-01,2013-07-01,2013-08-25,2013-08-18,0,0,0,0,0,0,0,0,9,3,36,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-04-13,2013-01-01,2013-04-01,2013-04-01,2013-04-07,0,-2,-7,-33,-228,0,-2,-8,-33,-33,41376,103,13,13,7,15,5910,1,4,1360,2,454,114,2013-12-31,2013-06-30,2013-04-30,2013-04-13,13-Apr-2013,Apr 13th 2013,Sat 04-13-13,1,0,0,0,2013-03-21,365,91,30,7,2012-12-30,2013-03-31,2013-03-31,2013-04-07,2013-04-08,2013-04-13,Sat       ,2013M04,Apr-2013,N,2013M04   ,N,Year 2013 - Quarter 02,2013Q02   ,N,15,2013,2013-04-07,2013-04-13,N,Wk.15 - 13,2013-04-07 00:00:00,2013-04-13 00:00:00,2013W15   ,2013W15   ,04/07/13 - 04/13/13,04/07 - 04/13,2013,N,2012-04-13,2011-04-13,2013-01-13,2012-10-13,2013-03-13,2013-02-13,2013-04-06,2013-03-30,0,0,0,0,0,0,0,0,4,2,15,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-02-09,2013-01-01,2013-01-01,2013-02-01,2013-02-03,0,-3,-9,-42,-291,0,-3,-10,-42,-42,41313,40,40,9,7,6,5901,2,2,1358,1,453,114,2013-12-31,2013-03-31,2013-02-28,2013-02-09,09-Feb-2013,Feb 9th 2013,Sat 02-09-13,1,0,0,0,2012-12-21,365,90,28,7,2012-12-30,2012-12-30,2013-01-27,2013-02-03,2013-02-04,2013-02-09,Sat       ,2013M02,Feb-2013,N,2013M02   ,N,Year 2013 - Quarter 01,2013Q01   ,N,6,2013,2013-02-03,2013-02-09,N,Wk.06 - 13,2013-02-03 00:00:00,2013-02-09 00:00:00,2013W06   ,2013W06   ,02/03/13 - 02/09/13,02/03 - 02/09,2013,N,2012-02-09,2011-02-09,2012-11-09,2012-08-09,2013-01-09,2012-12-09,2013-02-02,2013-01-26,0,0,0,0,0,0,0,0,2,1,6,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-12-27,2013-01-01,2013-10-01,2013-12-01,2013-12-22,0,0,1,4,30,0,0,0,4,4,41634,361,88,27,6,52,5947,3,12,1368,4,456,114,2013-12-31,2013-12-31,2013-12-31,2013-12-28,27-Dec-2013,Dec 27th 2013,Fri 12-27-13,0,0,0,0,2013-12-21,365,92,31,7,2012-12-30,2013-09-29,2013-11-24,2013-12-22,2013-12-23,2013-12-27,Fri       ,2013M12,Dec-2013,N,2013M12   ,N,Year 2013 - Quarter 04,2013Q04   ,N,52,2013,2013-12-22,2013-12-28,N,Wk.52 - 13,2013-12-22 00:00:00,2013-12-28 00:00:00,2013W52   ,2013W52   ,12/22/13 - 12/28/13,12/22 - 12/28,2013,N,2012-12-27,2011-12-27,2013-09-27,2013-06-27,2013-11-27,2013-10-27,2013-12-20,2013-12-13,0,0,0,0,0,0,0,0,12,4,52,1,0,1,0,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-08-19,2013-01-01,2013-07-01,2013-08-01,2013-08-18,0,-1,-3,-14,-100,0,-1,-4,-14,-14,41504,231,50,19,2,34,5929,2,8,1364,3,455,114,2013-12-31,2013-09-30,2013-08-31,2013-08-24,19-Aug-2013,Aug 19th 2013,Mon 08-19-13,1,0,0,0,2013-06-21,365,92,31,7,2012-12-30,2013-06-30,2013-07-28,2013-08-18,2013-08-19,2013-08-19,Mon       ,2013M08,Aug-2013,N,2013M08   ,N,Year 2013 - Quarter 03,2013Q03   ,N,34,2013,2013-08-18,2013-08-24,N,Wk.34 - 13,2013-08-18 00:00:00,2013-08-24 00:00:00,2013W34   ,2013W34   ,08/18/13 - 08/24/13,08/18 - 08/24,2013,N,2012-08-19,2011-08-19,2013-05-19,2013-02-19,2013-07-19,2013-06-19,2013-08-12,2013-08-05,0,0,0,0,0,0,0,0,8,3,34,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-04-11,2013-01-01,2013-04-01,2013-04-01,2013-04-07,0,-2,-7,-33,-230,0,-2,-8,-33,-33,41374,101,11,11,5,15,5910,1,4,1360,2,454,114,2013-12-31,2013-06-30,2013-04-30,2013-04-13,11-Apr-2013,Apr 11th 2013,Thu 04-11-13,1,0,0,0,2013-03-21,365,91,30,7,2012-12-30,2013-03-31,2013-03-31,2013-04-07,2013-04-08,2013-04-11,Thu       ,2013M04,Apr-2013,N,2013M04   ,N,Year 2013 - Quarter 02,2013Q02   ,N,15,2013,2013-04-07,2013-04-13,N,Wk.15 - 13,2013-04-07 00:00:00,2013-04-13 00:00:00,2013W15   ,2013W15   ,04/07/13 - 04/13/13,04/07 - 04/13,2013,N,2012-04-11,2011-04-11,2013-01-11,2012-10-11,2013-03-11,2013-02-11,2013-04-04,2013-03-28,0,0,0,0,0,0,0,0,4,2,15,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-12-06,2013-01-01,2013-10-01,2013-12-01,2013-12-01,0,0,1,1,9,0,0,0,1,1,41613,340,67,6,6,49,5944,3,12,1368,4,456,114,2013-12-31,2013-12-31,2013-12-31,2013-12-07,06-Dec-2013,Dec 6th 2013,Fri 12-06-13,0,0,0,0,2013-09-21,365,92,31,7,2012-12-30,2013-09-29,2013-11-24,2013-12-01,2013-12-02,2013-12-06,Fri       ,2013M12,Dec-2013,N,2013M12   ,N,Year 2013 - Quarter 04,2013Q04   ,N,49,2013,2013-12-01,2013-12-07,N,Wk.49 - 13,2013-12-01 00:00:00,2013-12-07 00:00:00,2013W49   ,2013W49   ,12/01/13 - 12/07/13,12/01 - 12/07,2013,N,2012-12-06,2011-12-06,2013-09-06,2013-06-06,2013-11-06,2013-10-06,2013-11-29,2013-11-22,0,0,0,0,0,0,0,0,12,4,49,4,0,1,0,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-03-31,2013-01-01,2013-01-01,2013-03-01,2013-03-31,0,-3,-8,-34,-241,0,-2,-8,-34,-35,41363,90,90,31,1,14,5909,3,3,1359,1,453,114,2013-12-31,2013-03-31,2013-03-31,2013-04-06,31-Mar-2013,Mar 31st 2013,Sun 03-31-13,1,0,0,0,2013-03-21,365,90,31,7,2012-12-30,2013-03-31,2013-03-31,2013-03-31,2013-03-25,2013-03-31,Sun       ,2013M03,Mar-2013,N,2013M03   ,N,Year 2013 - Quarter 01,2013Q01   ,N,14,2013,2013-03-31,2013-04-06,Y,Wk.14 - 13,2013-03-31 00:00:00,2013-04-06 00:00:00,2013W14   ,2013W14   ,03/31/13 - 04/06/13,03/31 - 04/06,2013,N,2012-03-31,2011-03-31,2012-12-31,2012-09-30,2013-02-28,2013-01-31,2013-03-24,2013-03-17,0,0,0,0,0,0,0,0,4,2,14,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-09-22,2013-01-01,2013-07-01,2013-09-01,2013-09-22,0,-1,-2,-9,-66,0,-1,-3,-9,-10,41538,265,84,22,1,39,5934,3,9,1365,3,455,114,2013-12-31,2013-09-30,2013-09-30,2013-09-28,22-Sep-2013,Sep 22nd 2013,Sun 09-22-13,1,0,0,0,2013-09-21,365,92,30,7,2012-12-30,2013-06-30,2013-08-25,2013-09-22,2013-09-16,2013-09-22,Sun       ,2013M09,Sep-2013,N,2013M09   ,N,Year 2013 - Quarter 03,2013Q03   ,N,39,2013,2013-09-22,2013-09-28,Y,Wk.39 - 13,2013-09-22 00:00:00,2013-09-28 00:00:00,2013W39   ,2013W39   ,09/22/13 - 09/28/13,09/22 - 09/28,2013,N,2012-09-22,2011-09-22,2013-06-22,2013-03-22,2013-08-22,2013-07-22,2013-09-15,2013-09-08,0,0,0,0,0,0,0,0,9,3,39,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-01-06,2013-01-01,2013-01-01,2013-01-01,2013-01-06,0,-3,-10,-46,-325,0,-3,-11,-46,-47,41279,6,6,6,1,2,5897,1,1,1357,1,453,114,2013-12-31,2013-03-31,2013-01-31,2013-01-12,06-Jan-2013,Jan 6th 2013,Sun 01-06-13,1,0,0,0,2012-12-21,365,90,31,7,2012-12-30,2012-12-30,2012-12-30,2013-01-06,2012-12-31,2013-01-06,Sun       ,2013M01,Jan-2013,N,2013M01   ,N,Year 2013 - Quarter 01,2013Q01   ,N,2,2013,2013-01-06,2013-01-12,Y,Wk.02 - 13,2013-01-06 00:00:00,2013-01-12 00:00:00,2013W02   ,2013W02   ,01/06/13 - 01/12/13,01/06 - 01/12,2013,N,2012-01-06,2011-01-06,2012-10-06,2012-07-06,2012-12-06,2012-11-06,2012-12-30,2012-12-23,0,0,0,0,0,0,0,0,1,1,2,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-01-04,2013-01-01,2013-01-01,2013-01-01,2013-01-01,0,-3,-10,-47,-327,0,-3,-11,-47,-47,41277,4,4,4,6,1,5896,1,1,1357,1,453,114,2013-12-31,2013-03-31,2013-01-31,2013-01-05,04-Jan-2013,Jan 4th 2013,Fri 01-04-13,1,0,0,0,2012-12-21,365,90,31,5,2012-12-30,2012-12-30,2012-12-30,2012-12-30,2012-12-31,2013-01-04,Fri       ,2013M01,Jan-2013,N,2013M01   ,N,Year 2013 - Quarter 01,2013Q01   ,N,1,2013,2012-12-30,2013-01-05,N,Wk.01 - 13,2013-01-01 00:00:00,2013-01-05 00:00:00,2013W01   ,2013W01   ,01/01/13 - 01/05/13,01/01 - 01/05,2013,N,2012-01-04,2011-01-04,2012-10-04,2012-07-04,2012-12-04,2012-11-04,2012-12-28,2012-12-21,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-07-16,2013-01-01,2013-07-01,2013-07-01,2013-07-14,0,-1,-4,-19,-134,0,-1,-5,-19,-19,41470,197,16,16,3,29,5924,1,7,1363,3,455,114,2013-12-31,2013-09-30,2013-07-31,2013-07-20,16-Jul-2013,Jul 16th 2013,Tue 07-16-13,1,0,0,0,2013-06-21,365,92,31,7,2012-12-30,2013-06-30,2013-06-30,2013-07-14,2013-07-15,2013-07-16,Tue       ,2013M07,Jul-2013,N,2013M07   ,N,Year 2013 - Quarter 03,2013Q03   ,N,29,2013,2013-07-14,2013-07-20,N,Wk.29 - 13,2013-07-14 00:00:00,2013-07-20 00:00:00,2013W29   ,2013W29   ,07/14/13 - 07/20/13,07/14 - 07/20,2013,N,2012-07-16,2011-07-16,2013-04-16,2013-01-16,2013-06-16,2013-05-16,2013-07-09,2013-07-02,0,0,0,0,0,0,0,0,7,3,29,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-11-03,2013-01-01,2013-10-01,2013-11-01,2013-11-03,0,0,0,-3,-24,0,0,-1,-3,-4,41580,307,34,3,1,45,5940,2,11,1367,4,456,114,2013-12-31,2013-12-31,2013-11-30,2013-11-09,03-Nov-2013,Nov 3rd 2013,Sun 11-03-13,1,1,1,0,2013-09-21,365,92,30,7,2012-12-30,2013-09-29,2013-10-27,2013-11-03,2013-10-28,2013-11-03,Sun       ,2013M11,Nov-2013,N,2013M11   ,N,Year 2013 - Quarter 04,2013Q04   ,N,45,2013,2013-11-03,2013-11-09,Y,Wk.45 - 13,2013-11-03 00:00:00,2013-11-09 00:00:00,2013W45   ,2013W45   ,11/03/13 - 11/09/13,11/03 - 11/09,2013,N,2012-11-03,2011-11-03,2013-08-03,2013-05-03,2013-10-03,2013-09-03,2013-10-27,2013-10-20,0,0,0,0,0,0,0,0,11,4,45,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-08-25,2013-01-01,2013-07-01,2013-08-01,2013-08-25,0,-1,-3,-13,-94,0,-1,-3,-13,-14,41510,237,56,25,1,35,5930,2,8,1364,3,455,114,2013-12-31,2013-09-30,2013-08-31,2013-08-31,25-Aug-2013,Aug 25th 2013,Sun 08-25-13,1,0,0,0,2013-06-21,365,92,31,7,2012-12-30,2013-06-30,2013-08-25,2013-08-25,2013-08-19,2013-08-25,Sun       ,2013M08,Aug-2013,N,2013M08   ,N,Year 2013 - Quarter 03,2013Q03   ,N,35,2013,2013-08-25,2013-08-31,Y,Wk.35 - 13,2013-08-25 00:00:00,2013-08-31 00:00:00,2013W35   ,2013W35   ,08/25/13 - 08/31/13,08/25 - 08/31,2013,N,2012-08-25,2011-08-25,2013-05-25,2013-02-25,2013-07-25,2013-06-25,2013-08-18,2013-08-11,0,0,0,0,0,0,0,0,9,3,35,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-11-24,2013-01-01,2013-10-01,2013-11-01,2013-11-24,0,0,0,0,-3,0,0,0,0,-1,41601,328,55,24,1,48,5943,2,11,1367,4,456,114,2013-12-31,2013-12-31,2013-11-30,2013-11-30,24-Nov-2013,Nov 24th 2013,Sun 11-24-13,1,1,1,1,2013-09-21,365,92,30,7,2012-12-30,2013-09-29,2013-11-24,2013-11-24,2013-11-18,2013-11-24,Sun       ,2013M11,Nov-2013,N,2013M11   ,N,Year 2013 - Quarter 04,2013Q04   ,N,48,2013,2013-11-24,2013-11-30,Y,Wk.48 - 13,2013-11-24 00:00:00,2013-11-30 00:00:00,2013W48   ,2013W48   ,11/24/13 - 11/30/13,11/24 - 11/30,2013,N,2012-11-24,2011-11-24,2013-08-24,2013-05-24,2013-10-24,2013-09-24,2013-11-17,2013-11-10,0,0,0,0,0,0,0,0,12,4,48,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-05-12,2013-01-01,2013-04-01,2013-05-01,2013-05-12,0,-2,-6,-28,-199,0,-2,-7,-28,-29,41405,132,42,12,1,20,5915,2,5,1361,2,454,114,2013-12-31,2013-06-30,2013-05-31,2013-05-18,12-May-2013,May 12th 2013,Sun 05-12-13,1,0,0,0,2013-03-21,365,91,31,7,2012-12-30,2013-03-31,2013-04-28,2013-05-12,2013-05-06,2013-05-12,Sun       ,2013M05,May-2013,N,2013M05   ,N,Year 2013 - Quarter 02,2013Q02   ,N,20,2013,2013-05-12,2013-05-18,Y,Wk.20 - 13,2013-05-12 00:00:00,2013-05-18 00:00:00,2013W20   ,2013W20   ,05/12/13 - 05/18/13,05/12 - 05/18,2013,N,2012-05-12,2011-05-12,2013-02-12,2012-11-12,2013-04-12,2013-03-12,2013-05-05,2013-04-28,0,0,0,0,0,0,0,0,5,2,20,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-08-17,2013-01-01,2013-07-01,2013-08-01,2013-08-11,0,-1,-3,-15,-102,0,-1,-4,-15,-15,41502,229,48,17,7,33,5928,2,8,1364,3,455,114,2013-12-31,2013-09-30,2013-08-31,2013-08-17,17-Aug-2013,Aug 17th 2013,Sat 08-17-13,1,0,0,0,2013-06-21,365,92,31,7,2012-12-30,2013-06-30,2013-07-28,2013-08-11,2013-08-12,2013-08-17,Sat       ,2013M08,Aug-2013,N,2013M08   ,N,Year 2013 - Quarter 03,2013Q03   ,N,33,2013,2013-08-11,2013-08-17,N,Wk.33 - 13,2013-08-11 00:00:00,2013-08-17 00:00:00,2013W33   ,2013W33   ,08/11/13 - 08/17/13,08/11 - 08/17,2013,N,2012-08-17,2011-08-17,2013-05-17,2013-02-17,2013-07-17,2013-06-17,2013-08-10,2013-08-03,0,0,0,0,0,0,0,0,8,3,33,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-10-21,2013-01-01,2013-10-01,2013-10-01,2013-10-20,0,0,-1,-5,-37,0,0,-2,-5,-5,41567,294,21,21,2,43,5938,1,10,1366,4,456,114,2013-12-31,2013-12-31,2013-10-31,2013-10-26,21-Oct-2013,Oct 21st 2013,Mon 10-21-13,1,1,0,0,2013-09-21,365,92,31,7,2012-12-30,2013-09-29,2013-09-29,2013-10-20,2013-10-21,2013-10-21,Mon       ,2013M10,Oct-2013,N,2013M10   ,N,Year 2013 - Quarter 04,2013Q04   ,N,43,2013,2013-10-20,2013-10-26,N,Wk.43 - 13,2013-10-20 00:00:00,2013-10-26 00:00:00,2013W43   ,2013W43   ,10/20/13 - 10/26/13,10/20 - 10/26,2013,N,2012-10-21,2011-10-21,2013-07-21,2013-04-21,2013-09-21,2013-08-21,2013-10-14,2013-10-07,0,0,0,0,0,0,0,0,10,4,43,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-03-27,2013-01-01,2013-01-01,2013-03-01,2013-03-24,0,-3,-8,-35,-245,0,-3,-9,-35,-35,41359,86,86,27,4,13,5908,3,3,1359,1,453,114,2013-12-31,2013-03-31,2013-03-31,2013-03-30,27-Mar-2013,Mar 27th 2013,Wed 03-27-13,1,0,0,0,2013-03-21,365,90,31,7,2012-12-30,2012-12-30,2013-02-24,2013-03-24,2013-03-25,2013-03-27,Wed       ,2013M03,Mar-2013,N,2013M03   ,N,Year 2013 - Quarter 01,2013Q01   ,N,13,2013,2013-03-24,2013-03-30,N,Wk.13 - 13,2013-03-24 00:00:00,2013-03-30 00:00:00,2013W13   ,2013W13   ,03/24/13 - 03/30/13,03/24 - 03/30,2013,N,2012-03-27,2011-03-27,2012-12-27,2012-09-27,2013-02-27,2013-01-27,2013-03-20,2013-03-13,0,0,0,0,0,0,0,0,3,1,13,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-11-22,2013-01-01,2013-10-01,2013-11-01,2013-11-17,0,0,0,-1,-5,0,0,-1,-1,-1,41599,326,53,22,6,47,5942,2,11,1367,4,456,114,2013-12-31,2013-12-31,2013-11-30,2013-11-23,22-Nov-2013,Nov 22nd 2013,Fri 11-22-13,1,1,1,0,2013-09-21,365,92,30,7,2012-12-30,2013-09-29,2013-10-27,2013-11-17,2013-11-18,2013-11-22,Fri       ,2013M11,Nov-2013,N,2013M11   ,N,Year 2013 - Quarter 04,2013Q04   ,N,47,2013,2013-11-17,2013-11-23,N,Wk.47 - 13,2013-11-17 00:00:00,2013-11-23 00:00:00,2013W47   ,2013W47   ,11/17/13 - 11/23/13,11/17 - 11/23,2013,N,2012-11-22,2011-11-22,2013-08-22,2013-05-22,2013-10-22,2013-09-22,2013-11-15,2013-11-08,0,0,0,0,0,0,0,0,11,4,47,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-07-07,2013-01-01,2013-07-01,2013-07-01,2013-07-07,0,-1,-4,-20,-143,0,-1,-5,-20,-21,41461,188,7,7,1,28,5923,1,7,1363,3,455,114,2013-12-31,2013-09-30,2013-07-31,2013-07-13,07-Jul-2013,Jul 7th 2013,Sun 07-07-13,1,0,0,0,2013-06-21,365,92,31,7,2012-12-30,2013-06-30,2013-06-30,2013-07-07,2013-07-01,2013-07-07,Sun       ,2013M07,Jul-2013,N,2013M07   ,N,Year 2013 - Quarter 03,2013Q03   ,N,28,2013,2013-07-07,2013-07-13,Y,Wk.28 - 13,2013-07-07 00:00:00,2013-07-13 00:00:00,2013W28   ,2013W28   ,07/07/13 - 07/13/13,07/07 - 07/13,2013,N,2012-07-07,2011-07-07,2013-04-07,2013-01-07,2013-06-07,2013-05-07,2013-06-30,2013-06-23,0,0,0,0,0,0,0,0,7,3,28,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-09-11,2013-01-01,2013-07-01,2013-09-01,2013-09-08,0,-1,-2,-11,-77,0,-1,-3,-11,-11,41527,254,73,11,4,37,5932,3,9,1365,3,455,114,2013-12-31,2013-09-30,2013-09-30,2013-09-14,11-Sep-2013,Sep 11th 2013,Wed 09-11-13,1,0,0,0,2013-06-21,365,92,30,7,2012-12-30,2013-06-30,2013-08-25,2013-09-08,2013-09-09,2013-09-11,Wed       ,2013M09,Sep-2013,N,2013M09   ,N,Year 2013 - Quarter 03,2013Q03   ,N,37,2013,2013-09-08,2013-09-14,N,Wk.37 - 13,2013-09-08 00:00:00,2013-09-14 00:00:00,2013W37   ,2013W37   ,09/08/13 - 09/14/13,09/08 - 09/14,2013,N,2012-09-11,2011-09-11,2013-06-11,2013-03-11,2013-08-11,2013-07-11,2013-09-04,2013-08-28,0,0,0,0,0,0,0,0,9,3,37,3,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-10-19,2013-01-01,2013-10-01,2013-10-01,2013-10-13,0,0,-1,-6,-39,0,0,-2,-6,-6,41565,292,19,19,7,42,5937,1,10,1366,4,456,114,2013-12-31,2013-12-31,2013-10-31,2013-10-19,19-Oct-2013,Oct 19th 2013,Sat 10-19-13,1,1,0,0,2013-09-21,365,92,31,7,2012-12-30,2013-09-29,2013-09-29,2013-10-13,2013-10-14,2013-10-19,Sat       ,2013M10,Oct-2013,N,2013M10   ,N,Year 2013 - Quarter 04,2013Q04   ,N,42,2013,2013-10-13,2013-10-19,N,Wk.42 - 13,2013-10-13 00:00:00,2013-10-19 00:00:00,2013W42   ,2013W42   ,10/13/13 - 10/19/13,10/13 - 10/19,2013,N,2012-10-19,2011-10-19,2013-07-19,2013-04-19,2013-09-19,2013-08-19,2013-10-12,2013-10-05,0,0,0,0,0,0,0,0,10,4,42,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-12-23,2013-01-01,2013-10-01,2013-12-01,2013-12-22,0,0,1,4,26,0,0,0,4,4,41630,357,84,23,2,52,5947,3,12,1368,4,456,114,2013-12-31,2013-12-31,2013-12-31,2013-12-28,23-Dec-2013,Dec 23rd 2013,Mon 12-23-13,0,0,0,0,2013-12-21,365,92,31,7,2012-12-30,2013-09-29,2013-11-24,2013-12-22,2013-12-23,2013-12-23,Mon       ,2013M12,Dec-2013,N,2013M12   ,N,Year 2013 - Quarter 04,2013Q04   ,N,52,2013,2013-12-22,2013-12-28,N,Wk.52 - 13,2013-12-22 00:00:00,2013-12-28 00:00:00,2013W52   ,2013W52   ,12/22/13 - 12/28/13,12/22 - 12/28,2013,N,2012-12-23,2011-12-23,2013-09-23,2013-06-23,2013-11-23,2013-10-23,2013-12-16,2013-12-09,0,0,0,0,0,0,0,0,12,4,52,1,0,1,0,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-05-24,2013-01-01,2013-04-01,2013-05-01,2013-05-19,0,-2,-6,-27,-187,0,-2,-7,-27,-27,41417,144,54,24,6,21,5916,2,5,1361,2,454,114,2013-12-31,2013-06-30,2013-05-31,2013-05-25,24-May-2013,May 24th 2013,Fri 05-24-13,1,0,0,0,2013-03-21,365,91,31,7,2012-12-30,2013-03-31,2013-04-28,2013-05-19,2013-05-20,2013-05-24,Fri       ,2013M05,May-2013,N,2013M05   ,N,Year 2013 - Quarter 02,2013Q02   ,N,21,2013,2013-05-19,2013-05-25,N,Wk.21 - 13,2013-05-19 00:00:00,2013-05-25 00:00:00,2013W21   ,2013W21   ,05/19/13 - 05/25/13,05/19 - 05/25,2013,N,2012-05-24,2011-05-24,2013-02-24,2012-11-24,2013-04-24,2013-03-24,2013-05-17,2013-05-10,0,0,0,0,0,0,0,0,5,2,21,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-11-05,2013-01-01,2013-10-01,2013-11-01,2013-11-03,0,0,0,-3,-22,0,0,-1,-3,-3,41582,309,36,5,3,45,5940,2,11,1367,4,456,114,2013-12-31,2013-12-31,2013-11-30,2013-11-09,05-Nov-2013,Nov 5th 2013,Tue 11-05-13,1,1,1,0,2013-09-21,365,92,30,7,2012-12-30,2013-09-29,2013-10-27,2013-11-03,2013-11-04,2013-11-05,Tue       ,2013M11,Nov-2013,N,2013M11   ,N,Year 2013 - Quarter 04,2013Q04   ,N,45,2013,2013-11-03,2013-11-09,N,Wk.45 - 13,2013-11-03 00:00:00,2013-11-09 00:00:00,2013W45   ,2013W45   ,11/03/13 - 11/09/13,11/03 - 11/09,2013,N,2012-11-05,2011-11-05,2013-08-05,2013-05-05,2013-10-05,2013-09-05,2013-10-29,2013-10-22,0,0,0,0,0,0,0,0,11,4,45,4,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-05-29,2013-01-01,2013-04-01,2013-05-01,2013-05-26,0,-2,-6,-26,-182,0,-2,-6,-26,-26,41422,149,59,29,4,22,5917,2,5,1361,2,454,114,2013-12-31,2013-06-30,2013-05-31,2013-06-01,29-May-2013,May 29th 2013,Wed 05-29-13,1,0,0,0,2013-03-21,365,91,31,7,2012-12-30,2013-03-31,2013-05-26,2013-05-26,2013-05-27,2013-05-29,Wed       ,2013M05,May-2013,N,2013M05   ,N,Year 2013 - Quarter 02,2013Q02   ,N,22,2013,2013-05-26,2013-06-01,N,Wk.22 - 13,2013-05-26 00:00:00,2013-06-01 00:00:00,2013W22   ,2013W22   ,05/26/13 - 06/01/13,05/26 - 06/01,2013,N,2012-05-29,2011-05-29,2013-02-28,2012-11-29,2013-04-29,2013-03-29,2013-05-22,2013-05-15,0,0,0,0,0,0,0,0,6,2,22,2,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-01-02,2013-01-01,2013-01-01,2013-01-01,2013-01-01,0,-3,-10,-47,-329,0,-3,-11,-47,-47,41275,2,2,2,4,1,5896,1,1,1357,1,453,114,2013-12-31,2013-03-31,2013-01-31,2013-01-05,02-Jan-2013,Jan 2nd 2013,Wed 01-02-13,1,0,0,0,2012-12-21,365,90,31,5,2012-12-30,2012-12-30,2012-12-30,2012-12-30,2012-12-31,2013-01-02,Wed       ,2013M01,Jan-2013,N,2013M01   ,N,Year 2013 - Quarter 01,2013Q01   ,N,1,2013,2012-12-30,2013-01-05,N,Wk.01 - 13,2013-01-01 00:00:00,2013-01-05 00:00:00,2013W01   ,2013W01   ,01/01/13 - 01/05/13,01/01 - 01/05,2013,N,2012-01-02,2011-01-02,2012-10-02,2012-07-02,2012-12-02,2012-11-02,2012-12-26,2012-12-19,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-12-02,2013-01-01,2013-10-01,2013-12-01,2013-12-01,0,0,1,1,5,0,0,0,1,1,41609,336,63,2,2,49,5944,3,12,1368,4,456,114,2013-12-31,2013-12-31,2013-12-31,2013-12-07,02-Dec-2013,Dec 2nd 2013,Mon 12-02-13,0,0,0,0,2013-09-21,365,92,31,7,2012-12-30,2013-09-29,2013-11-24,2013-12-01,2013-12-02,2013-12-02,Mon       ,2013M12,Dec-2013,N,2013M12   ,N,Year 2013 - Quarter 04,2013Q04   ,N,49,2013,2013-12-01,2013-12-07,N,Wk.49 - 13,2013-12-01 00:00:00,2013-12-07 00:00:00,2013W49   ,2013W49   ,12/01/13 - 12/07/13,12/01 - 12/07,2013,N,2012-12-02,2011-12-02,2013-09-02,2013-06-02,2013-11-02,2013-10-02,2013-11-25,2013-11-18,0,0,0,0,0,0,0,0,12,4,49,4,0,1,0,2005-09-07,DW_BATCH  ,2013-11-27 00:16:56,DW_BATCH
-2013-02-24,2013-01-01,2013-01-01,2013-02-01,2013-02-24,0,-3,-9,-39,-276,0,-3,-9,-39,-40,41328,55,55,24,1,9,5904,2,2,1358,1,453,114,2013-12-31,2013-03-31,2013-02-28,2013-03-

<TRUNCATED>
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/resources/expected_result/flat_item/part-r-00000
----------------------------------------------------------------------
diff --git a/job/src/test/resources/expected_result/flat_item/part-r-00000 b/job/src/test/resources/expected_result/flat_item/part-r-00000
deleted file mode 100644
index 452085d..0000000
Binary files a/job/src/test/resources/expected_result/flat_item/part-r-00000 and /dev/null differ



[7/9] incubator-kylin git commit: KYLIN-1010 Decompose project job

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/engine-spark/src/test/java/org/apache/kylin/engine/spark/BuildCubeWithSparkTest.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/test/java/org/apache/kylin/engine/spark/BuildCubeWithSparkTest.java b/engine-spark/src/test/java/org/apache/kylin/engine/spark/BuildCubeWithSparkTest.java
deleted file mode 100644
index d24cc79..0000000
--- a/engine-spark/src/test/java/org/apache/kylin/engine/spark/BuildCubeWithSparkTest.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.engine.spark;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.List;
-import java.util.TimeZone;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.AbstractKylinTestCase;
-import org.apache.kylin.common.util.ClassUtil;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.CubeUpdate;
-import org.apache.kylin.job.DeployUtil;
-import org.apache.kylin.job.engine.JobEngineConfig;
-import org.apache.kylin.job.execution.AbstractExecutable;
-import org.apache.kylin.job.execution.DefaultChainedExecutable;
-import org.apache.kylin.job.execution.ExecutableState;
-import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
-import org.apache.kylin.job.lock.MockJobLock;
-import org.apache.kylin.job.manager.ExecutableManager;
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import com.google.common.collect.Lists;
-
-public class BuildCubeWithSparkTest {
-
-    private CubeManager cubeManager;
-    private DefaultScheduler scheduler;
-    protected ExecutableManager jobService;
-
-    private static final Log logger = LogFactory.getLog(BuildCubeWithSparkTest.class);
-
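-    // Polls the job status every five seconds and returns once it reaches a terminal state (SUCCEED or ERROR).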
-    protected void waitForJob(String jobId) {
-        while (true) {
-            AbstractExecutable job = jobService.getJob(jobId);
-            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) {
-                break;
-            } else {
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-    }
-
-    @BeforeClass
-    public static void beforeClass() throws Exception {
-        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
-        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
-    }
-
-    @Before
-    public void before() throws Exception {
-        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
-
-        DeployUtil.initCliWorkDir();
-        DeployUtil.deployMetadata();
-        DeployUtil.overrideJobJarLocations();
-
-        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        jobService = ExecutableManager.getInstance(kylinConfig);
-        for (String jobId : jobService.getAllJobIds()) {
-            jobService.deleteJob(jobId);
-        }
-        scheduler = DefaultScheduler.getInstance();
-        scheduler.init(new JobEngineConfig(kylinConfig), new MockJobLock());
-        if (!scheduler.hasStarted()) {
-            throw new RuntimeException("scheduler has not been started");
-        }
-        cubeManager = CubeManager.getInstance(kylinConfig);
-
-    }
-
-    @After
-    public void after() {
-        HBaseMetadataTestCase.staticCleanupTestMetadata();
-    }
-
-    @Test
-    public void test() throws Exception {
-        final CubeSegment segment = createSegment();
-        String confPath = new File(AbstractKylinTestCase.SANDBOX_TEST_DATA).getAbsolutePath();
-        String coprocessor = KylinConfig.getInstanceFromEnv().getCoprocessorLocalJar();
-        logger.info("confPath location:" + confPath);
-        logger.info("coprocessor location:" + coprocessor);
-        final DefaultChainedExecutable cubingJob = new SparkBatchCubingEngine(confPath, coprocessor).createBatchCubingJob(segment, "BuildCubeWithSpark");
-        jobService.addJob(cubingJob);
-        waitForJob(cubingJob.getId());
-        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(cubingJob.getId()).getState());
-    }
-
-    private void clearSegment(String cubeName) throws Exception {
-        CubeInstance cube = cubeManager.getCube(cubeName);
-        // remove all existing segments
-        CubeUpdate cubeBuilder = new CubeUpdate(cube);
-        cubeBuilder.setToRemoveSegs(cube.getSegments().toArray(new CubeSegment[cube.getSegments().size()]));
-        cubeManager.updateCube(cubeBuilder);
-    }
-
-    private CubeSegment createSegment() throws Exception {
-        String cubeName = "test_kylin_cube_with_slr_left_join_empty";
-        clearSegment(cubeName);
-
-        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
-        f.setTimeZone(TimeZone.getTimeZone("GMT"));
-        long dateStart = cubeManager.getCube(cubeName).getDescriptor().getModel().getPartitionDesc().getPartitionDateStart();
-        long dateEnd = f.parse("2050-11-12").getTime();
-
-        // this cube's start date is 0, end date is 20501112000000
-        List<String> result = Lists.newArrayList();
-        return cubeManager.appendSegments(cubeManager.getCube(cubeName), dateEnd);
-
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/engine-streaming/pom.xml
----------------------------------------------------------------------
diff --git a/engine-streaming/pom.xml b/engine-streaming/pom.xml
index 46b63b3..955124c 100644
--- a/engine-streaming/pom.xml
+++ b/engine-streaming/pom.xml
@@ -26,11 +26,6 @@
         </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-invertedindex</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-core-storage</artifactId>
             <version>${project.parent.version}</version>
         </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/pom.xml
----------------------------------------------------------------------
diff --git a/invertedindex/pom.xml b/invertedindex/pom.xml
index 4d1796f..9e8f92e 100644
--- a/invertedindex/pom.xml
+++ b/invertedindex/pom.xml
@@ -33,12 +33,12 @@
         <!--Kylin Jar -->
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-core-metadata</artifactId>
+            <artifactId>kylin-engine-streaming</artifactId>
             <version>${project.parent.version}</version>
         </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-core-dictionary</artifactId>
+            <artifactId>kylin-source-hive</artifactId>
             <version>${project.parent.version}</version>
         </dependency>
 
@@ -56,43 +56,23 @@
             <version>${project.parent.version}</version>
         </dependency>
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
+            <groupId>org.apache.hive.hcatalog</groupId>
+            <artifactId>hive-hcatalog-core</artifactId>
+            <version>${hive-hcatalog.version}</version>
             <scope>provided</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-annotations</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
+            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
             <scope>provided</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-minicluster</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.mrunit</groupId>
-            <artifactId>mrunit</artifactId>
-            <classifier>hadoop2</classifier>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-hadoop2-compat</artifactId>
             <scope>provided</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-client</artifactId>
             <scope>provided</scope>
         </dependency>
@@ -100,18 +80,12 @@
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-server</artifactId>
             <scope>provided</scope>
-            <!-- version conflict with hadoop2.2 -->
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-                </exclusion>
-            </exclusions>
         </dependency>
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-            <scope>provided</scope>
+            <groupId>org.apache.mrunit</groupId>
+            <artifactId>mrunit</artifactId>
+            <classifier>hadoop2</classifier>
+            <scope>test</scope>
         </dependency>
         <dependency>
             <groupId>junit</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java
new file mode 100644
index 0000000..87ee70e
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.dict;
+
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.dict.DistinctColumnValuesProvider;
+import org.apache.kylin.engine.mr.DFSFileTable;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.source.ReadableTable;
+
+/**
+ */
+public class CreateInvertedIndexDictionaryJob extends AbstractHadoopJob {
+
+    @Override
+    public int run(String[] args) throws Exception {
+        Options options = new Options();
+
+        try {
+            options.addOption(OPTION_II_NAME);
+            options.addOption(OPTION_INPUT_PATH);
+            parseOptions(options, args);
+
+            final String iiname = getOptionValue(OPTION_II_NAME);
+            final String factColumnsInputPath = getOptionValue(OPTION_INPUT_PATH);
+            final KylinConfig config = KylinConfig.getInstanceFromEnv();
+
+            IIManager mgr = IIManager.getInstance(config);
+            IIInstance ii = mgr.getII(iiname);
+
+            mgr.buildInvertedIndexDictionary(ii.getFirstSegment(), new DistinctColumnValuesProvider() {
+                @Override
+                public ReadableTable getDistinctValuesFor(TblColRef col) {
+                    return new DFSFileTable(factColumnsInputPath + "/" + col.getName(), -1);
+                }
+            });
+            return 0;
+        } catch (Exception e) {
+            printUsage(options);
+            throw e;
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new CreateInvertedIndexDictionaryJob(), args);
+        System.exit(exitCode);
+    }
+
+}

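For reference, a minimal sketch of driving this job programmatically; the flag spellings (-iiname, -input) are assumed to follow the OPTION_* constants on AbstractHadoopJob, and the II name and path are hypothetical:

    // hypothetical driver snippet; -input points at the fact-distinct-columns
    // directory produced by IIDistinctColumnsJob later in this commit
    String[] args = { "-iiname", "test_ii", "-input", "/tmp/kylin/test_ii/fact_distinct_columns" };
    int exitCode = org.apache.hadoop.util.ToolRunner.run(new CreateInvertedIndexDictionaryJob(), args);
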
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIBulkLoadJob.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIBulkLoadJob.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIBulkLoadJob.java
new file mode 100644
index 0000000..300c89b
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIBulkLoadJob.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.IISegment;
+import org.apache.kylin.invertedindex.model.IIDesc;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+
+/**
+ */
+public class IIBulkLoadJob extends AbstractHadoopJob {
+
+    @Override
+    public int run(String[] args) throws Exception {
+        Options options = new Options();
+
+        try {
+            options.addOption(OPTION_INPUT_PATH);
+            options.addOption(OPTION_HTABLE_NAME);
+            options.addOption(OPTION_II_NAME);
+            parseOptions(options, args);
+
+            String tableName = getOptionValue(OPTION_HTABLE_NAME);
+            String input = getOptionValue(OPTION_INPUT_PATH);
+            String iiname = getOptionValue(OPTION_II_NAME);
+
+            FileSystem fs = FileSystem.get(getConf());
+            FsPermission permission = new FsPermission((short) 0777);
+            fs.setPermission(new Path(input, IIDesc.HBASE_FAMILY), permission);
+
+            int hbaseExitCode = ToolRunner.run(new LoadIncrementalHFiles(getConf()), new String[] { input, tableName });
+
+            IIManager mgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
+            IIInstance ii = mgr.getII(iiname);
+            IISegment seg = ii.getFirstSegment();
+            seg.setStorageLocationIdentifier(tableName);
+            seg.setStatus(SegmentStatusEnum.READY);
+            mgr.updateII(ii);
+
+            return hbaseExitCode;
+
+        } catch (Exception e) {
+            printUsage(options);
+            throw e;
+        }
+    }
+
+}

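IIBulkLoadJob is the last of the three HBase steps in this commit; a sketch of the expected ordering, with all flag spellings, names, and paths hypothetical:

    // 1. pre-split the HTable by shard   2. write HFiles   3. bulk load + mark segment READY
    ToolRunner.run(new IICreateHTableJob(), new String[] {
            "-iiname", "test_ii", "-htablename", "KYLIN_II_TEST" });
    ToolRunner.run(new IICreateHFileJob(), new String[] {
            "-jobname", "II_HFile", "-iiname", "test_ii",
            "-input", "/tmp/ii/rows", "-output", "/tmp/ii/hfile", "-htablename", "KYLIN_II_TEST" });
    ToolRunner.run(new IIBulkLoadJob(), new String[] {
            "-input", "/tmp/ii/hfile", "-htablename", "KYLIN_II_TEST", "-iiname", "test_ii" });
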
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileJob.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileJob.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileJob.java
new file mode 100644
index 0000000..528f06f
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileJob.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author yangli9
+ * 
+ */
+public class IICreateHFileJob extends AbstractHadoopJob {
+
+    protected static final Logger logger = LoggerFactory.getLogger(IICreateHFileJob.class);
+
+    public int run(String[] args) throws Exception {
+        Options options = new Options();
+
+        try {
+            options.addOption(OPTION_JOB_NAME);
+            options.addOption(OPTION_II_NAME);
+            options.addOption(OPTION_INPUT_PATH);
+            options.addOption(OPTION_OUTPUT_PATH);
+            options.addOption(OPTION_HTABLE_NAME);
+            parseOptions(options, args);
+
+            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
+
+            job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
+
+            setJobClasspath(job);
+
+            addInputDirs(getOptionValue(OPTION_INPUT_PATH), job);
+            FileOutputFormat.setOutputPath(job, output);
+
+            job.setInputFormatClass(SequenceFileInputFormat.class);
+            job.setMapperClass(IICreateHFileMapper.class);
+            job.setMapOutputKeyClass(ImmutableBytesWritable.class);
+            job.setMapOutputValueClass(KeyValue.class);
+
+            String tableName = getOptionValue(OPTION_HTABLE_NAME);
+            HTable htable = new HTable(HBaseConfiguration.create(getConf()), tableName);
+            HFileOutputFormat.configureIncrementalLoad(job, htable);
+
+            this.deletePath(job.getConfiguration(), output);
+
+            return waitForCompletion(job);
+        } catch (Exception e) {
+            printUsage(options);
+            throw e;
+        }
+    }
+
+}

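Only the mapper is wired explicitly above because HFileOutputFormat.configureIncrementalLoad derives the rest of the job setup from the target table; roughly, per the HBase 0.98-era API (table name hypothetical):

    HTable htable = new HTable(HBaseConfiguration.create(conf), "KYLIN_II_TEST");
    HFileOutputFormat.configureIncrementalLoad(job, htable);
    // after this call: output format = HFileOutputFormat,
    // partitioner = TotalOrderPartitioner keyed on the table's region boundaries,
    // reducer = KeyValueSortReducer (for KeyValue map output),
    // numReduceTasks = region count, so each reducer writes HFiles for one region
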
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileMapper.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileMapper.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileMapper.java
new file mode 100644
index 0000000..1adf8d6
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileMapper.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.Type;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.kylin.engine.mr.KylinMapper;
+import org.apache.kylin.invertedindex.model.IIDesc;
+
+/**
+ * @author yangli9
+ */
+public class IICreateHFileMapper extends KylinMapper<ImmutableBytesWritable, ImmutableBytesWritable, ImmutableBytesWritable, KeyValue> {
+
+    long timestamp;
+
+    @Override
+    protected void setup(Context context) throws IOException, InterruptedException {
+        super.bindCurrentConfiguration(context.getConfiguration());
+
+        timestamp = System.currentTimeMillis();
+    }
+
+    @Override
+    protected void map(ImmutableBytesWritable key, ImmutableBytesWritable value, Context context) throws IOException, InterruptedException {
+
+        KeyValue kv = new KeyValue(key.get(), key.getOffset(), key.getLength(), //
+                IIDesc.HBASE_FAMILY_BYTES, 0, IIDesc.HBASE_FAMILY_BYTES.length, //
+                IIDesc.HBASE_QUALIFIER_BYTES, 0, IIDesc.HBASE_QUALIFIER_BYTES.length, //
+                timestamp, Type.Put, //
+                value.get(), value.getOffset(), value.getLength());
+
+        context.write(key, kv);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHTableJob.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHTableJob.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHTableJob.java
new file mode 100644
index 0000000..0b7cb7a
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHTableJob.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.BytesUtil;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.model.IIDesc;
+import org.apache.kylin.invertedindex.model.IIKeyValueCodec;
+import org.apache.kylin.metadata.realization.IRealizationConstants;
+
+/**
+ * @author George Song (ysong1)
+ */
+public class IICreateHTableJob extends AbstractHadoopJob {
+
+    @Override
+    public int run(String[] args) throws Exception {
+        Options options = new Options();
+
+        try {
+            options.addOption(OPTION_II_NAME);
+            options.addOption(OPTION_HTABLE_NAME);
+            parseOptions(options, args);
+
+            String tableName = getOptionValue(OPTION_HTABLE_NAME);
+            String iiName = getOptionValue(OPTION_II_NAME);
+
+            KylinConfig config = KylinConfig.getInstanceFromEnv();
+            IIManager iiManager = IIManager.getInstance(config);
+            IIInstance ii = iiManager.getII(iiName);
+            int sharding = ii.getDescriptor().getSharding();
+
+            HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(tableName));
+            HColumnDescriptor cf = new HColumnDescriptor(IIDesc.HBASE_FAMILY);
+            cf.setMaxVersions(1);
+
+            String hbaseDefaultCC = config.getHbaseDefaultCompressionCodec().toLowerCase();
+
+            switch (hbaseDefaultCC) {
+            case "snappy": {
+                logger.info("hbase will use snappy to compress data");
+                cf.setCompressionType(Compression.Algorithm.SNAPPY);
+                break;
+            }
+            case "lzo": {
+                logger.info("hbase will use lzo to compress data");
+                cf.setCompressionType(Compression.Algorithm.LZO);
+                break;
+            }
+            case "gz":
+            case "gzip": {
+                logger.info("hbase will use gzip to compress data");
+                cf.setCompressionType(Compression.Algorithm.GZ);
+                break;
+            }
+            case "lz4": {
+                logger.info("hbase will use lz4 to compress data");
+                cf.setCompressionType(Compression.Algorithm.LZ4);
+                break;
+            }
+            default: {
+                logger.info("hbase will not user any compression codec to compress data");
+            }
+            }
+
+            cf.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
+            tableDesc.addFamily(cf);
+            tableDesc.setValue(IRealizationConstants.HTableTag, config.getMetadataUrlPrefix());
+            tableDesc.setValue(IRealizationConstants.HTableCreationTime, String.valueOf(System.currentTimeMillis()));
+            tableDesc.setValue(HTableDescriptor.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
+
+            Configuration conf = HBaseConfiguration.create(getConf());
+            if (User.isHBaseSecurityEnabled(conf)) {
+                // add coprocessor for bulk load
+                tableDesc.addCoprocessor("org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint");
+            }
+
+            IIDeployCoprocessorCLI.deployCoprocessor(tableDesc);
+
+            // drop the table first
+            HBaseAdmin admin = new HBaseAdmin(conf);
+            if (admin.tableExists(tableName)) {
+                admin.disableTable(tableName);
+                admin.deleteTable(tableName);
+            }
+
+            // create table
+            byte[][] splitKeys = getSplits(sharding);
+            if (splitKeys.length == 0)
+                splitKeys = null;
+            admin.createTable(tableDesc, splitKeys);
+            if (splitKeys != null) {
+                for (int i = 0; i < splitKeys.length; i++) {
+                    System.out.println("split key " + i + ": " + BytesUtil.toHex(splitKeys[i]));
+                }
+            }
+            System.out.println("create hbase table " + tableName + " done.");
+            admin.close();
+
+            return 0;
+        } catch (Exception e) {
+            printUsage(options);
+            throw e;
+        }
+    }
+
+    //one region for one shard
+    private byte[][] getSplits(int shard) {
+        byte[][] result = new byte[shard - 1][];
+        for (int i = 1; i < shard; ++i) {
+            byte[] split = new byte[IIKeyValueCodec.SHARD_LEN];
+            BytesUtil.writeUnsigned(i, split, 0, IIKeyValueCodec.SHARD_LEN);
+            result[i - 1] = split;
+        }
+        return result;
+    }
+
+}

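getSplits emits sharding-1 boundary keys, so an n-shard index lands on exactly n regions, and the DisabledRegionSplitPolicy above keeps it that way. A small sketch of the boundaries, assuming IIKeyValueCodec.SHARD_LEN is 2 bytes:

    // hypothetical: sharding = 4 produces split keys 0x0001, 0x0002, 0x0003,
    // i.e. regions [-inf,1), [1,2), [2,3), [3,+inf) keyed on the shard prefix
    byte[] split = new byte[2];
    BytesUtil.writeUnsigned(1, split, 0, 2);   // first boundary: 00 01
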
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDeployCoprocessorCLI.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDeployCoprocessorCLI.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDeployCoprocessorCLI.java
new file mode 100644
index 0000000..a4c1961
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDeployCoprocessorCLI.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.mr.HadoopUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * THIS IS A TAILORED DUPLICATE OF org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI TO AVOID CYCLIC
+ * DEPENDENCY. INVERTED-INDEX CODE IS NOW SPLIT ACROSS kylin-invertedindex AND kylin-storage-hbase.
+ * DEFINITELY NEEDS FURTHER REFACTORING.
+ */
+public class IIDeployCoprocessorCLI {
+
+    private static final Logger logger = LoggerFactory.getLogger(IIDeployCoprocessorCLI.class);
+
+    public static final String CubeObserverClass = "org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.AggregateRegionObserver";
+    public static final String CubeEndpointClass = "org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.CubeVisitService";
+    public static final String IIEndpointClass = "org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.IIEndpoint";
+
+    public static void deployCoprocessor(HTableDescriptor tableDesc) {
+        try {
+            initHTableCoprocessor(tableDesc);
+            logger.info("hbase table " + tableDesc.getName() + " deployed with coprocessor.");
+
+        } catch (Exception ex) {
+            logger.error("Error deploying coprocessor on " + tableDesc.getName(), ex);
+            logger.error("Will try creating the table without coprocessor.");
+        }
+    }
+
+    private static void initHTableCoprocessor(HTableDescriptor desc) throws IOException {
+        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        Configuration hconf = HadoopUtil.getCurrentConfiguration();
+        FileSystem fileSystem = FileSystem.get(hconf);
+
+        String localCoprocessorJar = kylinConfig.getCoprocessorLocalJar();
+        Path hdfsCoprocessorJar = uploadCoprocessorJar(localCoprocessorJar, fileSystem, null);
+
+        addCoprocessorOnHTable(desc, hdfsCoprocessorJar);
+    }
+
+    private static void addCoprocessorOnHTable(HTableDescriptor desc, Path hdfsCoprocessorJar) throws IOException {
+        logger.info("Add coprocessor on " + desc.getNameAsString());
+        desc.addCoprocessor(IIEndpointClass, hdfsCoprocessorJar, 1000, null);
+        desc.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
+        desc.addCoprocessor(CubeObserverClass, hdfsCoprocessorJar, 1002, null);
+    }
+
+    private static Path uploadCoprocessorJar(String localCoprocessorJar, FileSystem fileSystem, Set<String> oldJarPaths) throws IOException {
+        Path uploadPath = null;
+        File localCoprocessorFile = new File(localCoprocessorJar);
+
+        // check existing jars
+        if (oldJarPaths == null) {
+            oldJarPaths = new HashSet<String>();
+        }
+        Path coprocessorDir = getCoprocessorHDFSDir(fileSystem, KylinConfig.getInstanceFromEnv());
+        for (FileStatus fileStatus : fileSystem.listStatus(coprocessorDir)) {
+            if (isSame(localCoprocessorFile, fileStatus)) {
+                uploadPath = fileStatus.getPath();
+                break;
+            }
+            String filename = fileStatus.getPath().toString();
+            if (filename.endsWith(".jar")) {
+                oldJarPaths.add(filename);
+            }
+        }
+
+        // upload if not existing
+        if (uploadPath == null) {
+            // figure out a unique new jar file name
+            Set<String> oldJarNames = new HashSet<String>();
+            for (String path : oldJarPaths) {
+                oldJarNames.add(new Path(path).getName());
+            }
+            String baseName = getBaseFileName(localCoprocessorJar);
+            String newName = null;
+            int i = 0;
+            while (newName == null) {
+                newName = baseName + "-" + (i++) + ".jar";
+                if (oldJarNames.contains(newName))
+                    newName = null;
+            }
+
+            // upload
+            uploadPath = new Path(coprocessorDir, newName);
+            FileInputStream in = null;
+            FSDataOutputStream out = null;
+            try {
+                in = new FileInputStream(localCoprocessorFile);
+                out = fileSystem.create(uploadPath);
+                IOUtils.copy(in, out);
+            } finally {
+                IOUtils.closeQuietly(in);
+                IOUtils.closeQuietly(out);
+            }
+
+            fileSystem.setTimes(uploadPath, localCoprocessorFile.lastModified(), -1);
+
+        }
+
+        uploadPath = uploadPath.makeQualified(fileSystem.getUri(), null);
+        return uploadPath;
+    }
+
+    private static boolean isSame(File localCoprocessorFile, FileStatus fileStatus) {
+        return fileStatus.getLen() == localCoprocessorFile.length() && fileStatus.getModificationTime() == localCoprocessorFile.lastModified();
+    }
+
+    private static String getBaseFileName(String localCoprocessorJar) {
+        File localJar = new File(localCoprocessorJar);
+        String baseName = localJar.getName();
+        if (baseName.endsWith(".jar"))
+            baseName = baseName.substring(0, baseName.length() - ".jar".length());
+        return baseName;
+    }
+
+    private static Path getCoprocessorHDFSDir(FileSystem fileSystem, KylinConfig config) throws IOException {
+        String hdfsWorkingDirectory = config.getHdfsWorkingDirectory();
+        Path coprocessorDir = new Path(hdfsWorkingDirectory, "coprocessor");
+        fileSystem.mkdirs(coprocessorDir);
+        return coprocessorDir;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsCombiner.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsCombiner.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsCombiner.java
new file mode 100644
index 0000000..1f4611b
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsCombiner.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.IOException;
+import java.util.HashSet;
+
+import org.apache.hadoop.io.ShortWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.kylin.common.util.ByteArray;
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.engine.mr.KylinReducer;
+
+/**
+ * @author yangli9
+ */
+public class IIDistinctColumnsCombiner extends KylinReducer<ShortWritable, Text, ShortWritable, Text> {
+
+    private Text outputValue = new Text();
+
+    @Override
+    protected void setup(Context context) throws IOException {
+        super.bindCurrentConfiguration(context.getConfiguration());
+
+    }
+
+    @Override
+    public void reduce(ShortWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
+
+        HashSet<ByteArray> set = new HashSet<ByteArray>();
+        for (Text textValue : values) {
+            ByteArray value = new ByteArray(Bytes.copy(textValue.getBytes(), 0, textValue.getLength()));
+            set.add(value);
+        }
+
+        for (ByteArray value : set) {
+            outputValue.set(value.array(), value.offset(), value.length());
+            context.write(key, outputValue);
+        }
+    }
+
+}

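The combiner's only job is map-side deduplication, so the single reducer downstream sees far fewer repeated values. A sketch of exercising it through MRUnit (the test dependency added above); the column index and values are hypothetical:

    import java.util.Arrays;
    import org.apache.hadoop.io.ShortWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;

    ReduceDriver<ShortWritable, Text, ShortWritable, Text> driver =
            ReduceDriver.newReduceDriver(new IIDistinctColumnsCombiner());
    driver.withInput(new ShortWritable((short) 3),
            Arrays.asList(new Text("US"), new Text("US"), new Text("CN")));
    // only the two distinct values survive; order is unspecified (HashSet)
    org.junit.Assert.assertEquals(2, driver.run().size());
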
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsJob.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsJob.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsJob.java
new file mode 100644
index 0000000..042678e
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsJob.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.IOException;
+
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.ShortWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.mr.HadoopUtil;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.model.IIJoinedFlatTableDesc;
+import org.apache.kylin.metadata.model.IntermediateColumnDesc;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author yangli9
+ */
+public class IIDistinctColumnsJob extends AbstractHadoopJob {
+    protected static final Logger logger = LoggerFactory.getLogger(IIDistinctColumnsJob.class);
+
+    @Override
+    public int run(String[] args) throws Exception {
+        Options options = new Options();
+
+        try {
+            options.addOption(OPTION_JOB_NAME);
+            options.addOption(OPTION_TABLE_NAME);
+            options.addOption(OPTION_II_NAME);
+            options.addOption(OPTION_OUTPUT_PATH);
+            parseOptions(options, args);
+
+            job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
+            String tableName = getOptionValue(OPTION_TABLE_NAME).toUpperCase();
+            String iiName = getOptionValue(OPTION_II_NAME);
+            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
+
+            // ----------------------------------------------------------------------------
+
+            logger.info("Starting: " + job.getJobName() + " on table " + tableName);
+
+            IIManager iiMgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
+            IIInstance ii = iiMgr.getII(iiName);
+            job.getConfiguration().set(BatchConstants.TABLE_NAME, tableName);
+            job.getConfiguration().set(BatchConstants.TABLE_COLUMNS, getColumns(ii));
+
+            setJobClasspath(job);
+
+            setupMapper();
+            setupReducer(output);
+
+            return waitForCompletion(job);
+
+        } catch (Exception e) {
+            printUsage(options);
+            throw e;
+        }
+
+    }
+
+    private String getColumns(IIInstance ii) {
+        IIJoinedFlatTableDesc iiflat = new IIJoinedFlatTableDesc(ii.getDescriptor());
+        StringBuilder buf = new StringBuilder();
+        for (IntermediateColumnDesc col : iiflat.getColumnList()) {
+            if (buf.length() > 0)
+                buf.append(",");
+            buf.append(col.getColumnName());
+        }
+        return buf.toString();
+    }
+
+    private void setupMapper() throws IOException {
+
+        String tableName = job.getConfiguration().get(BatchConstants.TABLE_NAME);
+        String[] dbTableNames = HadoopUtil.parseHiveTableName(tableName);
+
+        logger.info("setting hcat input format, db name {} , table name {}", dbTableNames[0], dbTableNames[1]);
+
+        HCatInputFormat.setInput(job, dbTableNames[0], dbTableNames[1]);
+
+        job.setInputFormatClass(HCatInputFormat.class);
+
+        job.setMapperClass(IIDistinctColumnsMapper.class);
+        job.setCombinerClass(IIDistinctColumnsCombiner.class);
+        job.setMapOutputKeyClass(ShortWritable.class);
+        job.setMapOutputValueClass(Text.class);
+    }
+
+    private void setupReducer(Path output) throws IOException {
+        job.setReducerClass(IIDistinctColumnsReducer.class);
+        job.setOutputFormatClass(SequenceFileOutputFormat.class);
+        job.setOutputKeyClass(NullWritable.class);
+        job.setOutputValueClass(Text.class);
+
+        FileOutputFormat.setOutputPath(job, output);
+        job.getConfiguration().set(BatchConstants.OUTPUT_PATH, output.toString());
+
+        job.setNumReduceTasks(1);
+
+        deletePath(job.getConfiguration(), output);
+    }
+
+    public static void main(String[] args) throws Exception {
+        IIDistinctColumnsJob job = new IIDistinctColumnsJob();
+        int exitCode = ToolRunner.run(job, args);
+        System.exit(exitCode);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsMapper.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsMapper.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsMapper.java
new file mode 100644
index 0000000..3418a57
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsMapper.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.ShortWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.engine.mr.KylinMapper;
+
+/**
+ * @author yangli9
+ */
+public class IIDistinctColumnsMapper<KEYIN> extends KylinMapper<KEYIN, HCatRecord, ShortWritable, Text> {
+
+    private ShortWritable outputKey = new ShortWritable();
+    private Text outputValue = new Text();
+    private HCatSchema schema = null;
+    private int columnSize = 0;
+
+    @Override
+    protected void setup(Context context) throws IOException {
+        super.bindCurrentConfiguration(context.getConfiguration());
+        schema = HCatInputFormat.getTableSchema(context.getConfiguration());
+        columnSize = schema.getFields().size();
+    }
+
+    @Override
+    public void map(KEYIN key, HCatRecord record, Context context) throws IOException, InterruptedException {
+
+        HCatFieldSchema fieldSchema = null;
+        for (short i = 0; i < columnSize; i++) {
+            outputKey.set(i);
+            fieldSchema = schema.get(i);
+            Object fieldValue = record.get(fieldSchema.getName(), schema);
+            if (fieldValue == null)
+                continue;
+            byte[] bytes = Bytes.toBytes(fieldValue.toString());
+            outputValue.set(bytes, 0, bytes.length);
+            context.write(outputKey, outputValue);
+        }
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsReducer.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsReducer.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsReducer.java
new file mode 100644
index 0000000..fcb4dd5
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsReducer.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.IOException;
+import java.util.HashSet;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.ShortWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.kylin.common.util.ByteArray;
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.engine.mr.KylinReducer;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+
+/**
+ * @author yangli9
+ */
+public class IIDistinctColumnsReducer extends KylinReducer<ShortWritable, Text, NullWritable, Text> {
+
+    private String[] columns;
+
+    @Override
+    protected void setup(Context context) throws IOException {
+        super.bindCurrentConfiguration(context.getConfiguration());
+
+        Configuration conf = context.getConfiguration();
+        this.columns = conf.get(BatchConstants.TABLE_COLUMNS).split(",");
+    }
+
+    @Override
+    public void reduce(ShortWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
+        String columnName = columns[key.get()];
+
+        HashSet<ByteArray> set = new HashSet<ByteArray>();
+        for (Text textValue : values) {
+            ByteArray value = new ByteArray(Bytes.copy(textValue.getBytes(), 0, textValue.getLength()));
+            set.add(value);
+        }
+
+        Configuration conf = context.getConfiguration();
+        FileSystem fs = FileSystem.get(conf);
+        String outputPath = conf.get(BatchConstants.OUTPUT_PATH);
+        FSDataOutputStream out = fs.create(new Path(outputPath, columnName));
+
+        try {
+            for (ByteArray value : set) {
+                out.write(value.array(), value.offset(), value.length());
+                out.write('\n');
+            }
+        } finally {
+            out.close();
+        }
+
+    }
+
+}

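Since this reducer writes one plain-text file per column under OUTPUT_PATH (one distinct value per line), its output is exactly what CreateInvertedIndexDictionaryJob above reads back. A sketch of the handoff, with path and column names hypothetical:

    // IIDistinctColumnsJob output layout:
    //   /tmp/kylin/test_ii/fact_distinct_columns/COLUMN_A
    //   /tmp/kylin/test_ii/fact_distinct_columns/COLUMN_B
    // consumed later, per the dictionary job's DistinctColumnValuesProvider:
    ReadableTable values = new DFSFileTable("/tmp/kylin/test_ii/fact_distinct_columns/COLUMN_A", -1);
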
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexJob.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexJob.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexJob.java
new file mode 100644
index 0000000..c9ad448
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexJob.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.mr.HadoopUtil;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.IISegment;
+import org.apache.kylin.metadata.MetadataManager;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author yangli9
+ */
+public class InvertedIndexJob extends AbstractHadoopJob {
+    protected static final Logger logger = LoggerFactory.getLogger(InvertedIndexJob.class);
+
+    @Override
+    public int run(String[] args) throws Exception {
+        Options options = new Options();
+
+        try {
+            options.addOption(OPTION_JOB_NAME);
+            options.addOption(OPTION_II_NAME);
+            options.addOption(OPTION_TABLE_NAME);
+            options.addOption(OPTION_OUTPUT_PATH);
+            parseOptions(options, args);
+
+            job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
+            String iiname = getOptionValue(OPTION_II_NAME);
+            String intermediateTable = getOptionValue(OPTION_TABLE_NAME);
+            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
+
+            // ----------------------------------------------------------------------------
+
+            System.out.println("Starting: " + job.getJobName());
+
+            IIInstance ii = getII(iiname);
+            short sharding = ii.getDescriptor().getSharding();
+
+            setJobClasspath(job);
+
+            setupMapper(intermediateTable);
+            setupReducer(output, sharding);
+            attachMetadata(ii);
+
+            return waitForCompletion(job);
+
+        } catch (Exception e) {
+            printUsage(options);
+            throw e;
+        } finally {
+            if (job != null)
+                cleanupTempConfFile(job.getConfiguration());
+        }
+
+    }
+
+    private IIInstance getII(String iiName) {
+        IIManager mgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
+        IIInstance ii = mgr.getII(iiName);
+        if (ii == null)
+            throw new IllegalArgumentException("No Inverted Index found by name " + iiName);
+        return ii;
+    }
+
+    private void attachMetadata(IIInstance ii) throws IOException {
+
+        Configuration conf = job.getConfiguration();
+        attachKylinPropsAndMetadata(ii, conf);
+
+        IISegment seg = ii.getFirstSegment();
+        conf.set(BatchConstants.CFG_II_NAME, ii.getName());
+        conf.set(BatchConstants.CFG_II_SEGMENT_NAME, seg.getName());
+    }
+
+    protected void attachKylinPropsAndMetadata(IIInstance ii, Configuration conf) throws IOException {
+        MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
+
+        // write II / model_desc / II_desc / dict / table
+        ArrayList<String> dumpList = new ArrayList<String>();
+        dumpList.add(ii.getResourcePath());
+        dumpList.add(ii.getDescriptor().getModel().getResourcePath());
+        dumpList.add(ii.getDescriptor().getResourcePath());
+
+        for (String tableName : ii.getDescriptor().getModel().getAllTables()) {
+            TableDesc table = metaMgr.getTableDesc(tableName);
+            dumpList.add(table.getResourcePath());
+        }
+        for (IISegment segment : ii.getSegments()) {
+            dumpList.addAll(segment.getDictionaryPaths());
+        }
+
+        attachKylinPropsAndMetadata(dumpList, conf);
+    }
+
+    private void setupMapper(String intermediateTable) throws IOException {
+
+        String[] dbTableNames = HadoopUtil.parseHiveTableName(intermediateTable);
+        HCatInputFormat.setInput(job, dbTableNames[0], dbTableNames[1]);
+
+        job.setInputFormatClass(HCatInputFormat.class);
+
+        job.setMapperClass(InvertedIndexMapper.class);
+        job.setMapOutputKeyClass(LongWritable.class);
+        job.setMapOutputValueClass(ImmutableBytesWritable.class);
+        job.setPartitionerClass(InvertedIndexPartitioner.class);
+    }
+
+    private void setupReducer(Path output, short sharding) throws IOException {
+        job.setReducerClass(InvertedIndexReducer.class);
+        job.setOutputFormatClass(SequenceFileOutputFormat.class);
+        job.setOutputKeyClass(ImmutableBytesWritable.class);
+        job.setOutputValueClass(ImmutableBytesWritable.class);
+
+        job.setNumReduceTasks(sharding);
+
+        FileOutputFormat.setOutputPath(job, output);
+
+        job.getConfiguration().set(BatchConstants.OUTPUT_PATH, output.toString());
+
+        deletePath(job.getConfiguration(), output);
+    }
+
+    public static void main(String[] args) throws Exception {
+        InvertedIndexJob job = new InvertedIndexJob();
+        int exitCode = ToolRunner.run(job, args);
+        System.exit(exitCode);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexMapper.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexMapper.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexMapper.java
new file mode 100644
index 0000000..bc43b65
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexMapper.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.mr.KylinMapper;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.IISegment;
+import org.apache.kylin.invertedindex.index.TableRecord;
+import org.apache.kylin.invertedindex.index.TableRecordInfo;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+
+/**
+ * @author yangli9
+ */
+public class InvertedIndexMapper<KEYIN> extends KylinMapper<KEYIN, HCatRecord, LongWritable, ImmutableBytesWritable> {
+
+    private TableRecordInfo info;
+    private TableRecord rec;
+
+    private LongWritable outputKey;
+    private ImmutableBytesWritable outputValue;
+    private HCatSchema schema = null;
+    private List<HCatFieldSchema> fields;
+
+    @Override
+    protected void setup(Context context) throws IOException {
+        super.bindCurrentConfiguration(context.getConfiguration());
+
+        Configuration conf = context.getConfiguration();
+
+        KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
+        IIManager mgr = IIManager.getInstance(config);
+        IIInstance ii = mgr.getII(conf.get(BatchConstants.CFG_II_NAME));
+        IISegment seg = ii.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);
+        this.info = new TableRecordInfo(seg);
+        this.rec = this.info.createTableRecord();
+
+        outputKey = new LongWritable();
+        outputValue = new ImmutableBytesWritable(rec.getBytes());
+
+        schema = HCatInputFormat.getTableSchema(context.getConfiguration());
+
+        fields = schema.getFields();
+    }
+
+    @Override
+    public void map(KEYIN key, HCatRecord record, Context context) throws IOException, InterruptedException {
+
+        rec.reset();
+        for (int i = 0; i < fields.size(); i++) {
+            Object fieldValue = record.get(i);
+            rec.setValueString(i, fieldValue == null ? null : fieldValue.toString());
+        }
+
+        outputKey.set(rec.getTimestamp());
+        // outputValue wraps the same backing byte array as rec, so rec's contents are written without a copy
+
+        context.write(outputKey, outputValue);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexPartitioner.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexPartitioner.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexPartitioner.java
new file mode 100644
index 0000000..396c221
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexPartitioner.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Partitioner;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.IISegment;
+import org.apache.kylin.invertedindex.index.TableRecord;
+import org.apache.kylin.invertedindex.index.TableRecordInfo;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+
+/**
+ * @author yangli9
+ */
+public class InvertedIndexPartitioner extends Partitioner<LongWritable, ImmutableBytesWritable> implements Configurable {
+
+    private Configuration conf;
+    private TableRecordInfo info;
+    private TableRecord rec;
+
+    @Override
+    public int getPartition(LongWritable key, ImmutableBytesWritable value, int numPartitions) {
+        rec.setBytes(value.get(), value.getOffset(), value.getLength());
+        return rec.getShard();
+    }
+
+    @Override
+    public void setConf(Configuration conf) {
+        this.conf = conf;
+        try {
+            KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
+            IIManager mgr = IIManager.getInstance(config);
+            IIInstance ii = mgr.getII(conf.get(BatchConstants.CFG_II_NAME));
+            IISegment seg = ii.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);
+            this.info = new TableRecordInfo(seg);
+            this.rec = this.info.createTableRecord();
+        } catch (IOException e) {
+            throw new RuntimeException("", e);
+        }
+    }
+
+    @Override
+    public Configuration getConf() {
+        return conf;
+    }
+
+}

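Because InvertedIndexJob sets numReduceTasks to the descriptor's sharding and this partitioner returns the record's shard, each reducer receives exactly one shard, matching the one-region-per-shard pre-split done by IICreateHTableJob. A sketch of routing one record by hand, assuming a job configuration that already carries the II metadata:

    // hypothetical: route a single map output record
    InvertedIndexPartitioner partitioner = new InvertedIndexPartitioner();
    partitioner.setConf(jobConf);   // loads II metadata and builds the TableRecordInfo
    int reducer = partitioner.getPartition(key, value, sharding);   // == the record's shard
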
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexReducer.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexReducer.java b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexReducer.java
new file mode 100644
index 0000000..5a69eec
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexReducer.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.hadoop.invertedindex;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.mr.KylinReducer;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.IISegment;
+import org.apache.kylin.invertedindex.index.IncrementalSliceMaker;
+import org.apache.kylin.invertedindex.index.Slice;
+import org.apache.kylin.invertedindex.index.TableRecord;
+import org.apache.kylin.invertedindex.index.TableRecordInfo;
+import org.apache.kylin.invertedindex.model.IIKeyValueCodec;
+import org.apache.kylin.invertedindex.model.IIRow;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+
+/**
+ * @author yangli9
+ */
+public class InvertedIndexReducer extends KylinReducer<LongWritable, ImmutableBytesWritable, ImmutableBytesWritable, ImmutableBytesWritable> {
+
+    private TableRecordInfo info;
+    private TableRecord rec;
+    private IncrementalSliceMaker builder;
+    private IIKeyValueCodec kv;
+
+    @Override
+    protected void setup(Context context) throws IOException {
+        super.bindCurrentConfiguration(context.getConfiguration());
+
+        Configuration conf = context.getConfiguration();
+        KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
+        IIManager mgr = IIManager.getInstance(config);
+        IIInstance ii = mgr.getII(conf.get(BatchConstants.CFG_II_NAME));
+        IISegment seg = ii.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);
+        info = new TableRecordInfo(seg);
+        rec = info.createTableRecord();
+        builder = null;
+        kv = new IIKeyValueCodec(info.getDigest());
+    }
+
+    @Override
+    public void reduce(LongWritable key, Iterable<ImmutableBytesWritable> values, Context context) //
+            throws IOException, InterruptedException {
+        for (ImmutableBytesWritable v : values) {
+            rec.setBytes(v.get(), v.getOffset(), v.getLength());
+
+            if (builder == null) {
+                builder = new IncrementalSliceMaker(info, rec.getShard());
+            }
+
+            // TODO: remove this debug log
+            System.out.println(rec.getShard() + " - " + rec);
+
+            Slice slice = builder.append(rec);
+            if (slice != null) {
+                output(slice, context);
+            }
+        }
+    }
+
+    @Override
+    protected void cleanup(Context context) throws IOException, InterruptedException {
+        Slice slice = builder.close();
+        if (slice != null) {
+            output(slice, context);
+        }
+    }
+
+    private void output(Slice slice, Context context) throws IOException, InterruptedException {
+        for (IIRow pair : kv.encodeKeyValue(slice)) {
+            context.write(pair.getKey(), pair.getValue());
+        }
+    }
+
+}
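
The reducer above leans on a simple buffering contract from
IncrementalSliceMaker: append() returns a completed Slice only once one fills
up (null otherwise), and close() flushes the final, possibly partial slice. A
sketch of that contract, where emit() is a hypothetical stand-in for the
reducer's output(slice, context):

    IncrementalSliceMaker maker = new IncrementalSliceMaker(info, shard);
    for (TableRecord r : records) {
        Slice full = maker.append(r);  // non-null only when a slice fills up
        if (full != null)
            emit(full);
    }
    Slice tail = maker.close();        // last slice may be partial, or null if empty
    if (tail != null)
        emit(tail);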

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/invertedindex/IIJob.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/invertedindex/IIJob.java b/invertedindex/src/main/java/org/apache/kylin/job/invertedindex/IIJob.java
new file mode 100644
index 0000000..0af846b
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/invertedindex/IIJob.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.invertedindex;
+
+import org.apache.kylin.job.execution.DefaultChainedExecutable;
+
+/**
+ */
+public class IIJob extends DefaultChainedExecutable {
+
+    public IIJob() {
+        super();
+    }
+
+    private static final String II_INSTANCE_NAME = "iiName";
+    private static final String SEGMENT_ID = "segmentId";
+
+    void setIIName(String name) {
+        setParam(II_INSTANCE_NAME, name);
+    }
+
+    public String getIIName() {
+        return getParam(II_INSTANCE_NAME);
+    }
+
+    void setSegmentId(String segmentId) {
+        setParam(SEGMENT_ID, segmentId);
+    }
+
+    public String getSegmentId() {
+        return getParam(SEGMENT_ID);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java b/invertedindex/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java
new file mode 100644
index 0000000..4bd06c5
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java
@@ -0,0 +1,230 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.invertedindex;
+
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.TimeZone;
+
+import org.apache.kylin.engine.mr.common.HadoopShellExecutable;
+import org.apache.kylin.engine.mr.common.MapReduceExecutable;
+import org.apache.kylin.invertedindex.IISegment;
+import org.apache.kylin.invertedindex.model.IIJoinedFlatTableDesc;
+import org.apache.kylin.job.constant.ExecutableConstants;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.hadoop.dict.CreateInvertedIndexDictionaryJob;
+import org.apache.kylin.job.hadoop.invertedindex.IIBulkLoadJob;
+import org.apache.kylin.job.hadoop.invertedindex.IICreateHFileJob;
+import org.apache.kylin.job.hadoop.invertedindex.IICreateHTableJob;
+import org.apache.kylin.job.hadoop.invertedindex.IIDistinctColumnsJob;
+import org.apache.kylin.job.hadoop.invertedindex.InvertedIndexJob;
+import org.apache.kylin.metadata.model.DataModelDesc.RealizationCapacity;
+import org.apache.kylin.source.hive.HiveMRInput.BatchCubingInputSide;
+
+import com.google.common.base.Preconditions;
+
+/**
+ */
+public final class IIJobBuilder {
+
+    final JobEngineConfig engineConfig;
+
+    public IIJobBuilder(JobEngineConfig engineConfig) {
+        this.engineConfig = engineConfig;
+    }
+
+    public IIJob buildJob(IISegment seg, String submitter) {
+        checkPreconditions(seg);
+
+        IIJob result = initialJob(seg, "BUILD", submitter);
+        final String jobId = result.getId();
+        final IIJoinedFlatTableDesc intermediateTableDesc = new IIJoinedFlatTableDesc(seg.getIIDesc());
+        final String intermediateTableIdentity = getIntermediateTableIdentity(intermediateTableDesc);
+        final String factDistinctColumnsPath = getIIDistinctColumnsPath(seg, jobId);
+        final String iiRootPath = getJobWorkingDir(jobId) + "/" + seg.getIIInstance().getName() + "/";
+        final String iiPath = iiRootPath + "*";
+
+        final AbstractExecutable intermediateHiveTableStep = createFlatHiveTableStep(intermediateTableDesc, jobId);
+        result.addTask(intermediateHiveTableStep);
+
+        result.addTask(createFactDistinctColumnsStep(seg, intermediateTableIdentity, jobId, factDistinctColumnsPath));
+
+        result.addTask(createBuildDictionaryStep(seg, factDistinctColumnsPath));
+
+        result.addTask(createInvertedIndexStep(seg, intermediateTableIdentity, iiRootPath));
+
+        // create htable step
+        result.addTask(createCreateHTableStep(seg));
+
+        // generate hfiles step
+        result.addTask(createConvertToHfileStep(seg, iiPath, jobId));
+
+        // bulk load step
+        result.addTask(createBulkLoadStep(seg, jobId));
+
+        return result;
+    }
+
+    private AbstractExecutable createFlatHiveTableStep(IIJoinedFlatTableDesc intermediateTableDesc, String jobId) {
+        return BatchCubingInputSide.createFlatHiveTableStep(engineConfig, intermediateTableDesc, jobId);
+    }
+
+    private IIJob initialJob(IISegment seg, String type, String submitter) {
+        IIJob result = new IIJob();
+        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss");
+        format.setTimeZone(TimeZone.getTimeZone(engineConfig.getTimeZone()));
+        result.setIIName(seg.getIIInstance().getName());
+        result.setSegmentId(seg.getUuid());
+        result.setName(seg.getIIInstance().getName() + " - " + seg.getName() + " - " + type + " - " + format.format(new Date(System.currentTimeMillis())));
+        result.setSubmitter(submitter);
+        return result;
+    }
+
+    private void checkPreconditions(IISegment seg) {
+        Preconditions.checkNotNull(seg, "segment cannot be null");
+        Preconditions.checkNotNull(engineConfig, "jobEngineConfig cannot be null");
+    }
+
+    private void appendMapReduceParameters(StringBuilder builder, JobEngineConfig engineConfig) {
+        try {
+            String jobConf = engineConfig.getHadoopJobConfFilePath(RealizationCapacity.MEDIUM);
+            if (jobConf != null && jobConf.length() > 0) {
+                builder.append(" -conf ").append(jobConf);
+            }
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private String getIIDistinctColumnsPath(IISegment seg, String jobUuid) {
+        return getJobWorkingDir(jobUuid) + "/" + seg.getIIInstance().getName() + "/ii_distinct_columns";
+    }
+
+    private String getHFilePath(IISegment seg, String jobId) {
+        return getJobWorkingDir(jobId) + "/" + seg.getIIInstance().getName() + "/hfile/";
+    }
+
+    private MapReduceExecutable createFactDistinctColumnsStep(IISegment seg, String factTableName, String jobId, String output) {
+        MapReduceExecutable result = new MapReduceExecutable();
+        result.setName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS);
+        result.setMapReduceJobClass(IIDistinctColumnsJob.class);
+        StringBuilder cmd = new StringBuilder();
+        appendMapReduceParameters(cmd, engineConfig);
+        appendExecCmdParameters(cmd, "tablename", factTableName);
+        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
+        appendExecCmdParameters(cmd, "output", output);
+        appendExecCmdParameters(cmd, "jobname", "Kylin_Fact_Distinct_Columns_" + seg.getIIInstance().getName() + "_Step");
+
+        result.setMapReduceParams(cmd.toString());
+        return result;
+    }
+
+    private HadoopShellExecutable createBuildDictionaryStep(IISegment seg, String factDistinctColumnsPath) {
+        // build dictionary step
+        HadoopShellExecutable buildDictionaryStep = new HadoopShellExecutable();
+        buildDictionaryStep.setName(ExecutableConstants.STEP_NAME_BUILD_DICTIONARY);
+        StringBuilder cmd = new StringBuilder();
+        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
+        appendExecCmdParameters(cmd, "input", factDistinctColumnsPath);
+
+        buildDictionaryStep.setJobParams(cmd.toString());
+        buildDictionaryStep.setJobClass(CreateInvertedIndexDictionaryJob.class);
+        return buildDictionaryStep;
+    }
+
+    private MapReduceExecutable createInvertedIndexStep(IISegment seg, String intermediateHiveTable, String iiOutputTempPath) {
+        // build inverted index step
+        MapReduceExecutable buildIIStep = new MapReduceExecutable();
+
+        StringBuilder cmd = new StringBuilder();
+        appendMapReduceParameters(cmd, engineConfig);
+
+        buildIIStep.setName(ExecutableConstants.STEP_NAME_BUILD_II);
+
+        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
+        appendExecCmdParameters(cmd, "tablename", intermediateHiveTable);
+        appendExecCmdParameters(cmd, "output", iiOutputTempPath);
+        appendExecCmdParameters(cmd, "jobname", ExecutableConstants.STEP_NAME_BUILD_II);
+
+        buildIIStep.setMapReduceParams(cmd.toString());
+        buildIIStep.setMapReduceJobClass(InvertedIndexJob.class);
+        return buildIIStep;
+    }
+
+    private HadoopShellExecutable createCreateHTableStep(IISegment seg) {
+        HadoopShellExecutable createHtableStep = new HadoopShellExecutable();
+        createHtableStep.setName(ExecutableConstants.STEP_NAME_CREATE_HBASE_TABLE);
+        StringBuilder cmd = new StringBuilder();
+        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
+        appendExecCmdParameters(cmd, "htablename", seg.getStorageLocationIdentifier());
+
+        createHtableStep.setJobParams(cmd.toString());
+        createHtableStep.setJobClass(IICreateHTableJob.class);
+
+        return createHtableStep;
+    }
+
+    private MapReduceExecutable createConvertToHfileStep(IISegment seg, String inputPath, String jobId) {
+        MapReduceExecutable createHFilesStep = new MapReduceExecutable();
+        createHFilesStep.setName(ExecutableConstants.STEP_NAME_CONVERT_II_TO_HFILE);
+        StringBuilder cmd = new StringBuilder();
+
+        appendMapReduceParameters(cmd, engineConfig);
+        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
+        appendExecCmdParameters(cmd, "input", inputPath);
+        appendExecCmdParameters(cmd, "output", getHFilePath(seg, jobId));
+        appendExecCmdParameters(cmd, "htablename", seg.getStorageLocationIdentifier());
+        appendExecCmdParameters(cmd, "jobname", "Kylin_HFile_Generator_" + seg.getIIInstance().getName() + "_Step");
+
+        createHFilesStep.setMapReduceParams(cmd.toString());
+        createHFilesStep.setMapReduceJobClass(IICreateHFileJob.class);
+
+        return createHFilesStep;
+    }
+
+    private HadoopShellExecutable createBulkLoadStep(IISegment seg, String jobId) {
+        HadoopShellExecutable bulkLoadStep = new HadoopShellExecutable();
+        bulkLoadStep.setName(ExecutableConstants.STEP_NAME_BULK_LOAD_HFILE);
+
+        StringBuilder cmd = new StringBuilder();
+        appendExecCmdParameters(cmd, "input", getHFilePath(seg, jobId));
+        appendExecCmdParameters(cmd, "htablename", seg.getStorageLocationIdentifier());
+        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
+
+        bulkLoadStep.setJobParams(cmd.toString());
+        bulkLoadStep.setJobClass(IIBulkLoadJob.class);
+
+        return bulkLoadStep;
+    }
+
+    private StringBuilder appendExecCmdParameters(StringBuilder buf, String paraName, String paraValue) {
+        return buf.append(" -").append(paraName).append(" ").append(paraValue);
+    }
+
+    private String getJobWorkingDir(String uuid) {
+        return engineConfig.getHdfsWorkingDirectory() + "kylin-" + uuid;
+    }
+
+    private String getIntermediateTableIdentity(IIJoinedFlatTableDesc intermediateTableDesc) {
+        return engineConfig.getConfig().getHiveDatabaseForIntermediateTable() + "." + intermediateTableDesc.getTableName();
+    }
+}
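
For reference, a hedged sketch of driving this builder end to end; the
JobEngineConfig construction, the segment lookup, and the getTasks() traversal
are assumptions based on the surrounding code, not part of this diff:

    KylinConfig config = KylinConfig.getInstanceFromEnv();
    IIInstance ii = IIManager.getInstance(config).getII("example_ii"); // assumed name
    IISegment seg = ii.getFirstSegment();                              // assumes a segment exists

    JobEngineConfig engineConfig = new JobEngineConfig(config);
    IIJob job = new IIJobBuilder(engineConfig).buildJob(seg, "ADMIN");

    // a chained job is an ordered list of executable steps
    for (AbstractExecutable task : job.getTasks()) {
        System.out.println(task.getName());
    }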


http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/invertedindex/src/main/java/org/apache/kylin/job/tools/IICLI.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/job/tools/IICLI.java b/invertedindex/src/main/java/org/apache/kylin/job/tools/IICLI.java
new file mode 100644
index 0000000..8c39aa1
--- /dev/null
+++ b/invertedindex/src/main/java/org/apache/kylin/job/tools/IICLI.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.tools;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.SequenceFile.Reader;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.mr.HadoopUtil;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.invertedindex.IIManager;
+import org.apache.kylin.invertedindex.index.RawTableRecord;
+import org.apache.kylin.invertedindex.index.Slice;
+import org.apache.kylin.invertedindex.index.TableRecord;
+import org.apache.kylin.invertedindex.index.TableRecordInfo;
+import org.apache.kylin.invertedindex.model.IIKeyValueCodec;
+import org.apache.kylin.invertedindex.model.IIRow;
+
+/**
+ * @author yangli9
+ */
+public class IICLI {
+
+    public static void main(String[] args) throws IOException {
+        Configuration hconf = HadoopUtil.getCurrentConfiguration();
+        IIManager mgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
+
+        String iiName = args[0];
+        IIInstance ii = mgr.getII(iiName);
+
+        String path = args[1];
+        System.out.println("Reading from " + path + " ...");
+
+        TableRecordInfo info = new TableRecordInfo(ii.getFirstSegment());
+        IIKeyValueCodec codec = new IIKeyValueCodec(info.getDigest());
+        int count = 0;
+        for (Slice slice : codec.decodeKeyValue(readSequenceKVs(hconf, path))) {
+            for (RawTableRecord rec : slice) {
+                System.out.println(new TableRecord(rec, info).toString());
+                count++;
+            }
+        }
+        System.out.println("Total " + count + " records");
+    }
+
+    public static Iterable<IIRow> readSequenceKVs(Configuration hconf, String path) throws IOException {
+        final Reader reader = new Reader(hconf, SequenceFile.Reader.file(new Path(path)));
+        return new Iterable<IIRow>() {
+            @Override
+            public Iterator<IIRow> iterator() {
+                return new Iterator<IIRow>() {
+                    ImmutableBytesWritable k = new ImmutableBytesWritable();
+                    ImmutableBytesWritable v = new ImmutableBytesWritable();
+                    IIRow pair = new IIRow(k, v, null);
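+                    // note: k, v and pair are reused across iterations; each row
+                    // must be consumed before the next hasNext() advances the reader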
+
+                    @Override
+                    public boolean hasNext() {
+                        boolean hasNext = false;
+                        try {
+                            hasNext = reader.next(k, v);
+                        } catch (IOException e) {
+                            throw new RuntimeException(e);
+                        } finally {
+                            if (!hasNext) {
+                                IOUtils.closeQuietly(reader);
+                            }
+                        }
+                        return hasNext;
+                    }
+
+                    @Override
+                    public IIRow next() {
+                        return pair;
+                    }
+
+                    @Override
+                    public void remove() {
+                        throw new UnsupportedOperationException();
+                    }
+                };
+            }
+        };
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/.gitignore
----------------------------------------------------------------------
diff --git a/job/.gitignore b/job/.gitignore
deleted file mode 100644
index 0b42d2d..0000000
--- a/job/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/target

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/.settings/org.eclipse.core.resources.prefs
----------------------------------------------------------------------
diff --git a/job/.settings/org.eclipse.core.resources.prefs b/job/.settings/org.eclipse.core.resources.prefs
deleted file mode 100644
index 04cfa2c..0000000
--- a/job/.settings/org.eclipse.core.resources.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-eclipse.preferences.version=1
-encoding//src/main/java=UTF-8
-encoding//src/main/resources=UTF-8
-encoding//src/test/java=UTF-8
-encoding//src/test/resources=UTF-8
-encoding/<project>=UTF-8

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/job/.settings/org.eclipse.jdt.core.prefs b/job/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index a903301..0000000
--- a/job/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,379 +0,0 @@
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=disabled
-org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore
-org.eclipse.jdt.core.compiler.annotation.nonnull=org.eclipse.jdt.annotation.NonNull
-org.eclipse.jdt.core.compiler.annotation.nonnullbydefault=org.eclipse.jdt.annotation.NonNullByDefault
-org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable
-org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
-org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
-org.eclipse.jdt.core.compiler.compliance=1.7
-org.eclipse.jdt.core.compiler.debug.lineNumber=generate
-org.eclipse.jdt.core.compiler.debug.localVariable=generate
-org.eclipse.jdt.core.compiler.debug.sourceFile=generate
-org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=warning
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
-org.eclipse.jdt.core.compiler.problem.comparingIdentical=warning
-org.eclipse.jdt.core.compiler.problem.deadCode=warning
-org.eclipse.jdt.core.compiler.problem.deprecation=warning
-org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
-org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
-org.eclipse.jdt.core.compiler.problem.discouragedReference=warning
-org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.explicitlyClosedAutoCloseable=ignore
-org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore
-org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
-org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore
-org.eclipse.jdt.core.compiler.problem.finalParameterBound=warning
-org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=warning
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=warning
-org.eclipse.jdt.core.compiler.problem.includeNullInfoFromAsserts=disabled
-org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=warning
-org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=warning
-org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore
-org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore
-org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=warning
-org.eclipse.jdt.core.compiler.problem.missingDefaultCase=ignore
-org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingEnumCaseDespiteDefault=disabled
-org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMethodImplementation=enabled
-org.eclipse.jdt.core.compiler.problem.missingSerialVersion=warning
-org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore
-org.eclipse.jdt.core.compiler.problem.noEffectAssignment=warning
-org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=warning
-org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
-org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning
-org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=error
-org.eclipse.jdt.core.compiler.problem.nullReference=warning
-org.eclipse.jdt.core.compiler.problem.nullSpecViolation=error
-org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning
-org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=warning
-org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore
-org.eclipse.jdt.core.compiler.problem.potentiallyUnclosedCloseable=ignore
-org.eclipse.jdt.core.compiler.problem.rawTypeReference=warning
-org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=warning
-org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore
-org.eclipse.jdt.core.compiler.problem.reportMethodCanBePotentiallyStatic=ignore
-org.eclipse.jdt.core.compiler.problem.reportMethodCanBeStatic=ignore
-org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
-org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=warning
-org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled
-org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
-org.eclipse.jdt.core.compiler.problem.syntacticNullAnalysisForFields=disabled
-org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore
-org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning
-org.eclipse.jdt.core.compiler.problem.unavoidableGenericTypeProblems=enabled
-org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=warning
-org.eclipse.jdt.core.compiler.problem.unclosedCloseable=warning
-org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore
-org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore
-org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore
-org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.unusedImport=warning
-org.eclipse.jdt.core.compiler.problem.unusedLabel=warning
-org.eclipse.jdt.core.compiler.problem.unusedLocal=warning
-org.eclipse.jdt.core.compiler.problem.unusedObjectAllocation=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
-org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=warning
-org.eclipse.jdt.core.compiler.problem.unusedTypeParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=warning
-org.eclipse.jdt.core.compiler.source=1.7
-org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation=0
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_assignment=0
-org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
-org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
-org.eclipse.jdt.core.formatter.alignment_for_enum_constants=0
-org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
-org.eclipse.jdt.core.formatter.alignment_for_method_declaration=0
-org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_resources_in_try=80
-org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch=16
-org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_after_package=1
-org.eclipse.jdt.core.formatter.blank_lines_before_field=0
-org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
-org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
-org.eclipse.jdt.core.formatter.blank_lines_before_method=1
-org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
-org.eclipse.jdt.core.formatter.blank_lines_before_package=0
-org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
-org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
-org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
-org.eclipse.jdt.core.formatter.comment.format_block_comments=false
-org.eclipse.jdt.core.formatter.comment.format_header=false
-org.eclipse.jdt.core.formatter.comment.format_html=true
-org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=false
-org.eclipse.jdt.core.formatter.comment.format_line_comments=false
-org.eclipse.jdt.core.formatter.comment.format_source_code=true
-org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
-org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
-org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
-org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
-org.eclipse.jdt.core.formatter.comment.line_length=80
-org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries=true
-org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries=true
-org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=false
-org.eclipse.jdt.core.formatter.compact_else_if=true
-org.eclipse.jdt.core.formatter.continuation_indentation=2
-org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
-org.eclipse.jdt.core.formatter.disabling_tag=@formatter\:off
-org.eclipse.jdt.core.formatter.enabling_tag=@formatter\:on
-org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
-org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
-org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_empty_lines=false
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false
-org.eclipse.jdt.core.formatter.indentation.size=4
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_label=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources=insert
-org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
-org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.join_lines_in_comments=true
-org.eclipse.jdt.core.formatter.join_wrapped_lines=true
-org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.lineSplit=999
-org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
-org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
-org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
-org.eclipse.jdt.core.formatter.tabulation.char=space
-org.eclipse.jdt.core.formatter.tabulation.size=4
-org.eclipse.jdt.core.formatter.use_on_off_tags=false
-org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
-org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
-org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch=true
-org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested=true

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/.settings/org.eclipse.jdt.ui.prefs
----------------------------------------------------------------------
diff --git a/job/.settings/org.eclipse.jdt.ui.prefs b/job/.settings/org.eclipse.jdt.ui.prefs
deleted file mode 100644
index dece0e6..0000000
--- a/job/.settings/org.eclipse.jdt.ui.prefs
+++ /dev/null
@@ -1,7 +0,0 @@
-eclipse.preferences.version=1
-formatter_profile=_Space Indent & Long Lines
-formatter_settings_version=12
-org.eclipse.jdt.ui.ignorelowercasenames=true
-org.eclipse.jdt.ui.importorder=java;javax;org;com;
-org.eclipse.jdt.ui.ondemandthreshold=99
-org.eclipse.jdt.ui.staticondemandthreshold=1

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/pom.xml
----------------------------------------------------------------------
diff --git a/job/pom.xml b/job/pom.xml
deleted file mode 100644
index 2e1e64b..0000000
--- a/job/pom.xml
+++ /dev/null
@@ -1,314 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
- 
-     http://www.apache.org/licenses/LICENSE-2.0
- 
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.kylin</groupId>
-        <artifactId>kylin</artifactId>
-        <version>2.0-incubating-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>kylin-job</artifactId>
-    <name>Kylin:Job</name>
-    <url>http://maven.apache.org</url>
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    </properties>
-
-    <dependencies>
-
-        <!--Kylin Jar -->
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-core-job</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-engine-mr</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-source-hive</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-invertedindex</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-streaming</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-storage-hbase</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>commons-daemon</groupId>
-            <artifactId>commons-daemon</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.curator</groupId>
-            <artifactId>curator-framework</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.zookeeper</groupId>
-                    <artifactId>zookeeper</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.curator</groupId>
-            <artifactId>curator-recipes</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.zookeeper</groupId>
-                    <artifactId>zookeeper</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <!-- Env & Test -->
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-core-common</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-core-job</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-storage-hbase</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-annotations</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-minicluster</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.mrunit</groupId>
-            <artifactId>mrunit</artifactId>
-            <classifier>hadoop2</classifier>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop2-compat</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-client</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-server</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.maven</groupId>
-            <artifactId>maven-model</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hive.hcatalog</groupId>
-            <artifactId>hive-hcatalog-core</artifactId>
-            <version>${hive-hcatalog.version}</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-testing-util</artifactId>
-            <version>${hbase-hadoop2.version}</version>
-            <scope>test</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>javax.servlet</groupId>
-                    <artifactId>servlet-api</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>javax.servlet.jsp</groupId>
-                    <artifactId>jsp-api</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <version>2.3</version>
-
-                <executions>
-                    <execution>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                        <configuration>
-                            <minimizeJar>false</minimizeJar>
-                            <shadedArtifactAttached>true</shadedArtifactAttached>
-                            <shadedClassifierName>job</shadedClassifierName>
-                            <filters>
-                                <filter>
-                                    <artifact>*:*</artifact>
-                                    <excludes>
-                                        <exclude>META-INF/*.SF</exclude>
-                                        <exclude>META-INF/*.DSA</exclude>
-                                        <exclude>META-INF/*.RSA</exclude>
-                                    </excludes>
-                                </filter>
-                            </filters>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
-
-    <profiles>
-        <profile>
-            <!-- This profile adds/overrides a few features of the 'apache-release'
-                 profile in the parent pom. -->
-            <id>apache-release</id>
-            <build>
-                <plugins>
-                    <!-- Apache-RAT checks for files without headers.
-                         If run on a messy developer's sandbox, it will fail.
-                         This serves as a reminder to only build a release in a clean
-                         sandbox! -->
-                    <plugin>
-                        <groupId>org.apache.rat</groupId>
-                        <artifactId>apache-rat-plugin</artifactId>
-                        <configuration>
-                            <numUnapprovedLicenses>0</numUnapprovedLicenses>
-                            <excludes>
-                                <!-- test data -->
-                                <exclude>src/test/resources/data/test_cal_dt/part-r-00000</exclude>
-                                <exclude>src/test/**/*.json</exclude>
-                            </excludes>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <phase>verify</phase>
-                                <goals>
-                                    <goal>check</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                        <dependencies>
-                            <dependency>
-                                <groupId>org.apache.maven.doxia</groupId>
-                                <artifactId>doxia-core</artifactId>
-                                <version>1.6</version>
-                                <exclusions>
-                                    <exclusion>
-                                        <groupId>xerces</groupId>
-                                        <artifactId>xercesImpl</artifactId>
-                                    </exclusion>
-                                </exclusions>
-                            </dependency>
-                        </dependencies>
-                    </plugin>
-                    <plugin>
-                        <groupId>net.ju-n.maven.plugins</groupId>
-                        <artifactId>checksum-maven-plugin</artifactId>
-                        <version>1.2</version>
-                        <executions>
-                            <execution>
-                                <goals>
-                                    <goal>artifacts</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                        <configuration>
-                            <algorithms>
-                                <algorithm>MD5</algorithm>
-                                <algorithm>SHA-1</algorithm>
-                            </algorithms>
-                            <failOnError>false</failOnError>
-                        </configuration>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java
deleted file mode 100644
index 87ee70e..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.dict;
-
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.dict.DistinctColumnValuesProvider;
-import org.apache.kylin.engine.mr.DFSFileTable;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.source.ReadableTable;
-
-/**
- */
-public class CreateInvertedIndexDictionaryJob extends AbstractHadoopJob {
-
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        try {
-            options.addOption(OPTION_II_NAME);
-            options.addOption(OPTION_INPUT_PATH);
-            parseOptions(options, args);
-
-            final String iiname = getOptionValue(OPTION_II_NAME);
-            final String factColumnsInputPath = getOptionValue(OPTION_INPUT_PATH);
-            final KylinConfig config = KylinConfig.getInstanceFromEnv();
-
-            IIManager mgr = IIManager.getInstance(config);
-            IIInstance ii = mgr.getII(iiname);
-
-            mgr.buildInvertedIndexDictionary(ii.getFirstSegment(), new DistinctColumnValuesProvider() {
-                @Override
-                public ReadableTable getDistinctValuesFor(TblColRef col) {
-                    return new DFSFileTable(factColumnsInputPath + "/" + col.getName(), -1);
-                }
-            });
-            return 0;
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
-    }
-
-    public static void main(String[] args) throws Exception {
-        int exitCode = ToolRunner.run(new CreateInvertedIndexDictionaryJob(), args);
-        System.exit(exitCode);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIBulkLoadJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIBulkLoadJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIBulkLoadJob.java
deleted file mode 100644
index d7b088b..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIBulkLoadJob.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.invertedindex.model.IIDesc;
-import org.apache.kylin.metadata.model.SegmentStatusEnum;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
-
-/**
- * @author ysong1
- * 
- */
-public class IIBulkLoadJob extends AbstractHadoopJob {
-
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        try {
-            options.addOption(OPTION_INPUT_PATH);
-            options.addOption(OPTION_HTABLE_NAME);
-            options.addOption(OPTION_II_NAME);
-            parseOptions(options, args);
-
-            String tableName = getOptionValue(OPTION_HTABLE_NAME);
-            String input = getOptionValue(OPTION_INPUT_PATH);
-            String iiname = getOptionValue(OPTION_II_NAME);
-
-            FileSystem fs = FileSystem.get(getConf());
-            FsPermission permission = new FsPermission((short) 0777);
-            fs.setPermission(new Path(input, IIDesc.HBASE_FAMILY), permission);
-
-            int hbaseExitCode = ToolRunner.run(new LoadIncrementalHFiles(getConf()), new String[] { input, tableName });
-
-            IIManager mgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
-            IIInstance ii = mgr.getII(iiname);
-            IISegment seg = ii.getFirstSegment();
-            seg.setStorageLocationIdentifier(tableName);
-            seg.setStatus(SegmentStatusEnum.READY);
-            mgr.updateII(ii);
-
-            return hbaseExitCode;
-
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
-    }
-
-    public static void main(String[] args) throws Exception {
-        IIBulkLoadJob job = new IIBulkLoadJob();
-        job.setConf(HBaseConnection.newHBaseConfiguration(KylinConfig.getInstanceFromEnv().getStorageUrl()));
-        int exitCode = ToolRunner.run(job, args);
-        System.exit(exitCode);
-    }
-}
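
The job above chains three steps: open up the permissions on the HFile column-family directory, hand the files to HBase's incremental bulk loader, then flip the II segment to READY. For reference, a minimal sketch of just the bulk-load call; the directory and table name below are placeholders, not values from this commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
    import org.apache.hadoop.util.ToolRunner;

    public class BulkLoadSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = HBaseConfiguration.create();
            // LoadIncrementalHFiles takes: <hfile dir> <table name>
            int rc = ToolRunner.run(conf, new LoadIncrementalHFiles(conf),
                    new String[] { "/tmp/ii/hfile", "KYLIN_II_SAMPLE" });
            System.exit(rc);
        }
    }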

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileJob.java
deleted file mode 100644
index b2282f1..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileJob.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author yangli9
- * 
- */
-public class IICreateHFileJob extends AbstractHadoopJob {
-
-    protected static final Logger logger = LoggerFactory.getLogger(IICreateHFileJob.class);
-
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        try {
-            options.addOption(OPTION_JOB_NAME);
-            options.addOption(OPTION_II_NAME);
-            options.addOption(OPTION_INPUT_PATH);
-            options.addOption(OPTION_OUTPUT_PATH);
-            options.addOption(OPTION_HTABLE_NAME);
-            parseOptions(options, args);
-
-            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
-
-            job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
-
-            setJobClasspath(job);
-
-            addInputDirs(getOptionValue(OPTION_INPUT_PATH), job);
-            FileOutputFormat.setOutputPath(job, output);
-
-            job.setInputFormatClass(SequenceFileInputFormat.class);
-            job.setMapperClass(IICreateHFileMapper.class);
-            job.setMapOutputKeyClass(ImmutableBytesWritable.class);
-            job.setMapOutputValueClass(KeyValue.class);
-
-            String tableName = getOptionValue(OPTION_HTABLE_NAME);
-            HTable htable = new HTable(HBaseConfiguration.create(getConf()), tableName);
-            HFileOutputFormat.configureIncrementalLoad(job, htable);
-
-            this.deletePath(job.getConfiguration(), output);
-
-            return waitForCompletion(job);
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
-    }
-
-    public static void main(String[] args) throws Exception {
-        IICreateHFileJob job = new IICreateHFileJob();
-        job.setConf(HBaseConnection.newHBaseConfiguration(KylinConfig.getInstanceFromEnv().getStorageUrl()));
-        int exitCode = ToolRunner.run(job, args);
-        System.exit(exitCode);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileMapper.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileMapper.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileMapper.java
deleted file mode 100644
index 1adf8d6..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHFileMapper.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.Type;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.kylin.engine.mr.KylinMapper;
-import org.apache.kylin.invertedindex.model.IIDesc;
-
-/**
- * @author yangli9
- */
-public class IICreateHFileMapper extends KylinMapper<ImmutableBytesWritable, ImmutableBytesWritable, ImmutableBytesWritable, KeyValue> {
-
-    long timestamp;
-
-    @Override
-    protected void setup(Context context) throws IOException, InterruptedException {
-        super.bindCurrentConfiguration(context.getConfiguration());
-
-        timestamp = System.currentTimeMillis();
-    }
-
-    @Override
-    protected void map(ImmutableBytesWritable key, ImmutableBytesWritable value, Context context) throws IOException, InterruptedException {
-
-        KeyValue kv = new KeyValue(key.get(), key.getOffset(), key.getLength(), //
-                IIDesc.HBASE_FAMILY_BYTES, 0, IIDesc.HBASE_FAMILY_BYTES.length, //
-                IIDesc.HBASE_QUALIFIER_BYTES, 0, IIDesc.HBASE_QUALIFIER_BYTES.length, //
-                timestamp, Type.Put, //
-                value.get(), value.getOffset(), value.getLength());
-
-        context.write(key, kv);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHTableJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHTableJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHTableJob.java
deleted file mode 100644
index 22ed3a2..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IICreateHTableJob.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.io.compress.Compression;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
-import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.BytesUtil;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.model.IIDesc;
-import org.apache.kylin.invertedindex.model.IIKeyValueCodec;
-import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.steps.HBaseConnection;
-import org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI;
-
-/**
- * @author George Song (ysong1)
- */
-public class IICreateHTableJob extends AbstractHadoopJob {
-
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        try {
-            options.addOption(OPTION_II_NAME);
-            options.addOption(OPTION_HTABLE_NAME);
-            parseOptions(options, args);
-
-            String tableName = getOptionValue(OPTION_HTABLE_NAME);
-            String iiName = getOptionValue(OPTION_II_NAME);
-
-            KylinConfig config = KylinConfig.getInstanceFromEnv();
-            IIManager iiManager = IIManager.getInstance(config);
-            IIInstance ii = iiManager.getII(iiName);
-            int sharding = ii.getDescriptor().getSharding();
-
-            HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(tableName));
-            HColumnDescriptor cf = new HColumnDescriptor(IIDesc.HBASE_FAMILY);
-            cf.setMaxVersions(1);
-
-            String hbaseDefaultCC = config.getHbaseDefaultCompressionCodec().toLowerCase();
-
-            switch (hbaseDefaultCC) {
-            case "snappy": {
-                logger.info("hbase will use snappy to compress data");
-                cf.setCompressionType(Compression.Algorithm.SNAPPY);
-                break;
-            }
-            case "lzo": {
-                logger.info("hbase will use lzo to compress data");
-                cf.setCompressionType(Compression.Algorithm.LZO);
-                break;
-            }
-            case "gz":
-            case "gzip": {
-                logger.info("hbase will use gzip to compress data");
-                cf.setCompressionType(Compression.Algorithm.GZ);
-                break;
-            }
-            case "lz4": {
-                logger.info("hbase will use lz4 to compress data");
-                cf.setCompressionType(Compression.Algorithm.LZ4);
-                break;
-            }
-            default: {
-                logger.info("hbase will not user any compression codec to compress data");
-            }
-            }
-
-            cf.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
-            tableDesc.addFamily(cf);
-            tableDesc.setValue(IRealizationConstants.HTableTag, config.getMetadataUrlPrefix());
-            tableDesc.setValue(IRealizationConstants.HTableCreationTime, String.valueOf(System.currentTimeMillis()));
-            tableDesc.setValue(HTableDescriptor.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
-
-            Configuration conf = HBaseConfiguration.create(getConf());
-            if (User.isHBaseSecurityEnabled(conf)) {
-                // add coprocessor for bulk load
-                tableDesc.addCoprocessor("org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint");
-            }
-
-            DeployCoprocessorCLI.deployCoprocessor(tableDesc);
-
-            // drop the table first
-            HBaseAdmin admin = new HBaseAdmin(conf);
-            if (admin.tableExists(tableName)) {
-                admin.disableTable(tableName);
-                admin.deleteTable(tableName);
-            }
-
-            // create table
-            byte[][] splitKeys = getSplits(sharding);
-            if (splitKeys.length == 0)
-                splitKeys = null;
-            admin.createTable(tableDesc, splitKeys);
-            if (splitKeys != null) {
-                for (int i = 0; i < splitKeys.length; i++) {
-                    System.out.println("split key " + i + ": " + BytesUtil.toHex(splitKeys[i]));
-                }
-            }
-            System.out.println("create hbase table " + tableName + " done.");
-            admin.close();
-
-            return 0;
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
-    }
-
-    // one region for one shard
-    private byte[][] getSplits(int shard) {
-        byte[][] result = new byte[shard - 1][];
-        for (int i = 1; i < shard; ++i) {
-            byte[] split = new byte[IIKeyValueCodec.SHARD_LEN];
-            BytesUtil.writeUnsigned(i, split, 0, IIKeyValueCodec.SHARD_LEN);
-            result[i - 1] = split;
-        }
-        return result;
-    }
-
-    public static void main(String[] args) throws Exception {
-        IICreateHTableJob job = new IICreateHTableJob();
-        job.setConf(HBaseConnection.newHBaseConfiguration(KylinConfig.getInstanceFromEnv().getStorageUrl()));
-        ToolRunner.run(job, args);
-    }
-}
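
The pre-split above gives the HTable exactly one region per shard: for a sharding of N, getSplits() returns N-1 boundary keys, the fixed-width unsigned encodings of 1..N-1. A small sketch of what that yields, assuming SHARD_LEN is 2 bytes (the real width comes from IIKeyValueCodec) and that BytesUtil.writeUnsigned produces a big-endian encoding:

    public class SplitKeySketch {
        public static void main(String[] args) {
            int sharding = 4;  // hypothetical shard count
            int shardLen = 2;  // assumed value of IIKeyValueCodec.SHARD_LEN
            for (int i = 1; i < sharding; i++) {
                byte[] split = new byte[shardLen];
                // fixed-width big-endian write, standing in for BytesUtil.writeUnsigned
                for (int b = shardLen - 1, v = i; b >= 0; b--, v >>>= 8)
                    split[b] = (byte) v;
                System.out.printf("split key %d: %02x%02x%n", i - 1, split[0], split[1]);
            }
            // prints 0001, 0002, 0003 -> three boundaries, four regions, one per shard
        }
    }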

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsCombiner.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsCombiner.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsCombiner.java
deleted file mode 100644
index 1f4611b..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsCombiner.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import java.io.IOException;
-import java.util.HashSet;
-
-import org.apache.hadoop.io.ShortWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.kylin.common.util.ByteArray;
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.engine.mr.KylinReducer;
-
-/**
- * @author yangli9
- */
-public class IIDistinctColumnsCombiner extends KylinReducer<ShortWritable, Text, ShortWritable, Text> {
-
-    private Text outputValue = new Text();
-
-    @Override
-    protected void setup(Context context) throws IOException {
-        super.bindCurrentConfiguration(context.getConfiguration());
-
-    }
-
-    @Override
-    public void reduce(ShortWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
-
-        HashSet<ByteArray> set = new HashSet<ByteArray>();
-        for (Text textValue : values) {
-            ByteArray value = new ByteArray(Bytes.copy(textValue.getBytes(), 0, textValue.getLength()));
-            set.add(value);
-        }
-
-        for (ByteArray value : set) {
-            outputValue.set(value.array(), value.offset(), value.length());
-            context.write(key, outputValue);
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsJob.java
deleted file mode 100644
index 042678e..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsJob.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import java.io.IOException;
-
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.ShortWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.engine.mr.HadoopUtil;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.model.IIJoinedFlatTableDesc;
-import org.apache.kylin.metadata.model.IntermediateColumnDesc;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author yangli9
- */
-public class IIDistinctColumnsJob extends AbstractHadoopJob {
-    protected static final Logger logger = LoggerFactory.getLogger(IIDistinctColumnsJob.class);
-
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        try {
-            options.addOption(OPTION_JOB_NAME);
-            options.addOption(OPTION_TABLE_NAME);
-            options.addOption(OPTION_II_NAME);
-            options.addOption(OPTION_OUTPUT_PATH);
-            parseOptions(options, args);
-
-            job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
-            String tableName = getOptionValue(OPTION_TABLE_NAME).toUpperCase();
-            String iiName = getOptionValue(OPTION_II_NAME);
-            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
-
-            // ----------------------------------------------------------------------------
-
-            logger.info("Starting: " + job.getJobName() + " on table " + tableName);
-
-            IIManager iiMgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
-            IIInstance ii = iiMgr.getII(iiName);
-            job.getConfiguration().set(BatchConstants.TABLE_NAME, tableName);
-            job.getConfiguration().set(BatchConstants.TABLE_COLUMNS, getColumns(ii));
-
-            setJobClasspath(job);
-
-            setupMapper();
-            setupReducer(output);
-
-            return waitForCompletion(job);
-
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
-
-    }
-
-    private String getColumns(IIInstance ii) {
-        IIJoinedFlatTableDesc iiflat = new IIJoinedFlatTableDesc(ii.getDescriptor());
-        StringBuilder buf = new StringBuilder();
-        for (IntermediateColumnDesc col : iiflat.getColumnList()) {
-            if (buf.length() > 0)
-                buf.append(",");
-            buf.append(col.getColumnName());
-        }
-        return buf.toString();
-    }
-
-    private void setupMapper() throws IOException {
-
-        String tableName = job.getConfiguration().get(BatchConstants.TABLE_NAME);
-        String[] dbTableNames = HadoopUtil.parseHiveTableName(tableName);
-
-        logger.info("setting hcat input format, db name {} , table name {}", dbTableNames[0], dbTableNames[1]);
-
-        HCatInputFormat.setInput(job, dbTableNames[0], dbTableNames[1]);
-
-        job.setInputFormatClass(HCatInputFormat.class);
-
-        job.setMapperClass(IIDistinctColumnsMapper.class);
-        job.setCombinerClass(IIDistinctColumnsCombiner.class);
-        job.setMapOutputKeyClass(ShortWritable.class);
-        job.setMapOutputValueClass(Text.class);
-    }
-
-    private void setupReducer(Path output) throws IOException {
-        job.setReducerClass(IIDistinctColumnsReducer.class);
-        job.setOutputFormatClass(SequenceFileOutputFormat.class);
-        job.setOutputKeyClass(NullWritable.class);
-        job.setOutputValueClass(Text.class);
-
-        FileOutputFormat.setOutputPath(job, output);
-        job.getConfiguration().set(BatchConstants.OUTPUT_PATH, output.toString());
-
-        job.setNumReduceTasks(1);
-
-        deletePath(job.getConfiguration(), output);
-    }
-
-    public static void main(String[] args) throws Exception {
-        IIDistinctColumnsJob job = new IIDistinctColumnsJob();
-        int exitCode = ToolRunner.run(job, args);
-        System.exit(exitCode);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsMapper.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsMapper.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsMapper.java
deleted file mode 100644
index 3418a57..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsMapper.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.ShortWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.engine.mr.KylinMapper;
-
-/**
- * @author yangli9
- */
-public class IIDistinctColumnsMapper<KEYIN> extends KylinMapper<KEYIN, HCatRecord, ShortWritable, Text> {
-
-    private ShortWritable outputKey = new ShortWritable();
-    private Text outputValue = new Text();
-    private HCatSchema schema = null;
-    private int columnSize = 0;
-
-    @Override
-    protected void setup(Context context) throws IOException {
-        super.bindCurrentConfiguration(context.getConfiguration());
-        schema = HCatInputFormat.getTableSchema(context.getConfiguration());
-        columnSize = schema.getFields().size();
-    }
-
-    @Override
-    public void map(KEYIN key, HCatRecord record, Context context) throws IOException, InterruptedException {
-
-        HCatFieldSchema fieldSchema = null;
-        for (short i = 0; i < columnSize; i++) {
-            outputKey.set(i);
-            fieldSchema = schema.get(i);
-            Object fieldValue = record.get(fieldSchema.getName(), schema);
-            if (fieldValue == null)
-                continue;
-            byte[] bytes = Bytes.toBytes(fieldValue.toString());
-            outputValue.set(bytes, 0, bytes.length);
-            context.write(outputKey, outputValue);
-        }
-
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsReducer.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsReducer.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsReducer.java
deleted file mode 100644
index fcb4dd5..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIDistinctColumnsReducer.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import java.io.IOException;
-import java.util.HashSet;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.ShortWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.kylin.common.util.ByteArray;
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.engine.mr.KylinReducer;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-
-/**
- * @author yangli9
- */
-public class IIDistinctColumnsReducer extends KylinReducer<ShortWritable, Text, NullWritable, Text> {
-
-    private String[] columns;
-
-    @Override
-    protected void setup(Context context) throws IOException {
-        super.bindCurrentConfiguration(context.getConfiguration());
-
-        Configuration conf = context.getConfiguration();
-        this.columns = conf.get(BatchConstants.TABLE_COLUMNS).split(",");
-    }
-
-    @Override
-    public void reduce(ShortWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
-        String columnName = columns[key.get()];
-
-        HashSet<ByteArray> set = new HashSet<ByteArray>();
-        for (Text textValue : values) {
-            ByteArray value = new ByteArray(Bytes.copy(textValue.getBytes(), 0, textValue.getLength()));
-            set.add(value);
-        }
-
-        Configuration conf = context.getConfiguration();
-        FileSystem fs = FileSystem.get(conf);
-        String outputPath = conf.get(BatchConstants.OUTPUT_PATH);
-        FSDataOutputStream out = fs.create(new Path(outputPath, columnName));
-
-        try {
-            for (ByteArray value : set) {
-                out.write(value.array(), value.offset(), value.length());
-                out.write('\n');
-            }
-        } finally {
-            out.close();
-        }
-
-    }
-
-}
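
Taken together, the mapper, combiner and reducer above implement per-column distinct-value extraction: the mapper keys every cell by its column index, the combiner drops duplicates within a single map task's output, and the lone reducer deduplicates globally and writes one value-per-line file per column. A toy illustration of that contract, with made-up rows:

    import java.util.*;

    public class DistinctColumnsSketch {
        public static void main(String[] args) {
            List<String[]> rows = Arrays.asList(
                    new String[] { "US", "2013-01-01" },
                    new String[] { "CN", "2013-01-01" },
                    new String[] { "US", "2013-01-02" });
            // key = column index, value = distinct values seen for that column
            Map<Short, Set<String>> distinct = new TreeMap<>();
            for (String[] row : rows)
                for (short col = 0; col < row.length; col++)
                    distinct.computeIfAbsent(col, k -> new LinkedHashSet<>()).add(row[col]);
            System.out.println(distinct); // {0=[US, CN], 1=[2013-01-01, 2013-01-02]}
        }
    }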


[5/9] incubator-kylin git commit: KYLIN-1010 Decompose project job

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexJob.java
deleted file mode 100644
index c9ad448..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexJob.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.engine.mr.HadoopUtil;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.metadata.MetadataManager;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author yangli9
- */
-public class InvertedIndexJob extends AbstractHadoopJob {
-    protected static final Logger logger = LoggerFactory.getLogger(InvertedIndexJob.class);
-
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        try {
-            options.addOption(OPTION_JOB_NAME);
-            options.addOption(OPTION_II_NAME);
-            options.addOption(OPTION_TABLE_NAME);
-            options.addOption(OPTION_OUTPUT_PATH);
-            parseOptions(options, args);
-
-            job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
-            String iiname = getOptionValue(OPTION_II_NAME);
-            String intermediateTable = getOptionValue(OPTION_TABLE_NAME);
-            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
-
-            // ----------------------------------------------------------------------------
-
-            System.out.println("Starting: " + job.getJobName());
-
-            IIInstance ii = getII(iiname);
-            short sharding = ii.getDescriptor().getSharding();
-
-            setJobClasspath(job);
-
-            setupMapper(intermediateTable);
-            setupReducer(output, sharding);
-            attachMetadata(ii);
-
-            return waitForCompletion(job);
-
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        } finally {
-            if (job != null)
-                cleanupTempConfFile(job.getConfiguration());
-        }
-
-    }
-
-    private IIInstance getII(String iiName) {
-        IIManager mgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
-        IIInstance ii = mgr.getII(iiName);
-        if (ii == null)
-            throw new IllegalArgumentException("No Inverted Index found by name " + iiName);
-        return ii;
-    }
-
-    private void attachMetadata(IIInstance ii) throws IOException {
-
-        Configuration conf = job.getConfiguration();
-        attachKylinPropsAndMetadata(ii, conf);
-
-        IISegment seg = ii.getFirstSegment();
-        conf.set(BatchConstants.CFG_II_NAME, ii.getName());
-        conf.set(BatchConstants.CFG_II_SEGMENT_NAME, seg.getName());
-    }
-
-    protected void attachKylinPropsAndMetadata(IIInstance ii, Configuration conf) throws IOException {
-        MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
-
-        // write II / model_desc / II_desc / dict / table
-        ArrayList<String> dumpList = new ArrayList<String>();
-        dumpList.add(ii.getResourcePath());
-        dumpList.add(ii.getDescriptor().getModel().getResourcePath());
-        dumpList.add(ii.getDescriptor().getResourcePath());
-
-        for (String tableName : ii.getDescriptor().getModel().getAllTables()) {
-            TableDesc table = metaMgr.getTableDesc(tableName);
-            dumpList.add(table.getResourcePath());
-        }
-        for (IISegment segment : ii.getSegments()) {
-            dumpList.addAll(segment.getDictionaryPaths());
-        }
-
-        attachKylinPropsAndMetadata(dumpList, conf);
-    }
-
-    private void setupMapper(String intermediateTable) throws IOException {
-
-        String[] dbTableNames = HadoopUtil.parseHiveTableName(intermediateTable);
-        HCatInputFormat.setInput(job, dbTableNames[0], dbTableNames[1]);
-
-        job.setInputFormatClass(HCatInputFormat.class);
-
-        job.setMapperClass(InvertedIndexMapper.class);
-        job.setMapOutputKeyClass(LongWritable.class);
-        job.setMapOutputValueClass(ImmutableBytesWritable.class);
-        job.setPartitionerClass(InvertedIndexPartitioner.class);
-    }
-
-    private void setupReducer(Path output, short sharding) throws IOException {
-        job.setReducerClass(InvertedIndexReducer.class);
-        job.setOutputFormatClass(SequenceFileOutputFormat.class);
-        job.setOutputKeyClass(ImmutableBytesWritable.class);
-        job.setOutputValueClass(ImmutableBytesWritable.class);
-
-        job.setNumReduceTasks(sharding);
-
-        FileOutputFormat.setOutputPath(job, output);
-
-        job.getConfiguration().set(BatchConstants.OUTPUT_PATH, output.toString());
-
-        deletePath(job.getConfiguration(), output);
-    }
-
-    public static void main(String[] args) throws Exception {
-        InvertedIndexJob job = new InvertedIndexJob();
-        int exitCode = ToolRunner.run(job, args);
-        System.exit(exitCode);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexMapper.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexMapper.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexMapper.java
deleted file mode 100644
index bc43b65..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexMapper.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.engine.mr.KylinMapper;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.invertedindex.index.TableRecord;
-import org.apache.kylin.invertedindex.index.TableRecordInfo;
-import org.apache.kylin.metadata.model.SegmentStatusEnum;
-
-/**
- * @author yangli9
- */
-public class InvertedIndexMapper<KEYIN> extends KylinMapper<KEYIN, HCatRecord, LongWritable, ImmutableBytesWritable> {
-
-    private TableRecordInfo info;
-    private TableRecord rec;
-
-    private LongWritable outputKey;
-    private ImmutableBytesWritable outputValue;
-    private HCatSchema schema = null;
-    private List<HCatFieldSchema> fields;
-
-    @Override
-    protected void setup(Context context) throws IOException {
-        super.bindCurrentConfiguration(context.getConfiguration());
-
-        Configuration conf = context.getConfiguration();
-
-        KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
-        IIManager mgr = IIManager.getInstance(config);
-        IIInstance ii = mgr.getII(conf.get(BatchConstants.CFG_II_NAME));
-        IISegment seg = ii.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);
-        this.info = new TableRecordInfo(seg);
-        this.rec = this.info.createTableRecord();
-
-        outputKey = new LongWritable();
-        outputValue = new ImmutableBytesWritable(rec.getBytes());
-
-        schema = HCatInputFormat.getTableSchema(context.getConfiguration());
-
-        fields = schema.getFields();
-    }
-
-    @Override
-    public void map(KEYIN key, HCatRecord record, Context context) throws IOException, InterruptedException {
-
-        rec.reset();
-        for (int i = 0; i < fields.size(); i++) {
-            Object fieldValue = record.get(i);
-            rec.setValueString(i, fieldValue == null ? null : fieldValue.toString());
-        }
-
-        outputKey.set(rec.getTimestamp());
-        // outputValue wraps rec's backing byte array, so the fields set above are already reflected in it
-
-        context.write(outputKey, outputValue);
-    }
-}
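
Note: the deleted mapper allocates its output key and value once in setup() and mutates them per record, the standard Hadoop object-reuse idiom; the ImmutableBytesWritable even wraps rec's backing array directly, so filling rec's fields updates the output value with no copy. A self-contained sketch of the same idiom using only stock writables (the real record and codec types are Kylin-specific):

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class ReusingMapper extends Mapper<LongWritable, Text, LongWritable, Text> {
    private final LongWritable outKey = new LongWritable(); // allocated once, reused per record
    private final Text outValue = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        outKey.set(value.getLength()); // placeholder key derivation
        outValue.set(value);           // overwrite in place instead of allocating
        context.write(outKey, outValue); // the framework serializes immediately, so reuse is safe
    }
}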

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexPartitioner.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexPartitioner.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexPartitioner.java
deleted file mode 100644
index 396c221..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexPartitioner.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.Partitioner;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.invertedindex.index.TableRecord;
-import org.apache.kylin.invertedindex.index.TableRecordInfo;
-import org.apache.kylin.metadata.model.SegmentStatusEnum;
-
-/**
- * @author yangli9
- */
-public class InvertedIndexPartitioner extends Partitioner<LongWritable, ImmutableBytesWritable> implements Configurable {
-
-    private Configuration conf;
-    private TableRecordInfo info;
-    private TableRecord rec;
-
-    @Override
-    public int getPartition(LongWritable key, ImmutableBytesWritable value, int numPartitions) {
-        rec.setBytes(value.get(), value.getOffset(), value.getLength());
-        return rec.getShard();
-    }
-
-    @Override
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-        try {
-            KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
-            IIManager mgr = IIManager.getInstance(config);
-            IIInstance ii = mgr.getII(conf.get(BatchConstants.CFG_II_NAME));
-            IISegment seg = ii.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);
-            this.info = new TableRecordInfo(seg);
-            this.rec = this.info.createTableRecord();
-        } catch (IOException e) {
-            throw new RuntimeException("", e);
-        }
-    }
-
-    @Override
-    public Configuration getConf() {
-        return conf;
-    }
-
-}
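
Note: the deleted partitioner routes each record to the reducer whose index equals the shard id decoded from the value bytes; it relies on the job setting numReduceTasks to the shard count (the setNumReduceTasks(sharding) call in the job above). A sketch of the same routing under the assumption that the shard id sits in the first value byte, where the real code decodes it through Kylin's TableRecord:

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Partitioner;

public class ShardPartitioner extends Partitioner<LongWritable, ImmutableBytesWritable> {
    @Override
    public int getPartition(LongWritable key, ImmutableBytesWritable value, int numPartitions) {
        int shard = value.get()[value.getOffset()] & 0xFF; // assumed encoding: shard id in byte 0
        return shard % numPartitions; // exactly one reducer per shard when numPartitions == shard count
    }
}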

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexReducer.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexReducer.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexReducer.java
deleted file mode 100644
index 5a69eec..0000000
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/InvertedIndexReducer.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.hadoop.invertedindex;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.engine.mr.KylinReducer;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.invertedindex.index.IncrementalSliceMaker;
-import org.apache.kylin.invertedindex.index.Slice;
-import org.apache.kylin.invertedindex.index.TableRecord;
-import org.apache.kylin.invertedindex.index.TableRecordInfo;
-import org.apache.kylin.invertedindex.model.IIKeyValueCodec;
-import org.apache.kylin.invertedindex.model.IIRow;
-import org.apache.kylin.metadata.model.SegmentStatusEnum;
-
-/**
- * @author yangli9
- */
-public class InvertedIndexReducer extends KylinReducer<LongWritable, ImmutableBytesWritable, ImmutableBytesWritable, ImmutableBytesWritable> {
-
-    private TableRecordInfo info;
-    private TableRecord rec;
-    private IncrementalSliceMaker builder;
-    private IIKeyValueCodec kv;
-
-    @Override
-    protected void setup(Context context) throws IOException {
-        super.bindCurrentConfiguration(context.getConfiguration());
-
-        Configuration conf = context.getConfiguration();
-        KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
-        IIManager mgr = IIManager.getInstance(config);
-        IIInstance ii = mgr.getII(conf.get(BatchConstants.CFG_II_NAME));
-        IISegment seg = ii.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);
-        info = new TableRecordInfo(seg);
-        rec = info.createTableRecord();
-        builder = null;
-        kv = new IIKeyValueCodec(info.getDigest());
-    }
-
-    @Override
-    public void reduce(LongWritable key, Iterable<ImmutableBytesWritable> values, Context context) //
-            throws IOException, InterruptedException {
-        for (ImmutableBytesWritable v : values) {
-            rec.setBytes(v.get(), v.getOffset(), v.getLength());
-
-            if (builder == null) {
-                builder = new IncrementalSliceMaker(info, rec.getShard());
-            }
-
-            // TODO: remove this debug log
-            System.out.println(rec.getShard() + " - " + rec);
-
-            Slice slice = builder.append(rec);
-            if (slice != null) {
-                output(slice, context);
-            }
-        }
-    }
-
-    @Override
-    protected void cleanup(Context context) throws IOException, InterruptedException {
-        if (builder == null) {
-            return; // this reduce task received no records
-        }
-        Slice slice = builder.close();
-        if (slice != null) {
-            output(slice, context);
-        }
-    }
-
-    private void output(Slice slice, Context context) throws IOException, InterruptedException {
-        for (IIRow pair : kv.encodeKeyValue(slice)) {
-            context.write(pair.getKey(), pair.getValue());
-        }
-    }
-
-}
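
Note: the deleted reducer is a batch-and-flush consumer: values arrive sorted by timestamp key, are appended to a slice builder created lazily for the task's shard, a finished slice is emitted whenever the builder hands one back, and the final partial slice is flushed in cleanup(). A generic sketch of that shape with plain writables (the slice and codec types are Kylin-specific):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class BatchingReducer extends Reducer<LongWritable, Text, LongWritable, Text> {
    private static final int BATCH_SIZE = 1000; // stand-in for the slice capacity
    private final List<String> batch = new ArrayList<>();
    private long lastKey;

    @Override
    protected void reduce(LongWritable key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        lastKey = key.get();
        for (Text v : values) {
            batch.add(v.toString());
            if (batch.size() >= BATCH_SIZE) {
                flush(context); // emit a full batch, like a completed Slice
            }
        }
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        flush(context); // emit the final partial batch, like builder.close()
    }

    private void flush(Context context) throws IOException, InterruptedException {
        if (batch.isEmpty())
            return; // also covers a reduce task that received no input at all
        context.write(new LongWritable(lastKey), new Text(String.join(",", batch)));
        batch.clear();
    }
}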

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/invertedindex/IIJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/invertedindex/IIJob.java b/job/src/main/java/org/apache/kylin/job/invertedindex/IIJob.java
deleted file mode 100644
index 0af846b..0000000
--- a/job/src/main/java/org/apache/kylin/job/invertedindex/IIJob.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.invertedindex;
-
-import org.apache.kylin.job.execution.DefaultChainedExecutable;
-
-/**
- */
-public class IIJob extends DefaultChainedExecutable {
-
-    public IIJob() {
-        super();
-    }
-
-    private static final String II_INSTANCE_NAME = "iiName";
-    private static final String SEGMENT_ID = "segmentId";
-
-    void setIIName(String name) {
-        setParam(II_INSTANCE_NAME, name);
-    }
-
-    public String getIIName() {
-        return getParam(II_INSTANCE_NAME);
-    }
-
-    void setSegmentId(String segmentId) {
-        setParam(SEGMENT_ID, segmentId);
-    }
-
-    public String getSegmentId() {
-        return getParam(SEGMENT_ID);
-    }
-
-}
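
Note: the deleted IIJob carries no logic of its own; it stores its identity (II name, segment id) as string parameters on the chained executable so the job can be persisted to the metadata store and reconstructed after a server restart. The pattern in isolation, with setParam/getParam standing in for DefaultChainedExecutable's:

import java.util.HashMap;
import java.util.Map;

public class ParamBackedJob {
    private final Map<String, String> params = new HashMap<>(); // would be persisted as metadata

    void setParam(String key, String value) { params.put(key, value); }
    String getParam(String key) { return params.get(key); }

    void setIIName(String name) { setParam("iiName", name); }
    public String getIIName() { return getParam("iiName"); }

    void setSegmentId(String segmentId) { setParam("segmentId", segmentId); }
    public String getSegmentId() { return getParam("segmentId"); }
}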

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java b/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java
deleted file mode 100644
index 4bd06c5..0000000
--- a/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java
+++ /dev/null
@@ -1,230 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.invertedindex;
-
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.TimeZone;
-
-import org.apache.kylin.engine.mr.common.HadoopShellExecutable;
-import org.apache.kylin.engine.mr.common.MapReduceExecutable;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.invertedindex.model.IIJoinedFlatTableDesc;
-import org.apache.kylin.job.constant.ExecutableConstants;
-import org.apache.kylin.job.engine.JobEngineConfig;
-import org.apache.kylin.job.execution.AbstractExecutable;
-import org.apache.kylin.job.hadoop.dict.CreateInvertedIndexDictionaryJob;
-import org.apache.kylin.job.hadoop.invertedindex.IIBulkLoadJob;
-import org.apache.kylin.job.hadoop.invertedindex.IICreateHFileJob;
-import org.apache.kylin.job.hadoop.invertedindex.IICreateHTableJob;
-import org.apache.kylin.job.hadoop.invertedindex.IIDistinctColumnsJob;
-import org.apache.kylin.job.hadoop.invertedindex.InvertedIndexJob;
-import org.apache.kylin.metadata.model.DataModelDesc.RealizationCapacity;
-import org.apache.kylin.source.hive.HiveMRInput.BatchCubingInputSide;
-
-import com.google.common.base.Preconditions;
-
-/**
- */
-public final class IIJobBuilder {
-
-    final JobEngineConfig engineConfig;
-
-    public IIJobBuilder(JobEngineConfig engineConfig) {
-        this.engineConfig = engineConfig;
-    }
-
-    public IIJob buildJob(IISegment seg, String submitter) {
-        checkPreconditions(seg);
-
-        IIJob result = initialJob(seg, "BUILD", submitter);
-        final String jobId = result.getId();
-        final IIJoinedFlatTableDesc intermediateTableDesc = new IIJoinedFlatTableDesc(seg.getIIDesc());
-        final String intermediateTableIdentity = getIntermediateTableIdentity(intermediateTableDesc);
-        final String factDistinctColumnsPath = getIIDistinctColumnsPath(seg, jobId);
-        final String iiRootPath = getJobWorkingDir(jobId) + "/" + seg.getIIInstance().getName() + "/";
-        final String iiPath = iiRootPath + "*";
-
-        final AbstractExecutable intermediateHiveTableStep = createFlatHiveTableStep(intermediateTableDesc, jobId);
-        result.addTask(intermediateHiveTableStep);
-
-        result.addTask(createFactDistinctColumnsStep(seg, intermediateTableIdentity, jobId, factDistinctColumnsPath));
-
-        result.addTask(createBuildDictionaryStep(seg, factDistinctColumnsPath));
-
-        result.addTask(createInvertedIndexStep(seg, intermediateTableIdentity, iiRootPath));
-
-        // create htable step
-        result.addTask(createCreateHTableStep(seg));
-
-        // generate hfiles step
-        result.addTask(createConvertToHfileStep(seg, iiPath, jobId));
-
-        // bulk load step
-        result.addTask(createBulkLoadStep(seg, jobId));
-
-        return result;
-    }
-
-    private AbstractExecutable createFlatHiveTableStep(IIJoinedFlatTableDesc intermediateTableDesc, String jobId) {
-        return BatchCubingInputSide.createFlatHiveTableStep(engineConfig, intermediateTableDesc, jobId);
-    }
-
-    private IIJob initialJob(IISegment seg, String type, String submitter) {
-        IIJob result = new IIJob();
-        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss");
-        format.setTimeZone(TimeZone.getTimeZone(engineConfig.getTimeZone()));
-        result.setIIName(seg.getIIInstance().getName());
-        result.setSegmentId(seg.getUuid());
-        result.setName(seg.getIIInstance().getName() + " - " + seg.getName() + " - " + type + " - " + format.format(new Date(System.currentTimeMillis())));
-        result.setSubmitter(submitter);
-        return result;
-    }
-
-    private void checkPreconditions(IISegment seg) {
-        Preconditions.checkNotNull(seg, "segment cannot be null");
-        Preconditions.checkNotNull(engineConfig, "jobEngineConfig cannot be null");
-    }
-
-    private void appendMapReduceParameters(StringBuilder builder, JobEngineConfig engineConfig) {
-        try {
-            String jobConf = engineConfig.getHadoopJobConfFilePath(RealizationCapacity.MEDIUM);
-            if (jobConf != null && jobConf.length() > 0) {
-                builder.append(" -conf ").append(jobConf);
-            }
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    private String getIIDistinctColumnsPath(IISegment seg, String jobUuid) {
-        return getJobWorkingDir(jobUuid) + "/" + seg.getIIInstance().getName() + "/ii_distinct_columns";
-    }
-
-    private String getHFilePath(IISegment seg, String jobId) {
-        return getJobWorkingDir(jobId) + "/" + seg.getIIInstance().getName() + "/hfile/";
-    }
-
-    private MapReduceExecutable createFactDistinctColumnsStep(IISegment seg, String factTableName, String jobId, String output) {
-        MapReduceExecutable result = new MapReduceExecutable();
-        result.setName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS);
-        result.setMapReduceJobClass(IIDistinctColumnsJob.class);
-        StringBuilder cmd = new StringBuilder();
-        appendMapReduceParameters(cmd, engineConfig);
-        appendExecCmdParameters(cmd, "tablename", factTableName);
-        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
-        appendExecCmdParameters(cmd, "output", output);
-        appendExecCmdParameters(cmd, "jobname", "Kylin_Fact_Distinct_Columns_" + seg.getIIInstance().getName() + "_Step");
-
-        result.setMapReduceParams(cmd.toString());
-        return result;
-    }
-
-    private HadoopShellExecutable createBuildDictionaryStep(IISegment seg, String factDistinctColumnsPath) {
-        // build dictionary step
-        HadoopShellExecutable buildDictionaryStep = new HadoopShellExecutable();
-        buildDictionaryStep.setName(ExecutableConstants.STEP_NAME_BUILD_DICTIONARY);
-        StringBuilder cmd = new StringBuilder();
-        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
-        appendExecCmdParameters(cmd, "input", factDistinctColumnsPath);
-
-        buildDictionaryStep.setJobParams(cmd.toString());
-        buildDictionaryStep.setJobClass(CreateInvertedIndexDictionaryJob.class);
-        return buildDictionaryStep;
-    }
-
-    private MapReduceExecutable createInvertedIndexStep(IISegment seg, String intermediateHiveTable, String iiOutputTempPath) {
-        // build inverted index step
-        MapReduceExecutable buildIIStep = new MapReduceExecutable();
-
-        StringBuilder cmd = new StringBuilder();
-        appendMapReduceParameters(cmd, engineConfig);
-
-        buildIIStep.setName(ExecutableConstants.STEP_NAME_BUILD_II);
-
-        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
-        appendExecCmdParameters(cmd, "tablename", intermediateHiveTable);
-        appendExecCmdParameters(cmd, "output", iiOutputTempPath);
-        appendExecCmdParameters(cmd, "jobname", ExecutableConstants.STEP_NAME_BUILD_II);
-
-        buildIIStep.setMapReduceParams(cmd.toString());
-        buildIIStep.setMapReduceJobClass(InvertedIndexJob.class);
-        return buildIIStep;
-    }
-
-    private HadoopShellExecutable createCreateHTableStep(IISegment seg) {
-        HadoopShellExecutable createHtableStep = new HadoopShellExecutable();
-        createHtableStep.setName(ExecutableConstants.STEP_NAME_CREATE_HBASE_TABLE);
-        StringBuilder cmd = new StringBuilder();
-        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
-        appendExecCmdParameters(cmd, "htablename", seg.getStorageLocationIdentifier());
-
-        createHtableStep.setJobParams(cmd.toString());
-        createHtableStep.setJobClass(IICreateHTableJob.class);
-
-        return createHtableStep;
-    }
-
-    private MapReduceExecutable createConvertToHfileStep(IISegment seg, String inputPath, String jobId) {
-        MapReduceExecutable createHFilesStep = new MapReduceExecutable();
-        createHFilesStep.setName(ExecutableConstants.STEP_NAME_CONVERT_II_TO_HFILE);
-        StringBuilder cmd = new StringBuilder();
-
-        appendMapReduceParameters(cmd, engineConfig);
-        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
-        appendExecCmdParameters(cmd, "input", inputPath);
-        appendExecCmdParameters(cmd, "output", getHFilePath(seg, jobId));
-        appendExecCmdParameters(cmd, "htablename", seg.getStorageLocationIdentifier());
-        appendExecCmdParameters(cmd, "jobname", "Kylin_HFile_Generator_" + seg.getIIInstance().getName() + "_Step");
-
-        createHFilesStep.setMapReduceParams(cmd.toString());
-        createHFilesStep.setMapReduceJobClass(IICreateHFileJob.class);
-
-        return createHFilesStep;
-    }
-
-    private HadoopShellExecutable createBulkLoadStep(IISegment seg, String jobId) {
-        HadoopShellExecutable bulkLoadStep = new HadoopShellExecutable();
-        bulkLoadStep.setName(ExecutableConstants.STEP_NAME_BULK_LOAD_HFILE);
-
-        StringBuilder cmd = new StringBuilder();
-        appendExecCmdParameters(cmd, "input", getHFilePath(seg, jobId));
-        appendExecCmdParameters(cmd, "htablename", seg.getStorageLocationIdentifier());
-        appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
-
-        bulkLoadStep.setJobParams(cmd.toString());
-        bulkLoadStep.setJobClass(IIBulkLoadJob.class);
-
-        return bulkLoadStep;
-
-    }
-
-    private StringBuilder appendExecCmdParameters(StringBuilder buf, String paraName, String paraValue) {
-        return buf.append(" -").append(paraName).append(" ").append(paraValue);
-    }
-
-    private String getJobWorkingDir(String uuid) {
-        return engineConfig.getHdfsWorkingDirectory() + "kylin-" + uuid;
-    }
-
-    private String getIntermediateTableIdentity(IIJoinedFlatTableDesc intermediateTableDesc) {
-        return engineConfig.getConfig().getHiveDatabaseForIntermediateTable() + "." + intermediateTableDesc.getTableName();
-    }
-}
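
Note: every step in the deleted builder is configured the same way: parameters are flattened into a single " -name value" command string, which the executable later re-parses with Hadoop's option parser. A runnable sketch of that assembly; the parameter names match the deleted code, the values are invented for illustration:

public class CmdAssemblyDemo {
    private static StringBuilder appendExecCmdParameters(StringBuilder buf, String name, String value) {
        return buf.append(" -").append(name).append(" ").append(value);
    }

    public static void main(String[] args) {
        StringBuilder cmd = new StringBuilder();
        appendExecCmdParameters(cmd, "iiname", "test_kylin_ii");
        appendExecCmdParameters(cmd, "output", "/tmp/kylin-1234/test_kylin_ii/");
        appendExecCmdParameters(cmd, "jobname", "Kylin_II_Builder_Step");
        // prints: " -iiname test_kylin_ii -output /tmp/kylin-1234/test_kylin_ii/ -jobname Kylin_II_Builder_Step"
        System.out.println(cmd);
    }
}

One caveat worth noting: because values are spliced into a flat string, a value containing whitespace would be split at parse time; the deleted code is safe only because II names, paths, and job names contain no spaces.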

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/main/java/org/apache/kylin/job/tools/IICLI.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/tools/IICLI.java b/job/src/main/java/org/apache/kylin/job/tools/IICLI.java
deleted file mode 100644
index 8c39aa1..0000000
--- a/job/src/main/java/org/apache/kylin/job/tools/IICLI.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job.tools;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.SequenceFile.Reader;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.engine.mr.HadoopUtil;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.index.RawTableRecord;
-import org.apache.kylin.invertedindex.index.Slice;
-import org.apache.kylin.invertedindex.index.TableRecord;
-import org.apache.kylin.invertedindex.index.TableRecordInfo;
-import org.apache.kylin.invertedindex.model.IIKeyValueCodec;
-import org.apache.kylin.invertedindex.model.IIRow;
-
-/**
- * @author yangli9
- */
-public class IICLI {
-
-    public static void main(String[] args) throws IOException {
-        Configuration hconf = HadoopUtil.getCurrentConfiguration();
-        IIManager mgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
-
-        String iiName = args[0];
-        IIInstance ii = mgr.getII(iiName);
-
-        String path = args[1];
-        System.out.println("Reading from " + path + " ...");
-
-        TableRecordInfo info = new TableRecordInfo(ii.getFirstSegment());
-        IIKeyValueCodec codec = new IIKeyValueCodec(info.getDigest());
-        int count = 0;
-        for (Slice slice : codec.decodeKeyValue(readSequenceKVs(hconf, path))) {
-            for (RawTableRecord rec : slice) {
-                System.out.println(new TableRecord(rec, info).toString());
-                count++;
-            }
-        }
-        System.out.println("Total " + count + " records");
-    }
-
-    public static Iterable<IIRow> readSequenceKVs(Configuration hconf, String path) throws IOException {
-        final Reader reader = new Reader(hconf, SequenceFile.Reader.file(new Path(path)));
-        return new Iterable<IIRow>() {
-            @Override
-            public Iterator<IIRow> iterator() {
-                return new Iterator<IIRow>() {
-                    ImmutableBytesWritable k = new ImmutableBytesWritable();
-                    ImmutableBytesWritable v = new ImmutableBytesWritable();
-                    IIRow pair = new IIRow(k, v, null);
-
-                    @Override
-                    public boolean hasNext() {
-                        boolean hasNext = false;
-                        try {
-                            hasNext = reader.next(k, v);
-                        } catch (IOException e) {
-                            throw new RuntimeException(e);
-                        } finally {
-                            if (!hasNext) {
-                                IOUtils.closeQuietly(reader);
-                            }
-                        }
-                        return hasNext;
-                    }
-
-                    @Override
-                    public IIRow next() {
-                        return pair;
-                    }
-
-                    @Override
-                    public void remove() {
-                        throw new UnsupportedOperationException();
-                    }
-                };
-            }
-        };
-    }
-}
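
Note: the deleted readSequenceKVs wraps a SequenceFile reader in a lazy Iterable, reusing a single key/value pair and closing the reader once it is drained. Be aware that in that version hasNext() itself advances the stream, so calling hasNext() twice without next() would skip a record, and the pair returned by next() is mutated on each advance. A self-contained sketch of the safer read-ahead variant of the same idea, built over a BufferedReader to stay runnable:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Iterator;
import java.util.NoSuchElementException;

public class ReadAheadIterator implements Iterator<String> {
    private final BufferedReader reader;
    private String buffered; // read-ahead slot, makes hasNext() idempotent

    public ReadAheadIterator(BufferedReader reader) {
        this.reader = reader;
    }

    @Override
    public boolean hasNext() {
        if (buffered != null) {
            return true; // already buffered, do not advance again
        }
        try {
            buffered = reader.readLine();
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        return buffered != null;
    }

    @Override
    public String next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        String out = buffered;
        buffered = null; // hand ownership to the caller
        return out;
    }
}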

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java b/job/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java
deleted file mode 100644
index d7eb3cf..0000000
--- a/job/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java
+++ /dev/null
@@ -1,283 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.File;
-import java.lang.reflect.Method;
-import java.text.SimpleDateFormat;
-import java.util.List;
-import java.util.TimeZone;
-import java.util.concurrent.Callable;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.AbstractKylinTestCase;
-import org.apache.kylin.common.util.ClassUtil;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.CubeUpdate;
-import org.apache.kylin.engine.EngineFactory;
-import org.apache.kylin.engine.mr.CubingJob;
-import org.apache.kylin.job.engine.JobEngineConfig;
-import org.apache.kylin.job.execution.AbstractExecutable;
-import org.apache.kylin.job.execution.DefaultChainedExecutable;
-import org.apache.kylin.job.execution.ExecutableState;
-import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
-import org.apache.kylin.job.manager.ExecutableManager;
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import com.google.common.collect.Lists;
-
-public class BuildCubeWithEngineTest {
-
-    private CubeManager cubeManager;
-    private DefaultScheduler scheduler;
-    protected ExecutableManager jobService;
-
-    private static final Log logger = LogFactory.getLog(BuildCubeWithEngineTest.class);
-
-    protected void waitForJob(String jobId) {
-        while (true) {
-            AbstractExecutable job = jobService.getJob(jobId);
-            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) {
-                break;
-            } else {
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-    }
-
-    @BeforeClass
-    public static void beforeClass() throws Exception {
-        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
-        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
-    }
-
-    @Before
-    public void before() throws Exception {
-        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
-
-        DeployUtil.initCliWorkDir();
-        DeployUtil.deployMetadata();
-        DeployUtil.overrideJobJarLocations();
-
-        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        jobService = ExecutableManager.getInstance(kylinConfig);
-        scheduler = DefaultScheduler.getInstance();
-        scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
-        if (!scheduler.hasStarted()) {
-            throw new RuntimeException("scheduler has not been started");
-        }
-        cubeManager = CubeManager.getInstance(kylinConfig);
-        for (String jobId : jobService.getAllJobIds()) {
-            if (jobService.getJob(jobId) instanceof CubingJob) {
-                jobService.deleteJob(jobId);
-            }
-        }
-
-    }
-
-    @After
-    public void after() {
-        HBaseMetadataTestCase.staticCleanupTestMetadata();
-    }
-
-    @Test
-    public void test() throws Exception {
-        DeployUtil.prepareTestDataForNormalCubes("test_kylin_cube_with_slr_left_join_empty");
-        testInner();
-        testLeft();
-    }
-
-    private void testInner() throws Exception {
-        String[] testCase = new String[] { "testInnerJoinCube", "testInnerJoinCube2", };
-        runTestAndAssertSucceed(testCase);
-    }
-
-    private void testLeft() throws Exception {
-        String[] testCase = new String[] { "testLeftJoinCube", "testLeftJoinCube2", };
-        runTestAndAssertSucceed(testCase);
-    }
-
-    private void runTestAndAssertSucceed(String[] testCase) throws Exception {
-        ExecutorService executorService = Executors.newFixedThreadPool(testCase.length);
-        final CountDownLatch countDownLatch = new CountDownLatch(testCase.length);
-        List<Future<List<String>>> tasks = Lists.newArrayListWithExpectedSize(testCase.length);
-        for (int i = 0; i < testCase.length; i++) {
-            tasks.add(executorService.submit(new TestCallable(testCase[i], countDownLatch)));
-        }
-        countDownLatch.await();
-        try {
-            for (int i = 0; i < tasks.size(); ++i) {
-                Future<List<String>> task = tasks.get(i);
-                final List<String> jobIds = task.get();
-                for (String jobId : jobIds) {
-                    assertJobSucceed(jobId);
-                }
-            }
-        } catch (Exception ex) {
-            logger.error(ex);
-            throw ex;
-        }
-    }
-
-    private void assertJobSucceed(String jobId) {
-        assertEquals("The job '" + jobId + "' is failed.", ExecutableState.SUCCEED, jobService.getOutput(jobId).getState());
-    }
-
-    private class TestCallable implements Callable<List<String>> {
-
-        private final String methodName;
-        private final CountDownLatch countDownLatch;
-
-        public TestCallable(String methodName, CountDownLatch countDownLatch) {
-            this.methodName = methodName;
-            this.countDownLatch = countDownLatch;
-        }
-
-        @SuppressWarnings("unchecked")
-        @Override
-        public List<String> call() throws Exception {
-            try {
-                final Method method = BuildCubeWithEngineTest.class.getDeclaredMethod(methodName);
-                method.setAccessible(true);
-                return (List<String>) method.invoke(BuildCubeWithEngineTest.this);
-            } finally {
-                countDownLatch.countDown();
-            }
-        }
-    }
-
-    @SuppressWarnings("unused")
-    // called by reflection
-    private List<String> testInnerJoinCube2() throws Exception {
-        clearSegment("test_kylin_cube_with_slr_empty");
-        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
-        f.setTimeZone(TimeZone.getTimeZone("GMT"));
-        long date1 = 0;
-        long date2 = f.parse("2013-01-01").getTime();
-        long date3 = f.parse("2022-01-01").getTime();
-        List<String> result = Lists.newArrayList();
-        result.add(buildSegment("test_kylin_cube_with_slr_empty", date1, date2));
-        result.add(buildSegment("test_kylin_cube_with_slr_empty", date2, date3));
-        return result;
-    }
-
-    @SuppressWarnings("unused")
-    // called by reflection
-    private List<String> testInnerJoinCube() throws Exception {
-        clearSegment("test_kylin_cube_without_slr_empty");
-
-        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
-        f.setTimeZone(TimeZone.getTimeZone("GMT"));
-
-        // this cube's start date is 0, end date is 20501112000000
-        long date1 = 0;
-        long date2 = f.parse("2050-01-11").getTime();
-
-        // this cube doesn't support incremental build, always do full build
-
-        List<String> result = Lists.newArrayList();
-        result.add(buildSegment("test_kylin_cube_without_slr_empty", date1, date2));
-        return result;
-    }
-
-    @SuppressWarnings("unused")
-    // called by reflection
-    private List<String> testLeftJoinCube2() throws Exception {
-        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
-        f.setTimeZone(TimeZone.getTimeZone("GMT"));
-        List<String> result = Lists.newArrayList();
-        final String cubeName = "test_kylin_cube_without_slr_left_join_empty";
-        // this cube's start date is 0, end date is 20120601000000
-        long dateStart = cubeManager.getCube(cubeName).getDescriptor().getModel().getPartitionDesc().getPartitionDateStart();
-        long dateEnd = f.parse("2012-06-01").getTime();
-
-        clearSegment(cubeName);
-        result.add(buildSegment(cubeName, dateStart, dateEnd));
-
-        // then submit an append job, start date is 20120601000000, end
-        // date is 20220101000000
-        dateStart = f.parse("2012-06-01").getTime();
-        dateEnd = f.parse("2022-01-01").getTime();
-        result.add(buildSegment(cubeName, dateStart, dateEnd));
-
-        // build an empty segment which doesn't have data
-        dateStart = f.parse("2022-01-01").getTime();
-        dateEnd = f.parse("2023-01-01").getTime();
-        result.add(buildSegment(cubeName, dateStart, dateEnd));
-
-        return result;
-
-    }
-
-    @SuppressWarnings("unused")
-    // called by reflection
-    private List<String> testLeftJoinCube() throws Exception {
-        String cubeName = "test_kylin_cube_with_slr_left_join_empty";
-        clearSegment(cubeName);
-
-        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
-        f.setTimeZone(TimeZone.getTimeZone("GMT"));
-        long dateStart = cubeManager.getCube(cubeName).getDescriptor().getModel().getPartitionDesc().getPartitionDateStart();
-        long dateEnd = f.parse("2050-11-12").getTime();
-
-        // this cube's start date is 0, end date is 20501112000000
-        List<String> result = Lists.newArrayList();
-        result.add(buildSegment(cubeName, dateStart, dateEnd));
-        return result;
-
-    }
-
-    private void clearSegment(String cubeName) throws Exception {
-        CubeInstance cube = cubeManager.getCube(cubeName);
-        // remove all existing segments
-        CubeUpdate cubeBuilder = new CubeUpdate(cube);
-        cubeBuilder.setToRemoveSegs(cube.getSegments().toArray(new CubeSegment[cube.getSegments().size()]));
-        cubeManager.updateCube(cubeBuilder);
-    }
-
-    private String buildSegment(String cubeName, long startDate, long endDate) throws Exception {
-        CubeSegment segment = cubeManager.appendSegments(cubeManager.getCube(cubeName), endDate);
-        DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
-        jobService.addJob(job);
-        waitForJob(job.getId());
-        return job.getId();
-    }
-
-}
\ No newline at end of file
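
Note: the deleted test fans its cases out with reflection: each case name is resolved to a private method, submitted to a fixed pool, and a latch makes the main thread wait until every case has finished before asserting on the collected job ids. A minimal self-contained sketch of that harness (the case bodies are placeholders):

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class ReflectiveHarness {
    @SuppressWarnings("unused") // invoked by reflection, like the deleted test cases
    private String caseA() { return "jobId-A"; }

    @SuppressWarnings("unused")
    private String caseB() { return "jobId-B"; }

    public List<String> runAll(String... caseNames) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(caseNames.length);
        CountDownLatch latch = new CountDownLatch(caseNames.length);
        List<Future<String>> tasks = new ArrayList<>();
        for (String name : caseNames) {
            tasks.add(pool.submit(() -> {
                try {
                    Method method = ReflectiveHarness.class.getDeclaredMethod(name);
                    method.setAccessible(true); // the cases are private
                    return (String) method.invoke(this);
                } finally {
                    latch.countDown(); // count down even on failure
                }
            }));
        }
        latch.await(); // all cases done before results are read
        List<String> jobIds = new ArrayList<>();
        for (Future<String> task : tasks) {
            jobIds.add(task.get()); // rethrows any case failure
        }
        pool.shutdown();
        return jobIds;
    }

    public static void main(String[] args) throws Exception {
        System.out.println(new ReflectiveHarness().runAll("caseA", "caseB"));
    }
}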

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/BuildCubeWithStreamTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/BuildCubeWithStreamTest.java b/job/src/test/java/org/apache/kylin/job/BuildCubeWithStreamTest.java
deleted file mode 100644
index b02b2f2..0000000
--- a/job/src/test/java/org/apache/kylin/job/BuildCubeWithStreamTest.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- *
- *
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *
- *  contributor license agreements. See the NOTICE file distributed with
- *
- *  this work for additional information regarding copyright ownership.
- *
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *
- *  (the "License"); you may not use this file except in compliance with
- *
- *  the License. You may obtain a copy of the License at
- *
- *
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *
- *
- *  Unless required by applicable law or agreed to in writing, software
- *
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *
- *  See the License for the specific language governing permissions and
- *
- *  limitations under the License.
- *
- * /
- */
-
-package org.apache.kylin.job;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.UUID;
-
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.AbstractKylinTestCase;
-import org.apache.kylin.common.util.ClassUtil;
-import org.apache.kylin.common.util.DateFormat;
-import org.apache.kylin.job.streaming.BootstrapConfig;
-import org.apache.kylin.job.streaming.StreamingBootstrap;
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.apache.kylin.storage.hbase.util.StorageCleanupJob;
-import org.apache.kylin.streaming.StreamingConfig;
-import org.apache.kylin.streaming.StreamingManager;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- *  for streaming cubing case "test_streaming_table"
- */
-public class BuildCubeWithStreamTest {
-
-    private static final Logger logger = LoggerFactory.getLogger(BuildCubeWithStreamTest.class);
-    private static final String streamingName = "test_streaming_table_cube";
-    private static final long startTime = DateFormat.stringToMillis("2015-01-01 00:00:00");
-    private static final long endTime = DateFormat.stringToMillis("2015-01-03 00:00:00");
-    private static final long batchInterval = 16 * 60 * 60 * 1000; // 16 hours
-
-    private KylinConfig kylinConfig;
-
-    @BeforeClass
-    public static void beforeClass() throws Exception {
-        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
-        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
-    }
-
-    @Before
-    public void before() throws Exception {
-        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
-        DeployUtil.overrideJobJarLocations();
-
-        kylinConfig = KylinConfig.getInstanceFromEnv();
-
-        // Use a random topic for the Kafka data stream
-        StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingName);
-        streamingConfig.setTopic(UUID.randomUUID().toString());
-        StreamingManager.getInstance(kylinConfig).saveStreamingConfig(streamingConfig);
-
-        DeployUtil.prepareTestDataForStreamingCube(startTime, endTime, streamingConfig);
-    }
-
-    @AfterClass
-    public static void afterClass() throws Exception {
-        backup();
-        HBaseMetadataTestCase.staticCleanupTestMetadata();
-    }
-
-    private static int cleanupOldStorage() throws Exception {
-        String[] args = { "--delete", "true" };
-        int exitCode = ToolRunner.run(new StorageCleanupJob(), args);
-        return exitCode;
-    }
-
-    private static void backup() throws Exception {
-        int exitCode = cleanupOldStorage();
-        if (exitCode == 0) {
-            exportHBaseData();
-        }
-    }
-
-    private static void exportHBaseData() throws IOException {
-        ExportHBaseData export = new ExportHBaseData();
-        export.exportTables();
-        export.tearDown();
-    }
-
-    @Test
-    public void test() throws Exception {
-        for (long start = startTime; start < endTime; start += batchInterval) {
-            BootstrapConfig bootstrapConfig = new BootstrapConfig();
-            bootstrapConfig.setStart(start);
-            bootstrapConfig.setEnd(start + batchInterval);
-            bootstrapConfig.setOneOff(true);
-            bootstrapConfig.setPartitionId(0);
-            bootstrapConfig.setStreaming(streamingName);
-            StreamingBootstrap.getInstance(KylinConfig.getInstanceFromEnv()).start(bootstrapConfig);
-        }
-    }
-}
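
Note: the deleted streaming test drives the build as a sequence of fixed-width micro-batches: it walks [startTime, endTime) in 16-hour steps and triggers one one-off bootstrap per window. The loop shape in isolation (the build callback is a placeholder, not a Kylin API):

public class MicroBatchLoop {
    interface WindowBuilder {
        void build(long windowStart, long windowEnd);
    }

    static void run(long start, long end, long interval, WindowBuilder builder) {
        for (long s = start; s < end; s += interval) {
            // clamp the last window; the deleted test did not need to because
            // its 48-hour range divides evenly into 16-hour batches
            builder.build(s, Math.min(s + interval, end));
        }
    }

    public static void main(String[] args) {
        long hour = 3600_000L;
        run(0L, 48 * hour, 16 * hour,
                (s, e) -> System.out.println("build window [" + s + ", " + e + ")"));
    }
}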

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/BuildIIWithEngineTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/BuildIIWithEngineTest.java b/job/src/test/java/org/apache/kylin/job/BuildIIWithEngineTest.java
deleted file mode 100644
index fecb106..0000000
--- a/job/src/test/java/org/apache/kylin/job/BuildIIWithEngineTest.java
+++ /dev/null
@@ -1,250 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.File;
-import java.lang.reflect.Method;
-import java.text.SimpleDateFormat;
-import java.util.List;
-import java.util.TimeZone;
-import java.util.concurrent.Callable;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.AbstractKylinTestCase;
-import org.apache.kylin.common.util.ClassUtil;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.job.engine.JobEngineConfig;
-import org.apache.kylin.job.execution.AbstractExecutable;
-import org.apache.kylin.job.execution.ExecutableState;
-import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
-import org.apache.kylin.job.invertedindex.IIJob;
-import org.apache.kylin.job.invertedindex.IIJobBuilder;
-import org.apache.kylin.job.manager.ExecutableManager;
-import org.apache.kylin.metadata.realization.RealizationStatusEnum;
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.apache.kylin.storage.hbase.util.StorageCleanupJob;
-import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-
-import com.google.common.collect.Lists;
-
-/**
- * @author shaoshi
- */
-public class BuildIIWithEngineTest {
-
-    private JobEngineConfig jobEngineConfig;
-    private IIManager iiManager;
-
-    private DefaultScheduler scheduler;
-    protected ExecutableManager jobService;
-
-    protected static final String[] TEST_II_INSTANCES = new String[] { "test_kylin_ii_inner_join", "test_kylin_ii_left_join" };
-
-    private static final Log logger = LogFactory.getLog(BuildIIWithEngineTest.class);
-
-    protected void waitForJob(String jobId) {
-        while (true) {
-            AbstractExecutable job = jobService.getJob(jobId);
-            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) {
-                break;
-            } else {
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-    }
-
-    @BeforeClass
-    public static void beforeClass() throws Exception {
-        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
-    }
-
-    @Before
-    public void before() throws Exception {
-        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
-
-        // DeployUtil.initCliWorkDir();
-        // DeployUtil.deployMetadata();
-        DeployUtil.overrideJobJarLocations();
-
-        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        jobService = ExecutableManager.getInstance(kylinConfig);
-        scheduler = DefaultScheduler.getInstance();
-        scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
-        if (!scheduler.hasStarted()) {
-            throw new RuntimeException("scheduler has not been started");
-        }
-        jobEngineConfig = new JobEngineConfig(kylinConfig);
-        for (String jobId : jobService.getAllJobIds()) {
-            if (jobService.getJob(jobId) instanceof IIJob) {
-                jobService.deleteJob(jobId);
-            }
-        }
-
-        iiManager = IIManager.getInstance(kylinConfig);
-        for (String iiInstance : TEST_II_INSTANCES) {
-
-            IIInstance ii = iiManager.getII(iiInstance);
-            if (ii.getStatus() != RealizationStatusEnum.DISABLED) {
-                ii.setStatus(RealizationStatusEnum.DISABLED);
-                iiManager.updateII(ii);
-            }
-        }
-    }
-
-    @After
-    public void after() throws Exception {
-
-        for (String iiInstance : TEST_II_INSTANCES) {
-            IIInstance ii = iiManager.getII(iiInstance);
-            if (ii.getStatus() != RealizationStatusEnum.READY) {
-                ii.setStatus(RealizationStatusEnum.READY);
-                iiManager.updateII(ii);
-            }
-        }
-    }
-
-    @Test
-    @Ignore
-    public void testBuildII() throws Exception {
-
-        String[] testCase = new String[] { "buildIIInnerJoin", "buildIILeftJoin" };
-        ExecutorService executorService = Executors.newFixedThreadPool(testCase.length);
-        final CountDownLatch countDownLatch = new CountDownLatch(testCase.length);
-        List<Future<List<String>>> tasks = Lists.newArrayListWithExpectedSize(testCase.length);
-        for (int i = 0; i < testCase.length; i++) {
-            tasks.add(executorService.submit(new TestCallable(testCase[i], countDownLatch)));
-        }
-        countDownLatch.await();
-        for (int i = 0; i < tasks.size(); ++i) {
-            Future<List<String>> task = tasks.get(i);
-            final List<String> jobIds = task.get();
-            for (String jobId : jobIds) {
-                assertJobSucceed(jobId);
-            }
-        }
-
-    }
-
-    private void assertJobSucceed(String jobId) {
-        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(jobId).getState());
-    }
-
-    private class TestCallable implements Callable<List<String>> {
-
-        private final String methodName;
-        private final CountDownLatch countDownLatch;
-
-        public TestCallable(String methodName, CountDownLatch countDownLatch) {
-            this.methodName = methodName;
-            this.countDownLatch = countDownLatch;
-        }
-
-        @SuppressWarnings("unchecked")
-        @Override
-        public List<String> call() throws Exception {
-            try {
-                final Method method = BuildIIWithEngineTest.class.getDeclaredMethod(methodName);
-                method.setAccessible(true);
-                return (List<String>) method.invoke(BuildIIWithEngineTest.this);
-            } finally {
-                countDownLatch.countDown();
-            }
-        }
-    }
-
-    protected List<String> buildIIInnerJoin() throws Exception {
-        return buildII(TEST_II_INSTANCES[0]);
-    }
-
-    protected List<String> buildIILeftJoin() throws Exception {
-        return buildII(TEST_II_INSTANCES[1]);
-    }
-
-    protected List<String> buildII(String iiName) throws Exception {
-        clearSegment(iiName);
-
-        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
-        f.setTimeZone(TimeZone.getTimeZone("GMT"));
-
-        long date1 = 0;
-        long date2 = f.parse("2015-01-01").getTime();
-
-        List<String> result = Lists.newArrayList();
-        result.add(buildSegment(iiName, date1, date2));
-        return result;
-    }
-
-    private void clearSegment(String iiName) throws Exception {
-        IIInstance ii = iiManager.getII(iiName);
-        ii.getSegments().clear();
-        iiManager.updateII(ii);
-    }
-
-    private String buildSegment(String iiName, long startDate, long endDate) throws Exception {
-        IIInstance iiInstance = iiManager.getII(iiName);
-        IISegment segment = iiManager.buildSegment(iiInstance, startDate, endDate);
-        iiInstance.getSegments().add(segment);
-        iiManager.updateII(iiInstance);
-        IIJobBuilder iiJobBuilder = new IIJobBuilder(jobEngineConfig);
-        IIJob job = iiJobBuilder.buildJob(segment, "TEST");
-        jobService.addJob(job);
-        waitForJob(job.getId());
-        return job.getId();
-    }
-
-    private int cleanupOldStorage() throws Exception {
-        String[] args = { "--delete", "true" };
-
-        int exitCode = ToolRunner.run(new StorageCleanupJob(), args);
-        return exitCode;
-    }
-
-    public static void main(String[] args) throws Exception {
-        BuildIIWithEngineTest instance = new BuildIIWithEngineTest();
-
-        BuildIIWithEngineTest.beforeClass();
-        instance.before();
-        instance.testBuildII();
-        instance.after();
-
-    }
-
-}
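
Note: both engine tests above block on the same poll-until-terminal loop: fetch the job, return when it reaches SUCCEED or ERROR, otherwise sleep five seconds and retry. The loop in isolation, with a stub status source in place of Kylin's ExecutableManager:

public class JobPoller {
    enum JobStatus { RUNNING, SUCCEED, ERROR }

    interface StatusSource {
        JobStatus fetch(String jobId);
    }

    static JobStatus waitForJob(StatusSource source, String jobId) throws InterruptedException {
        while (true) {
            JobStatus status = source.fetch(jobId);
            if (status == JobStatus.SUCCEED || status == JobStatus.ERROR) {
                return status; // terminal state, stop polling
            }
            Thread.sleep(5000); // same 5s interval as the deleted tests
        }
    }
}

One refinement over the deleted version: a swallowed InterruptedException (as in the tests' catch-and-printStackTrace) loses the interrupt, whereas propagating it, as here, lets a cancelled test exit promptly.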

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/BuildIIWithStreamTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/BuildIIWithStreamTest.java b/job/src/test/java/org/apache/kylin/job/BuildIIWithStreamTest.java
deleted file mode 100644
index 5ca3b29..0000000
--- a/job/src/test/java/org/apache/kylin/job/BuildIIWithStreamTest.java
+++ /dev/null
@@ -1,248 +0,0 @@
-/*
- *
- *
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *
- *  contributor license agreements. See the NOTICE file distributed with
- *
- *  this work for additional information regarding copyright ownership.
- *
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *
- *  (the "License"); you may not use this file except in compliance with
- *
- *  the License. You may obtain a copy of the License at
- *
- *
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *
- *
- *  Unless required by applicable law or agreed to in writing, software
- *
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *
- *  See the License for the specific language governing permissions and
- *
- *  limitations under the License.
- *
- * /
- */
-
-package org.apache.kylin.job;
-
-import static org.junit.Assert.fail;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.TimeZone;
-import java.util.UUID;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.LinkedBlockingDeque;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.AbstractKylinTestCase;
-import org.apache.kylin.common.util.ClassUtil;
-import org.apache.kylin.common.util.DateFormat;
-import org.apache.kylin.engine.mr.JobBuilderSupport;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.invertedindex.model.IIDesc;
-import org.apache.kylin.invertedindex.model.IIJoinedFlatTableDesc;
-import org.apache.kylin.job.common.ShellExecutable;
-import org.apache.kylin.job.constant.ExecutableConstants;
-import org.apache.kylin.job.engine.JobEngineConfig;
-import org.apache.kylin.job.hadoop.invertedindex.IICreateHTableJob;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.metadata.realization.RealizationStatusEnum;
-import org.apache.kylin.source.hive.HiveTableReader;
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.apache.kylin.streaming.StreamBuilder;
-import org.apache.kylin.streaming.StreamMessage;
-import org.apache.kylin.streaming.invertedindex.IIStreamConsumer;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-
-/**
- * Builds inverted index segments by streaming rows from an intermediate Hive
- * table through a StreamBuilder, then marks the II instances READY.
- */
-public class BuildIIWithStreamTest {
-
-    private static final Logger logger = LoggerFactory.getLogger(BuildIIWithStreamTest.class);
-
-    private static final String[] II_NAME = new String[] { "test_kylin_ii_left_join", "test_kylin_ii_inner_join" };
-    private IIManager iiManager;
-    private KylinConfig kylinConfig;
-
-    @BeforeClass
-    public static void beforeClass() throws Exception {
-        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
-    }
-
-    @Before
-    public void before() throws Exception {
-        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
-        DeployUtil.overrideJobJarLocations();
-
-        kylinConfig = KylinConfig.getInstanceFromEnv();
-        iiManager = IIManager.getInstance(kylinConfig);
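-        // Disable each II instance so its segments can be cleared and rebuilt.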
-        for (String iiName : II_NAME) {
-            IIInstance ii = iiManager.getII(iiName);
-            if (ii.getStatus() != RealizationStatusEnum.DISABLED) {
-                ii.setStatus(RealizationStatusEnum.DISABLED);
-                iiManager.updateII(ii);
-            }
-        }
-    }
-
-    private String createIntermediateTable(IIDesc desc, KylinConfig kylinConfig) throws IOException {
-        IIJoinedFlatTableDesc intermediateTableDesc = new IIJoinedFlatTableDesc(desc);
-        JobEngineConfig jobEngineConfig = new JobEngineConfig(kylinConfig);
-        final String uuid = UUID.randomUUID().toString();
-        final String useDatabaseHql = "USE " + kylinConfig.getHiveDatabaseForIntermediateTable() + ";";
-        final String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc);
-        final String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, JobBuilderSupport.getJobWorkingDir(jobEngineConfig, uuid));
-        String insertDataHqls;
-        try {
-            insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobEngineConfig);
-        } catch (IOException e) {
-            throw new RuntimeException("Failed to generate insert data SQL for intermediate table.", e);
-        }
-
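-        // Concatenate the USE/DROP/CREATE/INSERT statements into one "hive -e"
-        // invocation so the intermediate table is created and populated in a single step.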
-        ShellExecutable step = new ShellExecutable();
-        StringBuilder buf = new StringBuilder();
-        buf.append("hive -e \"");
-        buf.append(useDatabaseHql + "\n");
-        buf.append(dropTableHql + "\n");
-        buf.append(createTableHql + "\n");
-        buf.append(insertDataHqls + "\n");
-        buf.append("\"");
-
-        step.setCmd(buf.toString());
-        logger.info(step.getCmd());
-        step.setName(ExecutableConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE);
-        kylinConfig.getCliCommandExecutor().execute(step.getCmd(), null);
-        return intermediateTableDesc.getTableName();
-    }
-
-    private void clearSegment(String iiName) throws Exception {
-        IIInstance ii = iiManager.getII(iiName);
-        ii.getSegments().clear();
-        iiManager.updateII(ii);
-    }
-
-    private IISegment createSegment(String iiName) throws Exception {
-        clearSegment(iiName);
-        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
-        f.setTimeZone(TimeZone.getTimeZone("GMT"));
-
-        long date1 = 0;
-        long date2 = f.parse("2015-01-01").getTime();
-        return buildSegment(iiName, date1, date2);
-    }
-
-    private IISegment buildSegment(String iiName, long startDate, long endDate) throws Exception {
-        IIInstance iiInstance = iiManager.getII(iiName);
-        IISegment segment = iiManager.buildSegment(iiInstance, startDate, endDate);
-        iiInstance.getSegments().add(segment);
-        iiManager.updateII(iiInstance);
-        return segment;
-    }
-
-    private void buildII(String iiName) throws Exception {
-        final IIDesc desc = iiManager.getII(iiName).getDescriptor();
-        final String tableName = createIntermediateTable(desc, kylinConfig);
-        logger.info("intermediate table name:" + tableName);
-
-        HiveTableReader reader = new HiveTableReader("default", tableName);
-        final List<TblColRef> tblColRefs = desc.listAllColumns();
-        for (TblColRef tblColRef : tblColRefs) {
-            if (desc.isMetricsCol(tblColRef)) {
-                logger.info("matrix:" + tblColRef.getName());
-            } else {
-                logger.info("measure:" + tblColRef.getName());
-            }
-        }
-        LinkedBlockingDeque<StreamMessage> queue = new LinkedBlockingDeque<StreamMessage>();
-        final IISegment segment = createSegment(iiName);
-        String[] args = new String[] { "-iiname", iiName, "-htablename", segment.getStorageLocationIdentifier() };
-        ToolRunner.run(new IICreateHTableJob(), args);
-
-        ExecutorService executorService = Executors.newSingleThreadExecutor();
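-        // The limited-size StreamBuilder drains the queue and feeds rows to the
-        // IIStreamConsumer, cutting slices at the descriptor's slice size and
-        // writing them into the segment's HTable.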
-        final StreamBuilder streamBuilder = StreamBuilder.newLimitedSizeStreamBuilder(iiName, queue, new IIStreamConsumer(iiName, segment.getStorageLocationIdentifier(), segment.getIIDesc(), 0), 0, segment.getIIDesc().getSliceSize());
-
-        List<String[]> sorted = getSortedRows(reader, desc.getTimestampColumn());
-        int count = sorted.size();
-        for (String[] row : sorted) {
-            logger.info("another row: " + StringUtils.join(row, ","));
-            queue.put(parse(row));
-        }
-
-        reader.close();
-        logger.info("total record count:" + count + " htable:" + segment.getStorageLocationIdentifier());
-        queue.put(StreamMessage.EOF);
-        final Future<?> future = executorService.submit(streamBuilder);
-        try {
-            future.get();
-        } catch (Exception e) {
-            logger.error("stream build failed", e);
-            fail("stream build failed");
-        }
-
-        logger.info("stream build finished, htable name:" + segment.getStorageLocationIdentifier());
-    }
-
-    @Test
-    public void test() throws Exception {
-        for (String iiName : II_NAME) {
-            buildII(iiName);
-            IIInstance ii = iiManager.getII(iiName);
-            if (ii.getStatus() != RealizationStatusEnum.READY) {
-                ii.setStatus(RealizationStatusEnum.READY);
-                iiManager.updateII(ii);
-            }
-        }
-    }
-
-    private StreamMessage parse(String[] row) {
-        return new StreamMessage(System.currentTimeMillis(), StringUtils.join(row, ",").getBytes());
-    }
-
-    private List<String[]> getSortedRows(HiveTableReader reader, final int tsCol) throws IOException {
-        List<String[]> unsorted = Lists.newArrayList();
-        while (reader.next()) {
-            unsorted.add(reader.getRow());
-        }
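-        // Sort by the timestamp column so messages enter the stream in ascending time order.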
-        Collections.sort(unsorted, new Comparator<String[]>() {
-            @Override
-            public int compare(String[] o1, String[] o2) {
-                long t1 = DateFormat.stringToMillis(o1[tsCol]);
-                long t2 = DateFormat.stringToMillis(o2[tsCol]);
-                return Long.compare(t1, t2);
-            }
-        });
-        return unsorted;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/DataGenTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/DataGenTest.java b/job/src/test/java/org/apache/kylin/job/DataGenTest.java
deleted file mode 100644
index 5c01305..0000000
--- a/job/src/test/java/org/apache/kylin/job/DataGenTest.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job;
-
-import static org.junit.Assert.assertTrue;
-
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.apache.kylin.job.dataGen.FactTableGenerator;
-import org.apache.kylin.metadata.MetadataManager;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-/**
- * Smoke test for FactTableGenerator: generates sample fact table data and
- * loads it into the test fact table.
- */
-public class DataGenTest extends LocalFileMetadataTestCase {
-
-    @Before
-    public void before() throws Exception {
-        this.createTestMetadata();
-        MetadataManager.clearCache();
-    }
-
-    @After
-    public void after() throws Exception {
-        this.cleanupTestMetadata();
-    }
-
-    @Test
-    public void testBasics() throws Exception {
-        String content = FactTableGenerator.generate("test_kylin_cube_with_slr_ready", "10000", "1", null); // default settings
-        System.out.println(content);
-        assertTrue(content.contains("FP-non GTC"));
-        assertTrue(content.contains("ABIN"));
-
-        DeployUtil.overrideFactTableData(content, "default.test_kylin_fact");
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/6c59e107/job/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java b/job/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java
deleted file mode 100644
index 7f12069..0000000
--- a/job/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.job;
-
-import java.io.File;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.AbstractKylinTestCase;
-import org.apache.kylin.common.util.ClassUtil;
-import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-
-/**
- * This test case is for dev use ONLY; it deploys local metadata to the sandbox.
- */
-@Ignore("dev use only")
-public class DeployLocalMetaToRemoteTest {
-
-    private static final Log logger = LogFactory.getLog(DeployLocalMetaToRemoteTest.class);
-
-    @BeforeClass
-    public static void beforeClass() throws Exception {
-        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
-        System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
-        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
-    }
-
-    @Before
-    public void before() throws Exception {
-        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
-
-        DeployUtil.initCliWorkDir();
-        DeployUtil.deployMetadata();
-        DeployUtil.overrideJobJarLocations();
-
-    }
-
-    @After
-    public void after() {
-        HBaseMetadataTestCase.staticCleanupTestMetadata();
-    }
-
-    @Test
-    public void test() throws Exception {
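-        // Deployment happens in before(); the test body is intentionally a no-op.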
-        System.out.println("blank");
-    }
-
-}
\ No newline at end of file