Posted to commits@kylin.apache.org by li...@apache.org on 2016/03/10 08:50:04 UTC

[01/50] [abbrv] kylin git commit: KYLIN-1337 sort case-insensitively by default

Repository: kylin
Updated Branches:
  refs/heads/master 13e6a434a -> 4e48f2ae6


KYLIN-1337 sort case-insensitively by default


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/83bf92ca
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/83bf92ca
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/83bf92ca

Branch: refs/heads/master
Commit: 83bf92ca2ef70ff831859c3b20150ee524b628b9
Parents: c3a9b97
Author: janzhongi <ji...@ebay.com>
Authored: Thu Feb 25 17:52:40 2016 +0800
Committer: janzhongi <ji...@ebay.com>
Committed: Thu Feb 25 17:52:40 2016 +0800

----------------------------------------------------------------------
 webapp/app/js/filters/filter.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/83bf92ca/webapp/app/js/filters/filter.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/filters/filter.js b/webapp/app/js/filters/filter.js
index 1eaae93..f18b6f6 100755
--- a/webapp/app/js/filters/filter.js
+++ b/webapp/app/js/filters/filter.js
@@ -54,7 +54,7 @@ KylinApp
           result = 1;
         }
         else {
-          result = attriOfA > attriOfB ? 1 : attriOfA < attriOfB ? -1 : 0;
+          result = attriOfA.toLowerCase() > attriOfB.toLowerCase() ? 1 : attriOfA.toLowerCase() < attriOfB.toLowerCase() ? -1 : 0;
         }
         return reverse ? -result : result;
       });
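
The fix lowercases both operands before comparing. A minimal Java sketch of the same technique (illustrative only, not part of this commit) derives each lowercased key once instead of recomputing it in every branch of the chained ternary; the JDK ships String.CASE_INSENSITIVE_ORDER for exactly this purpose:

    import java.util.Comparator;

    public class CaseInsensitiveSort {
        // Lowercase each key once, then compare; returns <0, 0 or >0
        // exactly like the chained ternary in the diff above.
        static final Comparator<String> BY_NAME_IGNORE_CASE =
                Comparator.comparing(String::toLowerCase);
        // usage: names.sort(BY_NAME_IGNORE_CASE);
    }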


[32/50] [abbrv] kylin git commit: minor, UI: disable model name edit when editing a model

Posted by li...@apache.org.
minor, UI: disable model name edit when editing a model


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ecd12669
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ecd12669
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ecd12669

Branch: refs/heads/master
Commit: ecd12669a7d7f8a2febbc5199c5291ccfbf339d9
Parents: 2f44970
Author: Jason <ji...@163.com>
Authored: Fri Mar 4 17:58:30 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Fri Mar 4 17:59:12 2016 +0800

----------------------------------------------------------------------
 webapp/app/partials/modelDesigner/model_info.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ecd12669/webapp/app/partials/modelDesigner/model_info.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/modelDesigner/model_info.html b/webapp/app/partials/modelDesigner/model_info.html
index a290bfb..bace322 100644
--- a/webapp/app/partials/modelDesigner/model_info.html
+++ b/webapp/app/partials/modelDesigner/model_info.html
@@ -31,7 +31,7 @@
                     <div class="col-xs-12 col-sm-6" ng-class="{'has-error':forms.model_info_form.model_name.$invalid && (forms.model_info_form.model_name.$dirty||forms.model_info_form.$sbumitted)}">
 
                         <!-- edit -->
-                        <input ng-if="state.mode=='edit'" name="model_name" type="text" class="form-control"
+                        <input ng-if="state.mode=='edit'" name="model_name" type="text" class="form-control"  ng-disabled="{{modelMode=='editExistModel'}}"
                                ng-model="modelsManager.selectedModel.name" required
                                placeholder="You can use letters, numbers, and '_'"
                                ng-maxlength=100 ng-pattern="/^\w+$/" />


[29/50] [abbrv] kylin git commit: KYLIN-1469 Find hive dependencies from the JVM if not set in environment variables

Posted by li...@apache.org.
KYLIN-1469 Find hive dependencies from the JVM if not set in environment variables


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/b41c4460
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/b41c4460
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/b41c4460

Branch: refs/heads/master
Commit: b41c44600f801f80aa379494bd3be9bc77d5fa65
Parents: 8a3e0e0
Author: lidongsjtu <li...@apache.org>
Authored: Fri Mar 4 14:02:28 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Fri Mar 4 14:02:28 2016 +0800

----------------------------------------------------------------------
 .../engine/mr/common/AbstractHadoopJob.java     | 22 +++++++++++++++++++-
 examples/test_case_data/sandbox/mapred-site.xml |  2 +-
 2 files changed, 22 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/b41c4460/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
index e4eee96..61983d5 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
@@ -23,7 +23,7 @@ package org.apache.kylin.engine.mr.common;
  *
  */
 
-import static org.apache.hadoop.util.StringUtils.formatTime;
+import static org.apache.hadoop.util.StringUtils.*;
 
 import java.io.File;
 import java.io.IOException;
@@ -49,6 +49,7 @@ import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.util.ClassUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
@@ -217,6 +218,25 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
             if (kylinDependency.length() > 0)
                 kylinDependency.append(",");
             kylinDependency.append(filteredHive);
+        } else {
+
+            logger.info("No hive dependency jars set in the environment, will find them from jvm:");
+
+            try {
+                String hiveExecJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hadoop.hive.ql.Driver"));
+                kylinDependency.append(hiveExecJarPath).append(",");
+                logger.info("hive-exec jar file: " + hiveExecJarPath);
+
+                String hiveHCatJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hive.hcatalog.mapreduce.HCatInputFormat"));
+                kylinDependency.append(hiveHCatJarPath).append(",");
+                logger.info("hive-catalog jar file: " + hiveHCatJarPath);
+
+                String hiveMetaStoreJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hadoop.hive.metastore.api.Table"));
+                kylinDependency.append(hiveMetaStoreJarPath).append(",");
+                logger.info("hive-metastore jar file: " + hiveMetaStoreJarPath);
+            } catch (ClassNotFoundException e) {
+                logger.error("Cannot found hive dependency jars: " + e);
+            }
         }
 
         // for KylinJobMRLibDir
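
When the hive environment variables are absent, the patch above falls back to locating the hive jars from classes already loadable in the current JVM. A minimal standalone sketch of that lookup, assuming hadoop-common and the hive classes are on the classpath:

    import org.apache.hadoop.util.ClassUtil;

    public class FindHiveJars {
        public static void main(String[] args) throws ClassNotFoundException {
            // findContainingJar returns the path of the jar that holds the
            // given class, or null if the class was not loaded from a jar.
            String hiveExec = ClassUtil.findContainingJar(
                    Class.forName("org.apache.hadoop.hive.ql.Driver"));
            System.out.println("hive-exec jar: " + hiveExec);
        }
    }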

http://git-wip-us.apache.org/repos/asf/kylin/blob/b41c4460/examples/test_case_data/sandbox/mapred-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/mapred-site.xml b/examples/test_case_data/sandbox/mapred-site.xml
index 7b1a4be..18f6feb 100644
--- a/examples/test_case_data/sandbox/mapred-site.xml
+++ b/examples/test_case_data/sandbox/mapred-site.xml
@@ -58,7 +58,7 @@
 
     <property>
         <name>mapreduce.application.classpath</name>
-        <value>/tmp/kylin/*,$HADOOP_CONF_DIR,/usr/hdp/${hdp.version}/hbase/lib/hbase-common.jar,/usr/hdp/current/hive-client/conf/,/usr/hdp/${hdp.version}/hive/lib/hive-metastore.jar,/usr/hdp/${hdp.version}/hive/lib/hive-exec.jar,/usr/hdp/${hdp.version}/hive-hcatalog/share/hcatalog/*,$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/usr/hdp/${hdp.version}/hadoop/lib/snappy-java-1.0.4.1.jar:/etc/hadoop/conf/secure</value>
+        <value>/tmp/kylin/*,$HADOOP_CONF_DIR,/usr/hdp/${hdp.version}/hbase/lib/hbase-common.jar,/usr/hdp/current/hive-client/conf/,$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/usr/hdp/${hdp.version}/hadoop/lib/snappy-java-1.0.4.1.jar:/etc/hadoop/conf/secure</value>
     </property>
 
     <property>


[37/50] [abbrv] kylin git commit: KYLIN-1420 enhance and update test case

Posted by li...@apache.org.
KYLIN-1420 enhance and update test case

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/250978d8
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/250978d8
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/250978d8

Branch: refs/heads/master
Commit: 250978d887e577582f26fc05036d6a4af82dfd0b
Parents: 3736f72
Author: shaofengshi <sh...@apache.org>
Authored: Mon Feb 15 18:06:18 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Mar 8 12:15:11 2016 +0800

----------------------------------------------------------------------
 .../kylin/gridtable/GTScanRangePlanner.java     | 27 ++++++++------------
 .../kylin/gridtable/DictGridTableTest.java      |  2 +-
 2 files changed, 11 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/250978d8/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
index d314dde..559a245 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
@@ -1,17 +1,8 @@
 package org.apache.kylin.gridtable;
 
-import java.util.ArrayList;
-import java.util.BitSet;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.ImmutableBitSet;
@@ -26,9 +17,7 @@ import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import java.util.*;
 
 public class GTScanRangePlanner {
 
@@ -108,8 +97,10 @@ public class GTScanRangePlanner {
         for (ColumnRange range : andDimRanges) {
             if (partitionColRef != null && range.column.equals(partitionColRef)) {
                 if (rangeStartEndComparator.comparator.compare(segmentStartAndEnd.getFirst(), range.end) <= 0 //
-                        && rangeStartEndComparator.comparator.compare(range.begin, segmentStartAndEnd.getSecond()) <= 0) {
-                    //segment range is [Closed,Open), but segmentStartAndEnd.getSecond() might be rounded, so use <=. 
+                        && (rangeStartEndComparator.comparator.compare(range.begin, segmentStartAndEnd.getSecond()) < 0 //
+                        || rangeStartEndComparator.comparator.compare(range.begin, segmentStartAndEnd.getSecond()) == 0 //
+                        && (range.op == FilterOperatorEnum.EQ || range.op == FilterOperatorEnum.LTE || range.op == FilterOperatorEnum.GTE || range.op == FilterOperatorEnum.IN))) {
+                    //segment range is [Closed,Open), but segmentStartAndEnd.getSecond() might be rounded, so use <= when has equals in condition. 
                 } else {
                     logger.debug("Pre-check partition col filter failed, partitionColRef {}, segment start {}, segment end {}, range begin {}, range end {}",//
                             new Object[] { partitionColRef, makeReadable(segmentStartAndEnd.getFirst()), makeReadable(segmentStartAndEnd.getSecond()), makeReadable(range.begin), makeReadable(range.end) });
@@ -346,9 +337,11 @@ public class GTScanRangePlanner {
         private ByteArray begin = ByteArray.EMPTY;
         private ByteArray end = ByteArray.EMPTY;
         private Set<ByteArray> valueSet;
+        private FilterOperatorEnum op;
 
         public ColumnRange(TblColRef column, Set<ByteArray> values, FilterOperatorEnum op) {
             this.column = column;
+            this.op = op;
 
             switch (op) {
             case EQ:

http://git-wip-us.apache.org/repos/asf/kylin/blob/250978d8/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
index df69c17..674aa15 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
@@ -118,7 +118,7 @@ public class DictGridTableTest {
         {
             LogicalTupleFilter filter = and(timeComp4, ageComp1);
             List<GTScanRange> r = planner.planScanRanges(filter);
-            assertEquals(0, r.size());
+            assertEquals(1, r.size());
         }
         {
             LogicalTupleFilter filter = and(timeComp5, ageComp1);

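The refined pre-check above accepts a filter range that begins exactly at the segment end only when the filter operator carries equality (EQ, LTE, GTE or IN), because the stored segment end may have been rounded. A minimal sketch of the rule, with illustrative names rather than Kylin's actual API:

    // Segment range is [segStart, segEnd); filter range is [begin, end].
    static boolean mayOverlap(long segStart, long segEnd,
                              long begin, long end, boolean opHasEquals) {
        boolean startOk = segStart <= end;
        // segEnd is exclusive, but it may have been rounded up, so a filter
        // beginning exactly at segEnd still matches when equality is involved.
        boolean endOk = begin < segEnd || (begin == segEnd && opHasEquals);
        return startOk && endOk;
    }

This is also why the expected scan-range count in DictGridTableTest changes from 0 to 1: a boundary filter that previously pruned the segment now yields one scan range.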

[36/50] [abbrv] kylin git commit: KYLIN-1420 Query returns empty result on partition column's boundary condition

Posted by li...@apache.org.
KYLIN-1420 Query returns empty result on partition column's boundary condition


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3736f72c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3736f72c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3736f72c

Branch: refs/heads/master
Commit: 3736f72cc756af28a39a473efc47bec943ba7fc9
Parents: 3af7d4a
Author: shaofengshi <sh...@apache.org>
Authored: Mon Feb 15 15:52:15 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Mar 8 12:14:18 2016 +0800

----------------------------------------------------------------------
 .../main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3736f72c/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
index a72426d..d314dde 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
@@ -108,8 +108,8 @@ public class GTScanRangePlanner {
         for (ColumnRange range : andDimRanges) {
             if (partitionColRef != null && range.column.equals(partitionColRef)) {
                 if (rangeStartEndComparator.comparator.compare(segmentStartAndEnd.getFirst(), range.end) <= 0 //
-                        && rangeStartEndComparator.comparator.compare(range.begin, segmentStartAndEnd.getSecond()) < 0) {
-                    //segment range is [Closed,Open)
+                        && rangeStartEndComparator.comparator.compare(range.begin, segmentStartAndEnd.getSecond()) <= 0) {
+                    //segment range is [Closed,Open), but segmentStartAndEnd.getSecond() might be rounded, so use <=. 
                 } else {
                     logger.debug("Pre-check partition col filter failed, partitionColRef {}, segment start {}, segment end {}, range begin {}, range end {}",//
                             new Object[] { partitionColRef, makeReadable(segmentStartAndEnd.getFirst()), makeReadable(segmentStartAndEnd.getSecond()), makeReadable(range.begin), makeReadable(range.end) });

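To see why the strict '<' lost results: when a query's filter begins exactly at the declared (possibly rounded) segment end, the old comparison evaluated false and the segment was pruned, producing the empty result this issue describes. Illustrative values only:

    long segEnd      = 20160301L;  // declared exclusive end, possibly rounded up
    long filterBegin = 20160301L;  // e.g. WHERE cal_dt >= '2016-03-01'
    boolean oldCheck = filterBegin <  segEnd;  // false -> segment pruned, empty result
    boolean newCheck = filterBegin <= segEnd;  // true  -> segment scanned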

[17/50] [abbrv] kylin git commit: KYLIN-1458: Check the consistency of cube segments' host with the environment after cube migration

Posted by li...@apache.org.
KYLIN-1458: Check the consistency of cube segments' host with the environment after cube migration

Signed-off-by: Hongbin Ma <ma...@apache.org>

KYLIN-1458: Add an auto-fix for inconsistencies found after cube migration

Signed-off-by: Hongbin Ma <ma...@apache.org>

KYLIN-1458: rename parameters to avoid confusion

Signed-off-by: Hongbin Ma <ma...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ca597955
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ca597955
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ca597955

Branch: refs/heads/master
Commit: ca597955477650a388f882b968e25499b02a4bf0
Parents: 5474fe4
Author: yangzhong <ya...@ebay.com>
Authored: Mon Feb 29 09:37:06 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Mar 1 13:38:02 2016 +0800

----------------------------------------------------------------------
 .../storage/hbase/util/CubeMigrationCLI.java    |   7 +
 .../hbase/util/CubeMigrationCheckCLI.java       | 195 +++++++++++++++++++
 2 files changed, 202 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ca597955/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index caf79b2..09aab48 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -141,6 +141,8 @@ public class CubeMigrationCLI {
         } else {
             showOpts();
         }
+
+        checkMigrationSuccess(dstConfig, cubeName, true);
     }
 
     public static void moveCube(String srcCfgUri, String dstCfgUri, String cubeName, String projectName, String copyAcl, String purgeAndDisable, String overwriteIfExists, String realExecute) throws IOException, InterruptedException {
@@ -148,6 +150,11 @@ public class CubeMigrationCLI {
         moveCube(KylinConfig.createInstanceFromUri(srcCfgUri), KylinConfig.createInstanceFromUri(dstCfgUri), cubeName, projectName, copyAcl, purgeAndDisable, overwriteIfExists, realExecute);
     }
 
+    public static void checkMigrationSuccess(KylinConfig kylinConfig, String cubeName, Boolean ifFix) throws IOException{
+        CubeMigrationCheckCLI checkCLI = new CubeMigrationCheckCLI(kylinConfig, ifFix);
+        checkCLI.execute(cubeName);
+    }
+
     private static String checkAndGetHbaseUrl() {
         String srcMetadataUrl = srcConfig.getMetadataUrl();
         String dstMetadataUrl = dstConfig.getMetadataUrl();

http://git-wip-us.apache.org/repos/asf/kylin/blob/ca597955/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
new file mode 100644
index 0000000..2762561
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
@@ -0,0 +1,195 @@
+package org.apache.kylin.storage.hbase.util;
+
+import com.google.common.collect.Lists;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.OptionsHelper;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * <p/>
+ * This tool serves for the purpose of
+ * checking the "KYLIN_HOST" property to be consistent with the dst's MetadataUrlPrefix
+ * for all of cube segments' corresponding HTables after migrating a cube
+ * <p/>
+ */
+public class CubeMigrationCheckCLI {
+
+    private static final Logger logger = LoggerFactory.getLogger(CubeMigrationCheckCLI.class);
+
+    private static final Option OPTION_FIX = OptionBuilder.withArgName("fix").hasArg().isRequired(false).withDescription("Fix the inconsistent cube segments' HOST").create("fix");
+
+    private static final Option OPTION_DST_CFG_URI = OptionBuilder.withArgName("dstCfgUri").hasArg().isRequired(false).withDescription("The KylinConfig of the cube’s new home").create("dstCfgUri");
+
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
+
+
+    private KylinConfig dstCfg;
+    private HBaseAdmin hbaseAdmin;
+
+    private List<String> issueExistHTables;
+    private List<String> inconsistentHTables;
+    
+    private boolean ifFix = false;
+
+    public static void main(String[] args) throws ParseException, IOException {
+
+        OptionsHelper optionsHelper = new OptionsHelper();
+
+        Options options = new Options();
+        options.addOption(OPTION_FIX);
+        options.addOption(OPTION_DST_CFG_URI);
+        options.addOption(OPTION_CUBE);
+
+        boolean ifFix = false;
+        String dstCfgUri;
+        String cubeName;
+        logger.info("jobs args: " + Arrays.toString(args));
+        try {
+
+            optionsHelper.parseOptions(options, args);
+
+            logger.info("options: '" + options.toString() + "'");
+            logger.info("option value 'fix': '" + optionsHelper.getOptionValue(OPTION_FIX) + "'");
+            ifFix = Boolean.parseBoolean(optionsHelper.getOptionValue(OPTION_FIX));
+
+            logger.info("option value 'dstCfgUri': '" + optionsHelper.getOptionValue(OPTION_DST_CFG_URI) + "'");
+            dstCfgUri = optionsHelper.getOptionValue(OPTION_DST_CFG_URI);
+
+            logger.info("option value 'cube': '" + optionsHelper.getOptionValue(OPTION_CUBE) + "'");
+            cubeName = optionsHelper.getOptionValue(OPTION_CUBE);
+
+        } catch (ParseException e) {
+            optionsHelper.printUsage(CubeMigrationCheckCLI.class.getName(), options);
+            throw e;
+        }
+
+        KylinConfig kylinConfig;
+        if(dstCfgUri==null){
+            kylinConfig = KylinConfig.getInstanceFromEnv();
+        }else{
+            kylinConfig = KylinConfig.createInstanceFromUri(dstCfgUri);
+        }
+
+        CubeMigrationCheckCLI checkCLI = new CubeMigrationCheckCLI(kylinConfig, ifFix);
+        checkCLI.execute(cubeName);
+    }
+
+    public void execute() throws IOException{
+        execute(null);
+    }
+
+    public void execute(String cubeName) throws IOException{
+        if(cubeName==null){
+            checkAll();
+        }else {
+            checkCube(cubeName);
+        }
+        fixInconsistent();
+        printIssueExistingHTables();
+    }
+
+    public CubeMigrationCheckCLI(KylinConfig kylinConfig, Boolean isFix) throws IOException{
+        this.dstCfg = kylinConfig;
+        this.ifFix = isFix;
+
+        Configuration conf = HBaseConfiguration.create();
+        hbaseAdmin = new HBaseAdmin(conf);
+
+        issueExistHTables = Lists.newArrayList();
+        inconsistentHTables = Lists.newArrayList();
+    }
+
+    public void checkCube(String cubeName) {
+        List<String> segFullNameList = Lists.newArrayList();
+
+        CubeInstance cube = CubeManager.getInstance(dstCfg).getCube(cubeName);
+        addHTableNamesForCube(cube,segFullNameList);
+
+        check(segFullNameList);
+    }
+
+    public void checkAll(){
+        List<String> segFullNameList = Lists.newArrayList();
+
+        CubeManager cubeMgr = CubeManager.getInstance(dstCfg);
+        for (CubeInstance cube : cubeMgr.listAllCubes()) {
+            addHTableNamesForCube(cube, segFullNameList);
+        }
+
+        check(segFullNameList);
+    }
+
+    public void addHTableNamesForCube(CubeInstance cube, List<String> segFullNameList){
+        for (CubeSegment seg : cube.getSegments()) {
+            String tableName = seg.getStorageLocationIdentifier();
+            segFullNameList.add(tableName+","+cube.getName());
+        }
+    }
+
+    public void check(List<String> segFullNameList){
+        issueExistHTables = Lists.newArrayList();
+        inconsistentHTables = Lists.newArrayList();
+
+        for(String segFullName:segFullNameList){
+            String[] sepNameList = segFullName.split(",");
+            try {
+                HTableDescriptor hTableDescriptor = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
+                String host = hTableDescriptor.getValue(IRealizationConstants.HTableTag);
+                if(!dstCfg.getMetadataUrlPrefix().equalsIgnoreCase(host)){
+                    inconsistentHTables.add(segFullName);
+                }
+            }catch (IOException e){
+                issueExistHTables.add(segFullName);
+                continue;
+            }
+        }
+    }
+
+    public void fixInconsistent() throws IOException{
+        if(ifFix == true){
+            for(String segFullName : inconsistentHTables){
+                String[] sepNameList = segFullName.split(",");
+                HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
+                logger.info("Change the host of htable "+sepNameList[0]+"belonging to cube "+sepNameList[1]+" from "+desc.getValue(IRealizationConstants.HTableTag)+" to "+dstCfg.getMetadataUrlPrefix());
+                hbaseAdmin.disableTable(sepNameList[0]);
+                desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
+                hbaseAdmin.modifyTable(sepNameList[0], desc);
+                hbaseAdmin.enableTable(sepNameList[0]);
+            }
+        }else{
+            logger.info("------ Inconsistent HTables Needed To Be Fixed ------");
+            for (String hTable : inconsistentHTables) {
+                String[] sepNameList = hTable.split(",");
+                logger.info(sepNameList[0]+" belonging to cube "+sepNameList[1]);
+            }
+            logger.info("----------------------------------------------------");
+        }
+    }
+
+    public void printIssueExistingHTables(){
+        logger.info("------ HTables exist issues in hbase : not existing, metadata broken ------");
+        for(String segFullName : issueExistHTables){
+            String[] sepNameList = segFullName.split(",");
+            logger.error(sepNameList[0]+" belonging to cube "+sepNameList[1]+" has some issues and cannot be read successfully!!!");
+        }
+        logger.info("----------------------------------------------------");
+    }
+}
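
Besides the command-line entry point, the checker can be driven programmatically, as checkMigrationSuccess(...) in CubeMigrationCLI does above. A minimal sketch, with an illustrative cube name:

    KylinConfig dstConfig = KylinConfig.getInstanceFromEnv();
    // true = fix inconsistent HTable hosts; false = only report them.
    // Both calls below may throw IOException.
    CubeMigrationCheckCLI checkCLI = new CubeMigrationCheckCLI(dstConfig, true);
    checkCLI.execute("sample_cube");   // or execute() to check all cubes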


[22/50] [abbrv] kylin git commit: minor, remove unused files

Posted by li...@apache.org.
minor, remove unused files


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/1ea781f0
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/1ea781f0
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/1ea781f0

Branch: refs/heads/master
Commit: 1ea781f0005c1e20c0e775e660e47a6964f07bcb
Parents: 3fb67ca
Author: lidongsjtu <li...@apache.org>
Authored: Wed Mar 2 17:34:59 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Wed Mar 2 17:34:59 2016 +0800

----------------------------------------------------------------------
 ...port-load-hive-table-from-listed-tree-.patch | 864 -------------------
 1 file changed, 864 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/1ea781f0/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
----------------------------------------------------------------------
diff --git a/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch b/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
deleted file mode 100644
index 31cc017..0000000
--- a/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
+++ /dev/null
@@ -1,864 +0,0 @@
-From 1a79ef1aec557259f9611f5b3199c2e90400be77 Mon Sep 17 00:00:00 2001
-From: Jason <ji...@163.com>
-Date: Wed, 2 Mar 2016 14:40:19 +0800
-Subject: [PATCH] KYLIN-1074 support load hive table from listed tree, patch
- from @nichunen
-
----
- build/conf/kylin.properties                        |   2 +
- examples/test_case_data/sandbox/kylin.properties   |   1 +
- pom.xml                                            |   2 +
- .../kylin/rest/controller/TableController.java     |  44 +++
- .../org/apache/kylin/source/hive/HiveClient.java   |   8 +
- webapp/app/index.html                              |   1 +
- webapp/app/js/controllers/sourceMeta.js            | 186 ++++++++++-
- webapp/app/js/directives/angular-tree-control.js   | 363 +++++++++++++++++++++
- webapp/app/js/services/kylinProperties.js          |  15 +-
- webapp/app/js/services/tables.js                   |   7 +-
- webapp/app/partials/tables/source_table_tree.html  |  26 ++
- webapp/bower.json                                  |   3 +-
- webapp/grunt.json                                  |   1 -
- 13 files changed, 649 insertions(+), 10 deletions(-)
- create mode 100644 webapp/app/js/directives/angular-tree-control.js
-
-diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
-index a4b8c3b..e8add7c 100644
---- a/build/conf/kylin.properties
-+++ b/build/conf/kylin.properties
-@@ -158,3 +158,5 @@ deploy.env=DEV
- 
- ###########################deprecated configs#######################
- kylin.sandbox=true
-+
-+kylin.web.hive.limit=20
-\ No newline at end of file
-diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
-index 9451b78..1a74b80 100644
---- a/examples/test_case_data/sandbox/kylin.properties
-+++ b/examples/test_case_data/sandbox/kylin.properties
-@@ -131,3 +131,4 @@ kylin.web.contact_mail=
- deploy.env=DEV
- 
- 
-+kylin.web.hive.limit=20
-\ No newline at end of file
-diff --git a/pom.xml b/pom.xml
-index 9d9a54b..537693f 100644
---- a/pom.xml
-+++ b/pom.xml
-@@ -774,6 +774,8 @@
-                                 <!-- MIT license -->
-                                 <exclude>webapp/app/css/AdminLTE.css</exclude>
-                                 <exclude>webapp/app/js/directives/kylin_abn_tree_directive.js</exclude>
-+                                <exclude>webapp/app/js/directives/angular-tree-control.js</exclude>
-+
- 
-                                 <!--configuration file -->
-                                 <exclude>webapp/app/routes.json</exclude>
-diff --git a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
-index 39af7db..ea5fdd4 100644
---- a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
-+++ b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
-@@ -33,6 +33,7 @@ import org.apache.kylin.rest.request.CardinalityRequest;
- import org.apache.kylin.rest.request.StreamingRequest;
- import org.apache.kylin.rest.response.TableDescResponse;
- import org.apache.kylin.rest.service.CubeService;
-+import org.apache.kylin.source.hive.HiveClient;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
- import org.springframework.beans.factory.annotation.Autowired;
-@@ -205,6 +206,49 @@ public class TableController extends BasicController {
-         return descs;
-     }
- 
-+    /**
-+     * Show all databases in Hive
-+     *
-+     * @return Hive databases list
-+     * @throws IOException
-+     */
-+    @RequestMapping(value = "/hive", method = { RequestMethod.GET })
-+    @ResponseBody
-+    private static List<String> showHiveDatabases() throws IOException {
-+        HiveClient hiveClient = new HiveClient();
-+        List<String> results = null;
-+
-+        try {
-+            results = hiveClient.getHiveDbNames();
-+        } catch (Exception e) {
-+            e.printStackTrace();
-+            throw new IOException(e);
-+        }
-+        return results;
-+    }
-+
-+    /**
-+     * Show all tables in a Hive database
-+     *
-+     * @return Hive table list
-+     * @throws IOException
-+     */
-+    @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET })
-+    @ResponseBody
-+    private static List<String> showHiveTables(@PathVariable String database) throws IOException {
-+        HiveClient hiveClient = new HiveClient();
-+        List<String> results = null;
-+
-+        try {
-+            results = hiveClient.getHiveTableNames(database);
-+        } catch (Exception e) {
-+            e.printStackTrace();
-+            throw new IOException(e);
-+        }
-+        return results;
-+    }
-+
-+
-     public void setCubeService(CubeService cubeService) {
-         this.cubeMgmtService = cubeService;
-     }
-diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
-index 178889e..a99b304 100644
---- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
-+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
-@@ -132,6 +132,14 @@ public class HiveClient {
-         return getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES);
-     }
- 
-+    public List<String> getHiveDbNames() throws Exception {
-+        return getMetaStoreClient().getAllDatabases();
-+    }
-+
-+    public List<String> getHiveTableNames(String database) throws Exception {
-+        return getMetaStoreClient().getAllTables(database);
-+    }
-+
-     /**
-      * COPIED FROM org.apache.hadoop.hive.ql.stats.StatsUtil for backward compatibility
-      * 
-diff --git a/webapp/app/index.html b/webapp/app/index.html
-index 11ca283..b4eb9d7 100644
---- a/webapp/app/index.html
-+++ b/webapp/app/index.html
-@@ -113,6 +113,7 @@
- <script src="js/filters/filter.js"></script>
- <script src="js/directives/directives.js"></script>
- <script src="js/directives/kylin_abn_tree_directive.js"></script>
-+<script src="js/directives/angular-tree-control.js"></script>
- <script src="js/factories/graph.js"></script>
- <script src="js/services/cache.js"></script>
- <script src="js/services/message.js"></script>
-diff --git a/webapp/app/js/controllers/sourceMeta.js b/webapp/app/js/controllers/sourceMeta.js
-index abdeeb8..c87d6ef 100755
---- a/webapp/app/js/controllers/sourceMeta.js
-+++ b/webapp/app/js/controllers/sourceMeta.js
-@@ -19,14 +19,14 @@
- 'use strict';
- 
- KylinApp
--  .controller('SourceMetaCtrl', function ($scope, $cacheFactory, $q, $window, $routeParams, CubeService, $modal, TableService, $route, loadingRequest, SweetAlert, tableConfig, TableModel,cubeConfig) {
-+  .controller('SourceMetaCtrl', function ($scope, $cacheFactory, $q, $window, $routeParams, CubeService, $modal, TableService, $route, loadingRequest, SweetAlert, tableConfig, TableModel,cubeConfig,kylinConfig) {
-     var $httpDefaultCache = $cacheFactory.get('$http');
-     $scope.tableModel = TableModel;
-     $scope.tableModel.selectedSrcDb = [];
-     $scope.tableModel.selectedSrcTable = {};
-     $scope.window = 0.68 * $window.innerHeight;
-     $scope.tableConfig = tableConfig;
--
-+    $scope.kylinConfig = kylinConfig;
- 
-     $scope.state = {
-       filterAttr: 'id', filterReverse: false, reverseColumn: 'id',
-@@ -100,13 +100,193 @@ KylinApp
-       });
-     };
- 
--    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope) {
-+    $scope.openTreeModal = function () {
-+      $modal.open({
-+        templateUrl: 'addHiveTableFromTree.html',
-+        controller: ModalInstanceCtrl,
-+        resolve: {
-+          tableNames: function () {
-+            return $scope.tableNames;
-+          },
-+          projectName:function(){
-+            return  $scope.projectModel.selectedProject;
-+          },
-+          scope: function () {
-+            return $scope;
-+          }
-+        }
-+      });
-+    };
-+
-+    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope,kylinConfig) {
-       $scope.tableNames = "";
-       $scope.projectName = projectName;
-       $scope.cancel = function () {
-         $modalInstance.dismiss('cancel');
-       };
-+
-+      $scope.kylinConfig = kylinConfig;
-+
-+
-+      $scope.treeOptions = {multiSelection: true};
-+      $scope.selectedNodes = [];
-+      $scope.hiveLimit =  kylinConfig.getHiveLimit();
-+
-+      $scope.loadHive = function () {
-+        if($scope.hiveLoaded)
-+          return;
-+        TableService.showHiveDatabases({}, function (databases) {
-+          $scope.dbNum = databases.length;
-+          if (databases.length > 0) {
-+            $scope.hiveMap = {};
-+            for (var i = 0; i < databases.length; i++) {
-+              var dbName = databases[i];
-+              var hiveData = {"dbname":dbName,"tables":[],"expanded":false};
-+              $scope.hive.push(hiveData);
-+              $scope.hiveMap[dbName] = i;
-+            }
-+          }
-+          $scope.hiveLoaded = true;
-+          $scope.showMoreDatabases();
-+        });
-+      }
-+
-+      $scope.showMoreTables = function(hiveTables, node){
-+        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
-+        var from = $scope.hiveLimit * shownTimes;
-+        var to = 0;
-+        var hasMore = false;
-+        if(from + $scope.hiveLimit > hiveTables.length) {
-+          to = hiveTables.length - 1;
-+        } else {
-+          to = from + $scope.hiveLimit - 1;
-+          hasMore = true;
-+        }
-+        if(!angular.isUndefined(node.children[from])){
-+          node.children.pop();
-+        }
-+
-+        for(var idx = from; idx <= to; idx++){
-+          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
-+        }
-+
-+        if(hasMore){
-+          var loading = {"label":"","id":65535,"children":[]};
-+          node.children.push(loading);
-+        }
-+      }
-+
-+      $scope.showAllTables = function(hiveTables, node){
-+        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
-+        var from = $scope.hiveLimit * shownTimes;
-+        var to = hiveTables.length - 1;
-+        if(!angular.isUndefined(node.children[from])){
-+          node.children.pop();
-+        }
-+        for(var idx = from; idx <= to; idx++){
-+          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
-+        }
-+      }
-+
-+      $scope.showMoreDatabases = function(){
-+        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
-+        var from = $scope.hiveLimit * shownTimes;
-+        var to = 0;
-+        var hasMore = false;
-+        if(from + $scope.hiveLimit > $scope.hive.length) {
-+          to = $scope.hive.length - 1;
-+        } else {
-+          to = from + $scope.hiveLimit - 1;
-+          hasMore = true;
-+        }
-+        if(!angular.isUndefined($scope.treedata[from])){
-+          $scope.treedata.pop();
-+        }
-+
-+        for(var idx = from; idx <= to; idx++){
-+          var children = [];
-+          var loading = {"label":"","id":0,"children":[]};
-+          children.push(loading);
-+          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
-+        }
-+
-+        if(hasMore){
-+          var loading = {"label":"","id":65535,"children":[0]};
-+          $scope.treedata.push(loading);
-+        }
-+      }
-+
-+      $scope.showAllDatabases = function(){
-+        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
-+        var from = $scope.hiveLimit * shownTimes;
-+        var to = $scope.hive.length - 1;
-+
-+        if(!angular.isUndefined($scope.treedata[from])){
-+          $scope.treedata.pop();
-+        }
-+
-+        for(var idx = from; idx <= to; idx++){
-+          var children = [];
-+          var loading = {"label":"","id":0,"children":[]};
-+          children.push(loading);
-+          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
-+        }
-+      }
-+
-+      $scope.showMoreClicked = function($parentNode){
-+        if($parentNode == null){
-+          $scope.showMoreDatabases();
-+        } else {
-+          $scope.showMoreTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
-+        }
-+      }
-+
-+      $scope.showAllClicked = function($parentNode){
-+        if($parentNode == null){
-+          $scope.showAllDatabases();
-+        } else {
-+          $scope.showAllTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
-+        }
-+      }
-+
-+      $scope.showToggle = function(node) {
-+        if(node.expanded == false){
-+          TableService.showHiveTables({"database": node.label},function (hive_tables){
-+            var tables = [];
-+            for (var i = 0; i < hive_tables.length; i++) {
-+              tables.push(hive_tables[i]);
-+            }
-+            $scope.hive[$scope.hiveMap[node.label]].tables = tables;
-+            $scope.showMoreTables(tables,node);
-+            node.expanded = true;
-+          });
-+        }
-+      }
-+
-+      $scope.showSelected = function(node) {
-+
-+      }
-+
-+      if(angular.isUndefined($scope.hive) || angular.isUndefined($scope.hiveLoaded) || angular.isUndefined($scope.treedata) ){
-+        $scope.hive = [];
-+        $scope.hiveLoaded = false;
-+        $scope.treedata = [];
-+        $scope.loadHive();
-+      }
-+
-+
-+
-+
-       $scope.add = function () {
-+
-+        if($scope.tableNames.length === 0 && $scope.selectedNodes.length > 0) {
-+          for(var i = 0; i <  $scope.selectedNodes.length; i++){
-+            if($scope.selectedNodes[i].label.indexOf(".") >= 0){
-+              $scope.tableNames += ($scope.selectedNodes[i].label) += ',';
-+            }
-+          }
-+        }
-+
-         if ($scope.tableNames.trim() === "") {
-           SweetAlert.swal('', 'Please input table(s) you want to synchronize.', 'info');
-           return;
-diff --git a/webapp/app/js/directives/angular-tree-control.js b/webapp/app/js/directives/angular-tree-control.js
-new file mode 100644
-index 0000000..6fca987
---- /dev/null
-+++ b/webapp/app/js/directives/angular-tree-control.js
-@@ -0,0 +1,363 @@
-+/*
-+ * The MIT License (MIT)
-+ *
-+ * Copyright (c) 2013 Steve
-+ *
-+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
-+ * this software and associated documentation files (the "Software"), to deal in
-+ * the Software without restriction, including without limitation the rights to
-+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-+ * the Software, and to permit persons to whom the Software is furnished to do so,
-+ *   subject to the following conditions:
-+ *
-+ *   The above copyright notice and this permission notice shall be included in all
-+ * copies or substantial portions of the Software.
-+ *
-+ *   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-+ * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-+ * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-+ * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-+ */
-+
-+(function ( angular ) {
-+  'use strict';
-+
-+  angular.module( 'treeControl', [] )
-+    .directive( 'treecontrol', ['$compile', function( $compile ) {
-+      /**
-+       * @param cssClass - the css class
-+       * @param addClassProperty - should we wrap the class name with class=""
-+       */
-+      function classIfDefined(cssClass, addClassProperty) {
-+        if (cssClass) {
-+          if (addClassProperty)
-+            return 'class="' + cssClass + '"';
-+          else
-+            return cssClass;
-+        }
-+        else
-+          return "";
-+      }
-+
-+      function ensureDefault(obj, prop, value) {
-+        if (!obj.hasOwnProperty(prop))
-+          obj[prop] = value;
-+      }
-+
-+      return {
-+        restrict: 'EA',
-+        require: "treecontrol",
-+        transclude: true,
-+        scope: {
-+          treeModel: "=",
-+          selectedNode: "=?",
-+          selectedNodes: "=?",
-+          expandedNodes: "=?",
-+          onSelection: "&",
-+          onNodeToggle: "&",
-+          options: "=?",
-+          orderBy: "@",
-+          reverseOrder: "@",
-+          filterExpression: "=?",
-+          filterComparator: "=?",
-+          onDblclick: "&"
-+        },
-+        controller: ['$scope', function( $scope ) {
-+
-+          function defaultIsLeaf(node) {
-+            return !node[$scope.options.nodeChildren] || node[$scope.options.nodeChildren].length === 0;
-+          }
-+
-+          function shallowCopy(src, dst) {
-+            if (angular.isArray(src)) {
-+              dst = dst || [];
-+
-+              for ( var i = 0; i < src.length; i++) {
-+                dst[i] = src[i];
-+              }
-+            } else if (angular.isObject(src)) {
-+              dst = dst || {};
-+
-+              for (var key in src) {
-+                if (hasOwnProperty.call(src, key) && !(key.charAt(0) === '$' && key.charAt(1) === '$')) {
-+                  dst[key] = src[key];
-+                }
-+              }
-+            }
-+
-+            return dst || src;
-+          }
-+          function defaultEquality(a, b) {
-+            if (a === undefined || b === undefined)
-+              return false;
-+            a = shallowCopy(a);
-+            a[$scope.options.nodeChildren] = [];
-+            b = shallowCopy(b);
-+            b[$scope.options.nodeChildren] = [];
-+            return angular.equals(a, b);
-+          }
-+
-+          $scope.options = $scope.options || {};
-+          ensureDefault($scope.options, "multiSelection", false);
-+          ensureDefault($scope.options, "nodeChildren", "children");
-+          ensureDefault($scope.options, "dirSelectable", "true");
-+          ensureDefault($scope.options, "injectClasses", {});
-+          ensureDefault($scope.options.injectClasses, "ul", "");
-+          ensureDefault($scope.options.injectClasses, "li", "");
-+          ensureDefault($scope.options.injectClasses, "liSelected", "");
-+          ensureDefault($scope.options.injectClasses, "iExpanded", "");
-+          ensureDefault($scope.options.injectClasses, "iCollapsed", "");
-+          ensureDefault($scope.options.injectClasses, "iLeaf", "");
-+          ensureDefault($scope.options.injectClasses, "label", "");
-+          ensureDefault($scope.options.injectClasses, "labelSelected", "");
-+          ensureDefault($scope.options, "equality", defaultEquality);
-+          ensureDefault($scope.options, "isLeaf", defaultIsLeaf);
-+
-+          $scope.selectedNodes = $scope.selectedNodes || [];
-+          $scope.expandedNodes = $scope.expandedNodes || [];
-+          $scope.expandedNodesMap = {};
-+          for (var i=0; i < $scope.expandedNodes.length; i++) {
-+            $scope.expandedNodesMap[""+i] = $scope.expandedNodes[i];
-+          }
-+          $scope.parentScopeOfTree = $scope.$parent;
-+
-+
-+          function isSelectedNode(node) {
-+            if (!$scope.options.multiSelection && ($scope.options.equality(node, $scope.selectedNode)))
-+              return true;
-+            else if ($scope.options.multiSelection && $scope.selectedNodes) {
-+              for (var i = 0; (i < $scope.selectedNodes.length); i++) {
-+                if ($scope.options.equality(node, $scope.selectedNodes[i])) {
-+                  return true;
-+                }
-+              }
-+              return false;
-+            }
-+          }
-+
-+          $scope.headClass = function(node) {
-+            var liSelectionClass = classIfDefined($scope.options.injectClasses.liSelected, false);
-+            var injectSelectionClass = "";
-+            if (liSelectionClass && isSelectedNode(node))
-+              injectSelectionClass = " " + liSelectionClass;
-+            if ($scope.options.isLeaf(node))
-+              return "tree-leaf" + injectSelectionClass;
-+            if ($scope.expandedNodesMap[this.$id])
-+              return "tree-expanded" + injectSelectionClass;
-+            else
-+              return "tree-collapsed" + injectSelectionClass;
-+          };
-+
-+          $scope.iBranchClass = function() {
-+            if ($scope.expandedNodesMap[this.$id])
-+              return classIfDefined($scope.options.injectClasses.iExpanded);
-+            else
-+              return classIfDefined($scope.options.injectClasses.iCollapsed);
-+          };
-+
-+          $scope.nodeExpanded = function() {
-+            return !!$scope.expandedNodesMap[this.$id];
-+          };
-+
-+          $scope.selectNodeHead = function() {
-+            var expanding = $scope.expandedNodesMap[this.$id] === undefined;
-+            $scope.expandedNodesMap[this.$id] = (expanding ? this.node : undefined);
-+            if (expanding) {
-+              $scope.expandedNodes.push(this.node);
-+            }
-+            else {
-+              var index;
-+              for (var i=0; (i < $scope.expandedNodes.length) && !index; i++) {
-+                if ($scope.options.equality($scope.expandedNodes[i], this.node)) {
-+                  index = i;
-+                }
-+              }
-+              if (index != undefined)
-+                $scope.expandedNodes.splice(index, 1);
-+            }
-+            if ($scope.onNodeToggle)
-+              $scope.onNodeToggle({node: this.node, expanded: expanding});
-+          };
-+
-+          $scope.selectNodeLabel = function( selectedNode ){
-+            if(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0){
-+              this.selectNodeHead();
-+            }
-+            if($scope.options.dirSelectable || !(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0) )
-+             {
-+              var selected = false;
-+              if ($scope.options.multiSelection) {
-+                var pos = $scope.selectedNodes.indexOf(selectedNode);
-+                if (pos === -1) {
-+                  $scope.selectedNodes.push(selectedNode);
-+                  selected = true;
-+                } else {
-+                  $scope.selectedNodes.splice(pos, 1);
-+                }
-+              } else {
-+                if ($scope.selectedNode != selectedNode) {
-+                  $scope.selectedNode = selectedNode;
-+                  selected = true;
-+                }
-+                else {
-+                  $scope.selectedNode = undefined;
-+                }
-+              }
-+              if ($scope.onSelection)
-+                $scope.onSelection({node: selectedNode, selected: selected});
-+            }
-+          };
-+
-+
-+          $scope.dblClickNode = function(selectedNode){
-+            if($scope.onDblclick!=null){
-+              $scope.onDblclick({node:selectedNode});
-+            }
-+          }
-+
-+          $scope.selectedClass = function() {
-+            var isThisNodeSelected = isSelectedNode(this.node);
-+            var labelSelectionClass = classIfDefined($scope.options.injectClasses.labelSelected, false);
-+            var injectSelectionClass = "";
-+            if (labelSelectionClass && isThisNodeSelected)
-+              injectSelectionClass = " " + labelSelectionClass;
-+
-+            return isThisNodeSelected?"tree-selected" + injectSelectionClass:"";
-+          };
-+
-+          //tree template
-+          var orderBy = $scope.orderBy ? ' | orderBy:orderBy:reverseOrder' : '';
-+          var template =
-+            '<ul '+classIfDefined($scope.options.injectClasses.ul, true)+'>' +
-+            '<li ng-repeat="node in node.' + $scope.options.nodeChildren + ' | filter:filterExpression:filterComparator ' + orderBy + '" ng-class="headClass(node)" '+classIfDefined($scope.options.injectClasses.li, true)+'>' +
-+            '<i class="tree-branch-head" ng-class="iBranchClass()" ng-click="selectNodeHead(node)"></i>' +
-+            '<i class="tree-leaf-head '+classIfDefined($scope.options.injectClasses.iLeaf, false)+'"></i>' +
-+            '<div class="tree-label '+classIfDefined($scope.options.injectClasses.label, false)+'" ng-class="selectedClass()" ng-click="selectNodeLabel(node)" ng-dblclick="dblClickNode(node)" tree-transclude></div>' +
-+            '<treeitem ng-if="nodeExpanded()"></treeitem>' +
-+            '</li>' +
-+            '</ul>';
-+
-+          this.template = $compile(template);
-+        }],
-+        compile: function(element, attrs, childTranscludeFn) {
-+          return function ( scope, element, attrs, treemodelCntr ) {
-+
-+            scope.$watch("treeModel", function updateNodeOnRootScope(newValue) {
-+              if (angular.isArray(newValue)) {
-+                if (angular.isDefined(scope.node) && angular.equals(scope.node[scope.options.nodeChildren], newValue))
-+                  return;
-+                scope.node = {};
-+                scope.synteticRoot = scope.node;
-+                scope.node[scope.options.nodeChildren] = newValue;
-+              }
-+              else {
-+                if (angular.equals(scope.node, newValue))
-+                  return;
-+                scope.node = newValue;
-+              }
-+            });
-+
-+            scope.$watchCollection('expandedNodes', function(newValue) {
-+              var notFoundIds = 0;
-+              var newExpandedNodesMap = {};
-+              var $liElements = element.find('li');
-+              var existingScopes = [];
-+              // find all nodes visible on the tree and the scope $id of the scopes including them
-+              angular.forEach($liElements, function(liElement) {
-+                var $liElement = angular.element(liElement);
-+                var liScope = $liElement.scope();
-+                existingScopes.push(liScope);
-+              });
-+              // iterate over the newValue, the new expanded nodes, and for each find it in the existingNodesAndScopes
-+              // if found, add the mapping $id -> node into newExpandedNodesMap
-+              // if not found, add the mapping num -> node into newExpandedNodesMap
-+              angular.forEach(newValue, function(newExNode) {
-+                var found = false;
-+                for (var i=0; (i < existingScopes.length) && !found; i++) {
-+                  var existingScope = existingScopes[i];
-+                  if (scope.options.equality(newExNode, existingScope.node)) {
-+                    newExpandedNodesMap[existingScope.$id] = existingScope.node;
-+                    found = true;
-+                  }
-+                }
-+                if (!found)
-+                  newExpandedNodesMap[notFoundIds++] = newExNode;
-+              });
-+              scope.expandedNodesMap = newExpandedNodesMap;
-+            });
-+
-+//                        scope.$watch('expandedNodesMap', function(newValue) {
-+//
-+//                        });
-+
-+            //Rendering template for a root node
-+            treemodelCntr.template( scope, function(clone) {
-+              element.html('').append( clone );
-+            });
-+            // save the transclude function from compile (which is not bound to a scope as apposed to the one from link)
-+            // we can fix this to work with the link transclude function with angular 1.2.6. as for angular 1.2.0 we need
-+            // to keep using the compile function
-+            scope.$treeTransclude = childTranscludeFn;
-+          }
-+        }
-+      };
-+    }])
-+    .directive("treeitem", function() {
-+      return {
-+        restrict: 'E',
-+        require: "^treecontrol",
-+        link: function( scope, element, attrs, treemodelCntr) {
-+          // Rendering template for the current node
-+          treemodelCntr.template(scope, function(clone) {
-+            element.html('').append(clone);
-+          });
-+        }
-+      }
-+    })
-+    .directive("treeTransclude", function() {
-+      return {
-+        link: function(scope, element, attrs, controller) {
-+          if (!scope.options.isLeaf(scope.node)) {
-+            angular.forEach(scope.expandedNodesMap, function (node, id) {
-+              if (scope.options.equality(node, scope.node)) {
-+                scope.expandedNodesMap[scope.$id] = scope.node;
-+                scope.expandedNodesMap[id] = undefined;
-+              }
-+            });
-+          }
-+          if (!scope.options.multiSelection && scope.options.equality(scope.node, scope.selectedNode)) {
-+            scope.selectedNode = scope.node;
-+          } else if (scope.options.multiSelection) {
-+            var newSelectedNodes = [];
-+            for (var i = 0; (i < scope.selectedNodes.length); i++) {
-+              if (scope.options.equality(scope.node, scope.selectedNodes[i])) {
-+                newSelectedNodes.push(scope.node);
-+              }
-+            }
-+            scope.selectedNodes = newSelectedNodes;
-+          }
-+
-+          // create a scope for the transclusion, whos parent is the parent of the tree control
-+          scope.transcludeScope = scope.parentScopeOfTree.$new();
-+          scope.transcludeScope.node = scope.node;
-+          scope.transcludeScope.$parentNode = (scope.$parent.node === scope.synteticRoot)?null:scope.$parent.node;
-+          scope.transcludeScope.$index = scope.$index;
-+          scope.transcludeScope.$first = scope.$first;
-+          scope.transcludeScope.$middle = scope.$middle;
-+          scope.transcludeScope.$last = scope.$last;
-+          scope.transcludeScope.$odd = scope.$odd;
-+          scope.transcludeScope.$even = scope.$even;
-+          scope.$on('$destroy', function() {
-+            scope.transcludeScope.$destroy();
-+          });
-+
-+          scope.$treeTransclude(scope.transcludeScope, function(clone) {
-+            element.empty();
-+            element.append(clone);
-+          });
-+        }
-+      }
-+    });
-+})( angular );
-diff --git a/webapp/app/js/services/kylinProperties.js b/webapp/app/js/services/kylinProperties.js
-index a03403b..b1f04c0 100644
---- a/webapp/app/js/services/kylinProperties.js
-+++ b/webapp/app/js/services/kylinProperties.js
-@@ -20,6 +20,7 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
-   var _config;
-   var timezone;
-   var deployEnv;
-+  var hiveLimit;
- 
- 
-   this.init = function () {
-@@ -56,12 +57,22 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
-   }
- 
-   this.getDeployEnv = function () {
-+    this.deployEnv = this.getProperty("deploy.env");
-     if (!this.deployEnv) {
--      this.deployEnv = this.getProperty("deploy.env").trim();
-+      return "DEV";
-     }
--    return this.deployEnv.toUpperCase();
-+    return this.deployEnv.toUpperCase().trim();
-   }
- 
-+  this.getHiveLimit = function () {
-+    this.hiveLimit = this.getProperty("kylin.web.hive.limit");
-+    if (!this.hiveLimit) {
-+      return 20;
-+    }
-+    return this.hiveLimit;
-+  }
-+
-+
-   //fill config info for Config from backend
-   this.initWebConfigInfo = function () {
- 
-diff --git a/webapp/app/js/services/tables.js b/webapp/app/js/services/tables.js
-index 3b5e9f4..9b2d376 100755
---- a/webapp/app/js/services/tables.js
-+++ b/webapp/app/js/services/tables.js
-@@ -17,13 +17,14 @@
-  */
- 
- KylinApp.factory('TableService', ['$resource', function ($resource, config) {
--  return $resource(Config.service.url + 'tables/:tableName/:action', {}, {
-+  return $resource(Config.service.url + 'tables/:tableName/:action/:database', {}, {
-     list: {method: 'GET', params: {}, cache: true, isArray: true},
-     get: {method: 'GET', params: {}, isArray: false},
-     getExd: {method: 'GET', params: {action: 'exd-map'}, isArray: false},
-     reload: {method: 'PUT', params: {action: 'reload'}, isArray: false},
-     loadHiveTable: {method: 'POST', params: {}, isArray: false},
-     addStreamingSrc: {method: 'POST', params: {action:'addStreamingSrc'}, isArray: false},
--    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false}
--  });
-+    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false},
-+    showHiveDatabases: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true},
-+    showHiveTables: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true}  });
- }]);
-diff --git a/webapp/app/partials/tables/source_table_tree.html b/webapp/app/partials/tables/source_table_tree.html
-index 767eb43..c091dca 100755
---- a/webapp/app/partials/tables/source_table_tree.html
-+++ b/webapp/app/partials/tables/source_table_tree.html
-@@ -26,6 +26,7 @@
-         <div class="col-xs-5" style="padding-left: 0px;margin-top: 20px;">
-             <div class="pull-right">
-                 <a class="btn btn-xs btn-primary" tooltip="Load Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openModal()"><i class="fa fa-download"></i></a>
-+                <a class="btn btn-xs btn-info" tooltip="Load Hive Table From Tree"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openTreeModal()"><i class="fa fa-download"></i></a>
-                 <a class="btn btn-xs btn-primary" tooltip="Add Streaming Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openStreamingSourceModal()"><i class="fa fa-area-chart"></i></a>
-             </div>
-         </div>
-@@ -47,3 +48,28 @@
- </div>
- 
- <div ng-include="'partials/tables/table_load.html'"></div>
-+
-+<script type="text/ng-template" id="addHiveTableFromTree.html">
-+  <div class="modal-header"><button class="close" type="button" data-dismiss="modal" ng-click="cancel()">×</button>
-+    <h4>Load Hive Table Metadata From Tree</h4>
-+  </div>
-+  <div class="modal-body">
-+    <span><strong>Project: </strong>{{ $parent.projectName!=null?$parent.projectName:'NULL'}}</span>
-+    <div class="form-group searchBox">
-+      <input type="text" placeholder="Filter ..." class="nav-search-input" ng-model="predicate" />
-+    </div>
-+    <loading ng-if="!hiveLoaded" text="Loading Databases..."></loading>
-+    <treecontrol class="tree-light check" tree-model="treedata" selected-nodes="selectedNodes" filter-expression="predicate" on-selection="showSelected(node)" on-node-toggle="showToggle(node)" options="treeOptions">
-+      <div ng-if="node.label==''&&node.id==0"><img src="image/ajax-loader.gif">Loading Tables...</div>
-+      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showMoreClicked($parentNode)">Show More</button>
-+      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showAllClicked($parentNode)">Show All</button>
-+      {{node.label}}
-+    </treecontrol>
-+  </div>
-+
-+  <div class="modal-footer">
-+    <button class="btn btn-primary" ng-click="add()">Sync</button>
-+    <button class="btn btn-primary" ng-click="cancel()">Cancel</button>
-+  </div>
-+
-+</script>
-diff --git a/webapp/bower.json b/webapp/bower.json
-index 41144f9..bba4a52 100755
---- a/webapp/bower.json
-+++ b/webapp/bower.json
-@@ -32,7 +32,8 @@
-     "bootstrap-sweetalert": "~0.4.3",
-     "angular-toggle-switch":"1.3.0",
-     "angular-ui-select": "0.13.2",
--    "angular-sanitize": "1.2.18"
-+    "angular-sanitize": "1.2.18",
-+    "angular-tree-control": "0.2.8"
-   },
-   "devDependencies": {
-     "less.js": "~1.4.0",
-diff --git a/webapp/grunt.json b/webapp/grunt.json
-index 3219b5e..86ad1dc 100755
---- a/webapp/grunt.json
-+++ b/webapp/grunt.json
-@@ -19,7 +19,6 @@
-                 "app/components/angularLocalStorage/src/angularLocalStorage.js",
-                 "app/components/angular-base64/angular-base64.min.js",
-                 "app/components/ng-grid/build/ng-grid.js",
--                "app/components/angular-tree-control/angular-tree-control.js",
-                 "app/components/ace-builds/src-min-noconflict/ace.js",
-                 "app/components/ace-builds/src-min-noconflict/ext-language_tools.js",
-                 "app/components/ace-builds/src-min-noconflict/mode-json.js",
--- 
-2.5.4 (Apple Git-61)
-


[14/50] [abbrv] kylin git commit: KYLIN-1054 Support Hive client Beeline

Posted by li...@apache.org.
KYLIN-1054 Support Hive client Beeline


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/9c77a5eb
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/9c77a5eb
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/9c77a5eb

Branch: refs/heads/master
Commit: 9c77a5ebe955e708c51f9ea9c3cf3d747e880cd7
Parents: 7df1cc7
Author: lidongsjtu <li...@apache.org>
Authored: Mon Feb 29 19:26:56 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Mon Feb 29 19:26:56 2016 +0800

----------------------------------------------------------------------
 build/bin/find-hive-dependency.sh               |  11 +-
 .../apache/kylin/common/KylinConfigBase.java    |  12 +-
 .../test_case_data/sandbox/kylin.properties     |   3 +
 .../kylin/provision/BuildIIWithStream.java      |  17 ++-
 .../kylin/source/hive/HiveCmdBuilder.java       | 106 ++++++++++++++++++
 .../apache/kylin/source/hive/HiveMRInput.java   |  25 +++--
 .../kylin/source/hive/HiveCmdBuilderTest.java   |  78 +++++++++++++
 .../storage/hbase/steps/DeprecatedGCStep.java   |   6 +-
 .../storage/hbase/util/HiveCmdBuilder.java      | 109 +++++++++++++++++++
 .../storage/hbase/util/StorageCleanupJob.java   |  24 ++--
 10 files changed, 351 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/build/bin/find-hive-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hive-dependency.sh b/build/bin/find-hive-dependency.sh
index 5994dda..171c5b1 100644
--- a/build/bin/find-hive-dependency.sh
+++ b/build/bin/find-hive-dependency.sh
@@ -17,7 +17,16 @@
 # limitations under the License.
 #
 
-hive_env=`hive -e set | grep 'env:CLASSPATH'`
+client_mode=`sh ${KYLIN_HOME}/bin/get-properties.sh kylin.hive.client`
+hive_env=
+
+if [ "${client_mode}" == "beeline" ]
+then
+    beeline_params=`sh ${KYLIN_HOME}/bin/get-properties.sh kylin.hive.beeline.params`
+    hive_env=`beeline ${beeline_params} --outputformat=dsv -e set | grep 'env:CLASSPATH'`
+else
+    hive_env=`hive -e set | grep 'env:CLASSPATH'`
+fi
 
 hive_classpath=`echo $hive_env | grep 'env:CLASSPATH' | awk -F '=' '{print $2}'`
 arr=(`echo $hive_classpath | cut -d ":"  --output-delimiter=" " -f 1-`)

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 0cee9f8..826a28c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -367,7 +367,7 @@ public class KylinConfigBase implements Serializable {
     public double getCubeAlgorithmAutoThreshold() {
         return Double.parseDouble(getOptional("kylin.cube.algorithm.auto.threshold", "8"));
     }
-    
+
     public int getCubeAggrGroupMaxSize() {
         return Integer.parseInt(getOptional("kylin.cube.aggrgroup.max.size", "12"));
     }
@@ -533,7 +533,7 @@ public class KylinConfigBase implements Serializable {
     public String getMailSender() {
         return getOptional("mail.sender", "");
     }
-    
+
     public boolean isWebCrossDomainEnabled() {
         return Boolean.parseBoolean(getOptional("crossdomain.enable", "true"));
     }
@@ -542,4 +542,12 @@ public class KylinConfigBase implements Serializable {
         return getMetadataUrl();
     }
 
+    public String getHiveClientMode() {
+        return getOptional("kylin.hive.client", "cli");
+    }
+
+    public String getHiveBeelineParams() {
+        return getOptional("kylin.hive.beeline.params", "");
+    }
+
 }
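
The two getters above are the entire configuration surface for the client
switch. A minimal read-out sketch, assuming a normal Kylin runtime where
KylinConfig.getInstanceFromEnv() is available:

    KylinConfig config = KylinConfig.getInstanceFromEnv();
    String clientMode = config.getHiveClientMode();        // "cli" unless kylin.hive.client is set
    String beelineParams = config.getHiveBeelineParams();  // "" unless kylin.hive.beeline.params is set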

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index bf161fc..a304cab 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -20,6 +20,9 @@ kylin.storage.url=hbase
 # Temp folder in hdfs, make sure user has the right access to the hdfs directory
 kylin.hdfs.working.dir=/kylin
 
+# Parameters for beeline client
+kylin.hive.beeline.params=--hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://localhost:10000'
+
 kylin.job.mapreduce.default.reduce.input.mb=500
 
 # If true, job engine will not assume that hadoop CLI reside on the same server as it self
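
Note the sandbox file only sets the beeline parameters; the client itself
stays on the "cli" default from KylinConfigBase. A deployment that actually
wants beeline would, as a sketch, pair the two properties:

    kylin.hive.client=beeline
    kylin.hive.beeline.params=-u 'jdbc:hive2://localhost:10000'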

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithStream.java
index 8436687..9b7cd14 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithStream.java
@@ -74,6 +74,7 @@ import org.apache.kylin.job.constant.ExecutableConstants;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
+import org.apache.kylin.source.hive.HiveCmdBuilder;
 import org.apache.kylin.source.hive.HiveTableReader;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.ii.IICreateHTableJob;
@@ -147,15 +148,13 @@ public class BuildIIWithStream {
         }
 
         ShellExecutable step = new ShellExecutable();
-        StringBuffer buf = new StringBuffer();
-        buf.append("hive -e \"");
-        buf.append(useDatabaseHql + "\n");
-        buf.append(dropTableHql + "\n");
-        buf.append(createTableHql + "\n");
-        buf.append(insertDataHqls + "\n");
-        buf.append("\"");
-
-        step.setCmd(buf.toString());
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement(useDatabaseHql);
+        hiveCmdBuilder.addStatement(dropTableHql);
+        hiveCmdBuilder.addStatement(createTableHql);
+        hiveCmdBuilder.addStatement(insertDataHqls);
+
+        step.setCmd(hiveCmdBuilder.build());
         logger.info(step.getCmd());
         step.setName(ExecutableConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE);
         kylinConfig.getCliCommandExecutor().execute(step.getCmd(), null);

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java
new file mode 100644
index 0000000..f510780
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+public class HiveCmdBuilder {
+    private static final Logger logger = LoggerFactory.getLogger(HiveCmdBuilder.class);
+
+    public enum HiveClientMode {
+        CLI, BEELINE
+    }
+
+    private HiveClientMode clientMode;
+    private KylinConfig kylinConfig;
+    final private ArrayList<String> statements = Lists.newArrayList();
+
+    public HiveCmdBuilder() {
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        clientMode = HiveClientMode.valueOf(kylinConfig.getHiveClientMode().toUpperCase());
+    }
+
+    public String build() {
+        StringBuffer buf = new StringBuffer();
+
+        switch (clientMode) {
+        case CLI:
+            buf.append("hive -e \"");
+            for (String statement : statements) {
+                buf.append(statement).append("\n");
+            }
+            buf.append("\"");
+            break;
+        case BEELINE:
+            BufferedWriter bw = null;
+            try {
+                File tmpHql = File.createTempFile("beeline_", ".hql");
+                StringBuffer hqlBuf = new StringBuffer();
+                bw = new BufferedWriter(new FileWriter(tmpHql));
+                for (String statement : statements) {
+                    bw.write(statement);
+                    bw.newLine();
+
+                    hqlBuf.append(statement).append("\n");
+                }
+                buf.append("beeline ");
+                buf.append(kylinConfig.getHiveBeelineParams());
+                buf.append(" -f ");
+                buf.append(tmpHql.getAbsolutePath());
+                buf.append(";rm -f ");
+                buf.append(tmpHql.getAbsolutePath());
+
+                logger.info("The statements to execute in beeline: \n" + hqlBuf);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            } finally {
+                IOUtils.closeQuietly(bw);
+            }
+            break;
+        default:
+            throw new RuntimeException("Hive client cannot be recognized: " + clientMode);
+        }
+
+        return buf.toString();
+    }
+
+    public void reset() {
+        statements.clear();
+    }
+
+    public void addStatement(String statement) {
+        statements.add(statement);
+    }
+
+    @Override
+    public String toString() {
+        return build();
+    }
+}
\ No newline at end of file
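
A short usage sketch of the builder, mirroring the unit test added later in
this commit and assuming kylin.hive.client is left at its "cli" default:

    HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();  // reads client mode from KylinConfig
    hiveCmdBuilder.addStatement("USE default;");
    hiveCmdBuilder.addStatement("DROP TABLE test;");
    // In CLI mode build() wraps everything into one shell command:
    //   hive -e "USE default;\nDROP TABLE test;\n"
    String cmd = hiveCmdBuilder.build();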

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
index b8d1333..873641d 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hive.hcatalog.data.HCatRecord;
@@ -123,15 +124,14 @@ public class HiveMRInput implements IMRInput {
             }
 
             ShellExecutable step = new ShellExecutable();
-            StringBuilder buf = new StringBuilder();
-            buf.append("hive -e \"");
-            buf.append(useDatabaseHql + "\n");
-            buf.append(dropTableHql + "\n");
-            buf.append(createTableHql + "\n");
-            buf.append(insertDataHqls + "\n");
-            buf.append("\"");
-
-            step.setCmd(buf.toString());
+
+            HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+            hiveCmdBuilder.addStatement(useDatabaseHql);
+            hiveCmdBuilder.addStatement(dropTableHql);
+            hiveCmdBuilder.addStatement(createTableHql);
+            hiveCmdBuilder.addStatement(insertDataHqls);
+
+            step.setCmd(hiveCmdBuilder.build());
             step.setName(ExecutableConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE);
 
             return step;
@@ -164,10 +164,11 @@ public class HiveMRInput implements IMRInput {
 
             final String hiveTable = this.getIntermediateTableIdentity();
             if (config.isHiveKeepFlatTable() == false && StringUtils.isNotEmpty(hiveTable)) {
-                final String dropSQL = "USE " + context.getConfig().getHiveDatabaseForIntermediateTable() + ";" + " DROP TABLE IF EXISTS  " + hiveTable + ";";
-                final String dropHiveCMD = "hive -e \"" + dropSQL + "\"";
+                final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+                hiveCmdBuilder.addStatement("USE " + context.getConfig().getHiveDatabaseForIntermediateTable() + ";");
+                hiveCmdBuilder.addStatement("DROP TABLE IF EXISTS  " + hiveTable + ";");
                 try {
-                    config.getCliCommandExecutor().execute(dropHiveCMD);
+                    config.getCliCommandExecutor().execute(hiveCmdBuilder.build());
                     output.append("Hive table " + hiveTable + " is dropped. \n");
 
                     Path externalDataPath = new Path(getExternalDataPath());

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/source-hive/src/test/java/org/apache/kylin/source/hive/HiveCmdBuilderTest.java
----------------------------------------------------------------------
diff --git a/source-hive/src/test/java/org/apache/kylin/source/hive/HiveCmdBuilderTest.java b/source-hive/src/test/java/org/apache/kylin/source/hive/HiveCmdBuilderTest.java
new file mode 100644
index 0000000..70c11b3
--- /dev/null
+++ b/source-hive/src/test/java/org/apache/kylin/source/hive/HiveCmdBuilderTest.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Created by dongli on 2/22/16.
+ */
+public class HiveCmdBuilderTest {
+
+    @Before
+    public void setup() {
+        System.setProperty("KYLIN_CONF", "../examples/test_case_data/localmeta");
+    }
+
+    @After
+    public void after() throws Exception {
+        System.clearProperty("kylin.hive.client");
+        System.clearProperty("kylin.hive.beeline.params");
+    }
+
+    @Test
+    public void testHiveCLI() {
+        System.setProperty("kylin.hive.client", "cli");
+
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement("USE default;");
+        hiveCmdBuilder.addStatement("DROP TABLE test;");
+        hiveCmdBuilder.addStatement("SHOW\n TABLES;");
+
+        assertEquals("hive -e \"USE default;\nDROP TABLE test;\nSHOW\n TABLES;\n\"", hiveCmdBuilder.build());
+    }
+
+    @Test
+    public void testBeeline() throws IOException {
+        System.setProperty("kylin.hive.client", "beeline");
+        System.setProperty("kylin.hive.beeline.params", "-u jdbc_url");
+
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement("USE default;");
+        hiveCmdBuilder.addStatement("DROP TABLE test;");
+        hiveCmdBuilder.addStatement("SHOW\n TABLES;");
+
+        String cmd = hiveCmdBuilder.build();
+        assertTrue(cmd.startsWith("beeline -u jdbc_url -f") && cmd.contains(";rm -f"));
+
+        String hqlFile = cmd.substring(cmd.lastIndexOf("-f ") + 3).trim();
+        String hqlStatement = FileUtils.readFileToString(new File(hqlFile));
+        assertEquals("USE default;\nDROP TABLE test;\nSHOW\n TABLES;\n", hqlStatement);
+
+        FileUtils.forceDelete(new File(hqlFile));
+    }
+}
\ No newline at end of file
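
The beeline assertions above pin down the command shape: build() writes the
statements to a temporary .hql file and chains its removal. The generated
command therefore looks roughly like the line below, where the temp path is
whatever File.createTempFile returned:

    beeline -u jdbc_url -f /tmp/beeline_12345.hql;rm -f /tmp/beeline_12345.hql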

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
index bba16c3..735f967 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
@@ -38,6 +38,7 @@ import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
 import org.apache.kylin.job.execution.ExecuteResult;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.apache.kylin.storage.hbase.util.HiveCmdBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -86,8 +87,9 @@ public class DeprecatedGCStep extends AbstractExecutable {
         final String hiveTable = this.getOldHiveTable();
         if (StringUtils.isNotEmpty(hiveTable)) {
             final String dropSQL = "USE " + context.getConfig().getHiveDatabaseForIntermediateTable() + ";" + " DROP TABLE IF EXISTS  " + hiveTable + ";";
-            final String dropHiveCMD = "hive -e \"" + dropSQL + "\"";
-            context.getConfig().getCliCommandExecutor().execute(dropHiveCMD);
+            final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+            hiveCmdBuilder.addStatement(dropSQL);
+            context.getConfig().getCliCommandExecutor().execute(hiveCmdBuilder.build());
             output.append("Dropped Hive table " + hiveTable + " \n");
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
new file mode 100644
index 0000000..f1ca4de
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.storage.hbase.util;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Created by dongli on 2/29/16.
+ */
+public class HiveCmdBuilder {
+    private static final Logger logger = LoggerFactory.getLogger(HiveCmdBuilder.class);
+
+    public enum HiveClientMode {
+        CLI, BEELINE
+    }
+
+    private HiveClientMode clientMode;
+    private KylinConfig kylinConfig;
+    final private ArrayList<String> statements = Lists.newArrayList();
+
+    public HiveCmdBuilder() {
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        clientMode = HiveClientMode.valueOf(kylinConfig.getHiveClientMode().toUpperCase());
+    }
+
+    public String build() {
+        StringBuffer buf = new StringBuffer();
+
+        switch (clientMode) {
+            case CLI:
+                buf.append("hive -e \"");
+                for (String statement : statements) {
+                    buf.append(statement).append("\n");
+                }
+                buf.append("\"");
+                break;
+            case BEELINE:
+                BufferedWriter bw = null;
+                try {
+                    File tmpHql = File.createTempFile("beeline_", ".hql");
+                    StringBuffer hqlBuf = new StringBuffer();
+                    bw = new BufferedWriter(new FileWriter(tmpHql));
+                    for (String statement : statements) {
+                        bw.write(statement);
+                        bw.newLine();
+
+                        hqlBuf.append(statement).append("\n");
+                    }
+                    buf.append("beeline ");
+                    buf.append(kylinConfig.getHiveBeelineParams());
+                    buf.append(" -f ");
+                    buf.append(tmpHql.getAbsolutePath());
+                    buf.append(";rm -f ");
+                    buf.append(tmpHql.getAbsolutePath());
+
+                    logger.info("The statements to execute in beeline: \n" + hqlBuf);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                } finally {
+                    IOUtils.closeQuietly(bw);
+                }
+                break;
+            default:
+                throw new RuntimeException("Hive client cannot be recognized: " + clientMode);
+        }
+
+        return buf.toString();
+    }
+
+    public void reset() {
+        statements.clear();
+    }
+
+    public void addStatement(String statement) {
+        statements.add(statement);
+    }
+
+    @Override
+    public String toString() {
+        return build();
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index 2137f57..c010d51 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -240,13 +240,11 @@ public class StorageCleanupJob extends AbstractHadoopJob {
         final int uuidLength = 36;
         
         final String useDatabaseHql = "USE " + config.getHiveDatabaseForIntermediateTable() + ";";
-        StringBuilder buf = new StringBuilder();
-        buf.append("hive -e \"");
-        buf.append(useDatabaseHql);
-        buf.append("show tables " + "\'kylin_intermediate_*\'" + "; ");
-        buf.append("\"");
-        
-        Pair<Integer, String> result = cmdExec.execute(buf.toString());
+        final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement(useDatabaseHql);
+        hiveCmdBuilder.addStatement("show tables " + "\'kylin_intermediate_*\'" + "; ");
+
+        Pair<Integer, String> result = cmdExec.execute(hiveCmdBuilder.build());
 
         String outputStr = result.getSecond();
         BufferedReader reader = new BufferedReader(new StringReader(outputStr));
@@ -282,17 +280,15 @@ public class StorageCleanupJob extends AbstractHadoopJob {
         }
 
         if (delete == true) {
-            buf.delete(0, buf.length());
-            buf.append("hive -e \"");
-            buf.append(useDatabaseHql);
+            hiveCmdBuilder.reset();
+            hiveCmdBuilder.addStatement(useDatabaseHql);
             for (String delHive : allHiveTablesNeedToBeDeleted) {
-                buf.append("drop table if exists " + delHive + "; ");
+                hiveCmdBuilder.addStatement("drop table if exists " + delHive + "; ");
                 logger.info("Remove " + delHive + " from hive tables.");
             }
-            buf.append("\"");
-            
+
             try {
-                cmdExec.execute(buf.toString());
+                cmdExec.execute(hiveCmdBuilder.build());
             } catch (IOException e) {
                 e.printStackTrace();
             }
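
The reset() call is what lets StorageCleanupJob reuse a single builder for
both passes; condensed into a sketch (the dropped table name is hypothetical):

    HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.addStatement(useDatabaseHql);
    hiveCmdBuilder.addStatement("show tables 'kylin_intermediate_*'; ");
    String listCmd = hiveCmdBuilder.build();  // pass 1: list candidate intermediate tables

    hiveCmdBuilder.reset();                   // clears the accumulated statements
    hiveCmdBuilder.addStatement(useDatabaseHql);
    hiveCmdBuilder.addStatement("drop table if exists kylin_intermediate_foo; ");
    String dropCmd = hiveCmdBuilder.build();  // pass 2: drop what pass 1 found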


[40/50] [abbrv] kylin git commit: KYLIN-1421 Cube "source record" is always zero for streaming

Posted by li...@apache.org.
KYLIN-1421 Cube "source record" is always zero for streaming


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/50aab0b0
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/50aab0b0
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/50aab0b0

Branch: refs/heads/master
Commit: 50aab0b08ea1532998db1f11084c1ae908a42e56
Parents: 9352e5a
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 16 10:30:05 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Mar 8 12:20:05 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java    | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/50aab0b0/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
index ec2ad91..b4182fe 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
@@ -117,6 +117,7 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
         try {
             CubeSegment segment = cubeManager.appendSegments(cubeInstance, streamingBatch.getTimeRange().getFirst(), streamingBatch.getTimeRange().getSecond(), false, false);
             segment.setLastBuildJobID(segment.getUuid()); // give a fake job id
+            segment.setInputRecords(streamingBatch.getMessages().size());
             return segment;
         } catch (IOException e) {
             throw new RuntimeException("failed to create IBuildable", e);


[13/50] [abbrv] kylin git commit: KYLIN-1323 give “kylin.hbase.hfile.size.gb” a default value

Posted by li...@apache.org.
KYLIN-1323 give “kylin.hbase.hfile.size.gb” a default value


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/7df1cc74
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/7df1cc74
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/7df1cc74

Branch: refs/heads/master
Commit: 7df1cc74dcd9eda6d315cb9b7af5da9b9007bc9a
Parents: 0f48f10
Author: sunyerui <su...@gmail.com>
Authored: Mon Feb 29 12:12:15 2016 +0800
Committer: sunyerui <su...@gmail.com>
Committed: Mon Feb 29 12:12:15 2016 +0800

----------------------------------------------------------------------
 build/conf/kylin.properties                                        | 2 +-
 .../src/main/java/org/apache/kylin/common/KylinConfigBase.java     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/7df1cc74/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index b220b2d..5532339 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -66,7 +66,7 @@ kylin.hbase.region.cut.medium=10
 kylin.hbase.region.cut.large=50
 
 # The hfile size of GB, smaller hfile leading to the converting hfile MR has more reducers and be faster
-# set to 0 or comment this config to disable this optimization
+# set 0 to disable this optimization
 kylin.hbase.hfile.size.gb=5
 
 # Enable/disable ACL check for cube query

http://git-wip-us.apache.org/repos/asf/kylin/blob/7df1cc74/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 3430e0b..0cee9f8 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -412,7 +412,7 @@ public class KylinConfigBase implements Serializable {
     }
 
     public int getHBaseHFileSizeGB() {
-        return Integer.parseInt(getOptional("kylin.hbase.hfile.size.gb", "0"));
+        return Integer.parseInt(getOptional("kylin.hbase.hfile.size.gb", "5"));
     }
 
     public int getScanThreshold() {
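
A sketch of the effect on a consumer, assuming (per the kylin.properties
comment) that 0 still disables the optimization while the new default of 5
keeps it on; the reducer arithmetic is illustrative only, and config /
totalCubeSizeGB are stand-ins rather than the actual MR step:

    int hfileSizeGB = config.getHBaseHFileSizeGB();  // now 5 unless overridden
    if (hfileSizeGB > 0) {
        // smaller target hfiles => more reducers in the convert-to-hfile MR job
        int reducers = (int) Math.ceil(totalCubeSizeGB / (double) hfileSizeGB);
    }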


[07/50] [abbrv] kylin git commit: minor, fix CI occasionally fail issue

Posted by li...@apache.org.
minor, fix CI occasionally fail issue


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3f5074ee
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3f5074ee
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3f5074ee

Branch: refs/heads/master
Commit: 3f5074ee1568d5b0ba50d70d5c35319cd8223cc9
Parents: ab9d579
Author: honma <ho...@ebay.com>
Authored: Thu Feb 25 14:13:48 2016 +0800
Committer: honma <ho...@ebay.com>
Committed: Fri Feb 26 17:54:37 2016 +0800

----------------------------------------------------------------------
 .../dict/TupleFilterDictionaryTranslater.java   | 165 ------------------
 .../dict/TupleFilterFunctionTranslator.java     | 166 +++++++++++++++++++
 .../metadata/filter/FunctionTupleFilter.java    |   5 +-
 .../metadata/filter/function/BuiltInMethod.java |  33 ++--
 .../cache/AbstractCacheFledgedQuery.java        |  32 +---
 .../kylin/storage/cache/DynamicCacheTest.java   |  15 +-
 .../kylin/storage/cache/StaticCacheTest.java    |  19 ++-
 .../kylin/storage/hbase/ITStorageTest.java      |  11 +-
 .../common/coprocessor/FilterDecorator.java     |   4 +-
 .../hbase/cube/v2/CubeSegmentScanner.java       |   4 +-
 10 files changed, 226 insertions(+), 228 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3f5074ee/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterDictionaryTranslater.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterDictionaryTranslater.java b/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterDictionaryTranslater.java
deleted file mode 100644
index 9ef360d..0000000
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterDictionaryTranslater.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.dict;
-
-import com.google.common.primitives.Primitives;
-import org.apache.kylin.common.util.Dictionary;
-import org.apache.kylin.metadata.filter.ColumnTupleFilter;
-import org.apache.kylin.metadata.filter.CompareTupleFilter;
-import org.apache.kylin.metadata.filter.ConstantTupleFilter;
-import org.apache.kylin.metadata.filter.FunctionTupleFilter;
-import org.apache.kylin.metadata.filter.ITupleFilterTranslator;
-import org.apache.kylin.metadata.filter.LogicalTupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilter.FilterOperatorEnum;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ListIterator;
-
-/**
- * Created by dongli on 1/7/16.
- */
-public class TupleFilterDictionaryTranslater implements ITupleFilterTranslator {
-    public static final Logger logger = LoggerFactory.getLogger(TupleFilterDictionaryTranslater.class);
-
-    private IDictionaryAware dictionaryAware;
-
-    public TupleFilterDictionaryTranslater(IDictionaryAware dictionaryAware) {
-        this.dictionaryAware = dictionaryAware;
-    }
-
-    @Override
-    public TupleFilter translate(TupleFilter tupleFilter) {
-        TupleFilter translated = null;
-        if (tupleFilter instanceof CompareTupleFilter) {
-            translated = translateCompareTupleFilter((CompareTupleFilter) tupleFilter);
-            if (translated != null) {
-                logger.info("Translated {" + tupleFilter + "} to IN clause: {" + translated + "}");
-            }
-        } else if (tupleFilter instanceof FunctionTupleFilter) {
-            translated = translateFunctionTupleFilter((FunctionTupleFilter) tupleFilter);
-            if (translated != null) {
-                logger.info("Translated {" + tupleFilter + "} to IN clause: {" + translated + "}");
-            }
-        } else if (tupleFilter instanceof LogicalTupleFilter) {
-            ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) tupleFilter.getChildren().listIterator();
-            while (childIterator.hasNext()) {
-                TupleFilter tempTranslated = translate(childIterator.next());
-                if (tempTranslated != null)
-                    childIterator.set(tempTranslated);
-            }
-        }
-        return translated == null ? tupleFilter : translated;
-    }
-
-    private TupleFilter translateFunctionTupleFilter(FunctionTupleFilter functionTupleFilter) {
-        if (!functionTupleFilter.isValid())
-            return null;
-
-        TblColRef columnRef = functionTupleFilter.getColumn();
-        Dictionary<?> dict = dictionaryAware.getDictionary(columnRef);
-        if (dict == null)
-            return null;
-
-        CompareTupleFilter translated = new CompareTupleFilter(FilterOperatorEnum.IN);
-        translated.addChild(new ColumnTupleFilter(columnRef));
-
-        try {
-            for (int i = dict.getMinId(); i <= dict.getMaxId(); i++) {
-                Object dictVal = dict.getValueFromId(i);
-                if ((Boolean) functionTupleFilter.invokeFunction(dictVal)) {
-                    translated.addChild(new ConstantTupleFilter(dictVal));
-                }
-            }
-        } catch (Exception e) {
-            logger.debug(e.getMessage());
-            return null;
-        }
-        return translated;
-    }
-
-    @SuppressWarnings("unchecked")
-    private TupleFilter translateCompareTupleFilter(CompareTupleFilter compTupleFilter) {
-        if (compTupleFilter.getFunction() == null)
-            return null;
-
-        FunctionTupleFilter functionTupleFilter = compTupleFilter.getFunction();
-        if (!functionTupleFilter.isValid())
-            return null;
-
-        TblColRef columnRef = functionTupleFilter.getColumn();
-        Dictionary<?> dict = dictionaryAware.getDictionary(columnRef);
-        if (dict == null)
-            return null;
-
-        CompareTupleFilter translated = new CompareTupleFilter(FilterOperatorEnum.IN);
-        translated.addChild(new ColumnTupleFilter(columnRef));
-
-        try {
-            for (int i = dict.getMinId(); i <= dict.getMaxId(); i++) {
-                Object dictVal = dict.getValueFromId(i);
-                Object computedVal = functionTupleFilter.invokeFunction(dictVal);
-                Class clazz = Primitives.wrap(computedVal.getClass());
-                Object targetVal = compTupleFilter.getFirstValue();
-                if (Primitives.isWrapperType(clazz))
-                    targetVal = clazz.cast(clazz.getDeclaredMethod("valueOf", String.class).invoke(null, compTupleFilter.getFirstValue()));
-
-                int comp = ((Comparable) computedVal).compareTo(targetVal);
-                boolean compResult = false;
-                switch (compTupleFilter.getOperator()) {
-                    case EQ:
-                        compResult = comp == 0;
-                        break;
-                    case NEQ:
-                        compResult = comp != 0;
-                        break;
-                    case LT:
-                        compResult = comp < 0;
-                        break;
-                    case LTE:
-                        compResult = comp <= 0;
-                        break;
-                    case GT:
-                        compResult = comp > 0;
-                        break;
-                    case GTE:
-                        compResult = comp >= 0;
-                        break;
-                    case IN:
-                        compResult = compTupleFilter.getValues().contains(computedVal.toString());
-                        break;
-                    case NOTIN:
-                        compResult = !compTupleFilter.getValues().contains(computedVal.toString());
-                        break;
-                    default:
-                        break;
-                }
-                if (compResult) {
-                    translated.addChild(new ConstantTupleFilter(dictVal));
-                }
-            }
-        } catch (Exception e) {
-            logger.debug(e.getMessage());
-            return null;
-        }
-        return translated;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/3f5074ee/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTranslator.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTranslator.java b/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTranslator.java
new file mode 100644
index 0000000..1c96dd4
--- /dev/null
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTranslator.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.dict;
+
+import java.util.ListIterator;
+
+import org.apache.kylin.common.util.Dictionary;
+import org.apache.kylin.metadata.filter.ColumnTupleFilter;
+import org.apache.kylin.metadata.filter.CompareTupleFilter;
+import org.apache.kylin.metadata.filter.ConstantTupleFilter;
+import org.apache.kylin.metadata.filter.FunctionTupleFilter;
+import org.apache.kylin.metadata.filter.ITupleFilterTranslator;
+import org.apache.kylin.metadata.filter.LogicalTupleFilter;
+import org.apache.kylin.metadata.filter.TupleFilter;
+import org.apache.kylin.metadata.filter.TupleFilter.FilterOperatorEnum;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.primitives.Primitives;
+
+/**
+ * only take effect when the compare filter has function
+ */
+public class TupleFilterFunctionTranslator implements ITupleFilterTranslator {
+    public static final Logger logger = LoggerFactory.getLogger(TupleFilterFunctionTranslator.class);
+
+    private IDictionaryAware dictionaryAware;
+
+    public TupleFilterFunctionTranslator(IDictionaryAware dictionaryAware) {
+        this.dictionaryAware = dictionaryAware;
+    }
+
+    @Override
+    public TupleFilter translate(TupleFilter tupleFilter) {
+        TupleFilter translated = null;
+        if (tupleFilter instanceof CompareTupleFilter) {
+            translated = translateCompareTupleFilter((CompareTupleFilter) tupleFilter);
+            if (translated != null) {
+                logger.info("Translated {" + tupleFilter + "} to IN clause: {" + translated + "}");
+            }
+        } else if (tupleFilter instanceof FunctionTupleFilter) {
+            translated = translateFunctionTupleFilter((FunctionTupleFilter) tupleFilter);
+            if (translated != null) {
+                logger.info("Translated {" + tupleFilter + "} to IN clause: {" + translated + "}");
+            }
+        } else if (tupleFilter instanceof LogicalTupleFilter) {
+            ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) tupleFilter.getChildren().listIterator();
+            while (childIterator.hasNext()) {
+                TupleFilter tempTranslated = translate(childIterator.next());
+                if (tempTranslated != null)
+                    childIterator.set(tempTranslated);
+            }
+        }
+        return translated == null ? tupleFilter : translated;
+    }
+
+    private TupleFilter translateFunctionTupleFilter(FunctionTupleFilter functionTupleFilter) {
+        if (!functionTupleFilter.isValid())
+            return null;
+
+        TblColRef columnRef = functionTupleFilter.getColumn();
+        Dictionary<?> dict = dictionaryAware.getDictionary(columnRef);
+        if (dict == null)
+            return null;
+
+        CompareTupleFilter translated = new CompareTupleFilter(FilterOperatorEnum.IN);
+        translated.addChild(new ColumnTupleFilter(columnRef));
+
+        try {
+            for (int i = dict.getMinId(); i <= dict.getMaxId(); i++) {
+                Object dictVal = dict.getValueFromId(i);
+                if ((Boolean) functionTupleFilter.invokeFunction(dictVal)) {
+                    translated.addChild(new ConstantTupleFilter(dictVal));
+                }
+            }
+        } catch (Exception e) {
+            logger.debug(e.getMessage());
+            return null;
+        }
+        return translated;
+    }
+
+    @SuppressWarnings("unchecked")
+    private TupleFilter translateCompareTupleFilter(CompareTupleFilter compTupleFilter) {
+        if (compTupleFilter.getFunction() == null)
+            return null;
+
+        FunctionTupleFilter functionTupleFilter = compTupleFilter.getFunction();
+        if (!functionTupleFilter.isValid())
+            return null;
+
+        TblColRef columnRef = functionTupleFilter.getColumn();
+        Dictionary<?> dict = dictionaryAware.getDictionary(columnRef);
+        if (dict == null)
+            return null;
+
+        CompareTupleFilter translated = new CompareTupleFilter(FilterOperatorEnum.IN);
+        translated.addChild(new ColumnTupleFilter(columnRef));
+
+        try {
+            for (int i = dict.getMinId(); i <= dict.getMaxId(); i++) {
+                Object dictVal = dict.getValueFromId(i);
+                Object computedVal = functionTupleFilter.invokeFunction(dictVal);
+                Class clazz = Primitives.wrap(computedVal.getClass());
+                Object targetVal = compTupleFilter.getFirstValue();
+                if (Primitives.isWrapperType(clazz))
+                    targetVal = clazz.cast(clazz.getDeclaredMethod("valueOf", String.class).invoke(null, compTupleFilter.getFirstValue()));
+
+                int comp = ((Comparable) computedVal).compareTo(targetVal);
+                boolean compResult = false;
+                switch (compTupleFilter.getOperator()) {
+                case EQ:
+                    compResult = comp == 0;
+                    break;
+                case NEQ:
+                    compResult = comp != 0;
+                    break;
+                case LT:
+                    compResult = comp < 0;
+                    break;
+                case LTE:
+                    compResult = comp <= 0;
+                    break;
+                case GT:
+                    compResult = comp > 0;
+                    break;
+                case GTE:
+                    compResult = comp >= 0;
+                    break;
+                case IN:
+                    compResult = compTupleFilter.getValues().contains(computedVal.toString());
+                    break;
+                case NOTIN:
+                    compResult = !compTupleFilter.getValues().contains(computedVal.toString());
+                    break;
+                default:
+                    break;
+                }
+                if (compResult) {
+                    translated.addChild(new ConstantTupleFilter(dictVal));
+                }
+            }
+        } catch (Exception e) {
+            logger.debug(e.getMessage());
+            return null;
+        }
+        return translated;
+    }
+}
\ No newline at end of file
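
For readers skimming the patch: the heart of TupleFilterFunctionTranslator above is to brute-force the
function over every entry of the (small) column dictionary and collect the matching values into an IN
filter. A minimal, self-contained Java sketch of that enumeration idea, with plain collections standing
in for Kylin's Dictionary and TupleFilter classes (all names here are illustrative only):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.function.Predicate;

    public class InClauseSketch {
        // stand-in for Dictionary<?>: ids 0..n-1 map to these values
        static final List<String> DICT = Arrays.asList("apple", "Banana", "cherry");

        // mirrors translateFunctionTupleFilter: evaluate the function for every
        // dictionary id and keep the values for which it holds
        static List<String> toInValues(Predicate<String> function) {
            List<String> in = new ArrayList<>();
            for (String value : DICT) {          // dict.getMinId()..getMaxId() in Kylin
                if (function.test(value)) {
                    in.add(value);               // becomes a ConstantTupleFilter child
                }
            }
            return in;
        }

        public static void main(String[] args) {
            // e.g. WHERE UPPER(col) LIKE '%AN%'  ->  col IN ('Banana')
            System.out.println(toInValues(v -> v.toUpperCase().contains("AN")));
        }
    }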

http://git-wip-us.apache.org/repos/asf/kylin/blob/3f5074ee/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FunctionTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FunctionTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FunctionTupleFilter.java
index 15fcb72..30bef97 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FunctionTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FunctionTupleFilter.java
@@ -35,9 +35,6 @@ import org.slf4j.LoggerFactory;
 import com.google.common.collect.Lists;
 import com.google.common.primitives.Primitives;
 
-/**
- * Created by dongli on 11/11/15.
- */
 public class FunctionTupleFilter extends TupleFilter {
     public static final Logger logger = LoggerFactory.getLogger(FunctionTupleFilter.class);
 
@@ -79,7 +76,7 @@ public class FunctionTupleFilter extends TupleFilter {
         if (columnContainerFilter instanceof ColumnTupleFilter)
             methodParams.set(colPosition, (Serializable) input);
         else if (columnContainerFilter instanceof FunctionTupleFilter)
-            methodParams.set(colPosition, (Serializable) ((FunctionTupleFilter) columnContainerFilter).invokeFunction((Serializable) input));
+            methodParams.set(colPosition, (Serializable) ((FunctionTupleFilter) columnContainerFilter).invokeFunction(input));
         return method.invoke(null, (Object[]) (methodParams.toArray()));
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/3f5074ee/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
index b927d8d..7b241cc 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
@@ -18,29 +18,21 @@
 
 package org.apache.kylin.metadata.filter.function;
 
-import com.google.common.collect.ImmutableMap;
-import org.apache.commons.lang3.reflect.MethodUtils;
-
 import java.lang.reflect.Method;
 import java.util.regex.Pattern;
 
-/**
- * Created by dongli on 11/13/15.
- */
+import org.apache.commons.lang3.reflect.MethodUtils;
+
+import com.google.common.collect.ImmutableMap;
+
 public enum BuiltInMethod {
-    UPPER(BuiltInMethod.class, "upper", String.class),
-    LOWER(BuiltInMethod.class, "lower", String.class),
-    SUBSTRING(BuiltInMethod.class, "substring", String.class, int.class, int.class),
-    CHAR_LENGTH(BuiltInMethod.class, "charLength", String.class),
-    LIKE(BuiltInMethod.class, "like", String.class, String.class),
-    INITCAP(BuiltInMethod.class, "initcap", String.class);
+    UPPER(BuiltInMethod.class, "upper", String.class), LOWER(BuiltInMethod.class, "lower", String.class), SUBSTRING(BuiltInMethod.class, "substring", String.class, int.class, int.class), CHAR_LENGTH(BuiltInMethod.class, "charLength", String.class), LIKE(BuiltInMethod.class, "like", String.class, String.class), INITCAP(BuiltInMethod.class, "initcap", String.class);
     public final Method method;
 
     public static final ImmutableMap<String, BuiltInMethod> MAP;
 
     static {
-        final ImmutableMap.Builder<String, BuiltInMethod> builder =
-                ImmutableMap.builder();
+        final ImmutableMap.Builder<String, BuiltInMethod> builder = ImmutableMap.builder();
         for (BuiltInMethod value : BuiltInMethod.values()) {
             if (value.method != null) {
                 builder.put(value.name(), value);
@@ -70,22 +62,22 @@ public enum BuiltInMethod {
         for (int i = 0; i < len; i++) {
             char curCh = s.charAt(i);
             final int c = (int) curCh;
-            if (start) {  // curCh is whitespace or first character of word.
+            if (start) { // curCh is whitespace or first character of word.
                 if (c > 47 && c < 58) { // 0-9
                     start = false;
-                } else if (c > 64 && c < 91) {  // A-Z
+                } else if (c > 64 && c < 91) { // A-Z
                     start = false;
-                } else if (c > 96 && c < 123) {  // a-z
+                } else if (c > 96 && c < 123) { // a-z
                     start = false;
                     curCh = (char) (c - 32); // Uppercase this character
                 }
                 // else {} whitespace
-            } else {  // Inside of a word or white space after end of word.
+            } else { // Inside of a word or white space after end of word.
                 if (c > 47 && c < 58) { // 0-9
                     // noop
-                } else if (c > 64 && c < 91) {  // A-Z
+                } else if (c > 64 && c < 91) { // A-Z
                     curCh = (char) (c + 32); // Lowercase this character
-                } else if (c > 96 && c < 123) {  // a-z
+                } else if (c > 96 && c < 123) { // a-z
                     // noop
                 } else { // whitespace
                     start = true;
@@ -116,5 +108,4 @@ public enum BuiltInMethod {
         return s.toLowerCase();
     }
 
-
 }
\ No newline at end of file
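
As a rough usage sketch (assuming the enum above is on the classpath): the MAP lookup plus the
method.invoke(null, ...) call that FunctionTupleFilter performs boils down to the following. The wrapped
methods are static, hence the null receiver:

    import java.lang.reflect.Method;
    import org.apache.kylin.metadata.filter.function.BuiltInMethod;

    public class BuiltInMethodSketch {
        public static void main(String[] args) throws Exception {
            // keys of MAP are the enum names, e.g. "UPPER", "LIKE", "INITCAP"
            Method initcap = BuiltInMethod.MAP.get("INITCAP").method;
            // static method, so the receiver passed to invoke() is null
            System.out.println(initcap.invoke(null, "hello world")); // prints: Hello World
        }
    }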

http://git-wip-us.apache.org/repos/asf/kylin/blob/3f5074ee/core-storage/src/main/java/org/apache/kylin/storage/cache/AbstractCacheFledgedQuery.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/cache/AbstractCacheFledgedQuery.java b/core-storage/src/main/java/org/apache/kylin/storage/cache/AbstractCacheFledgedQuery.java
index 6ba76c4..a5ca800 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/cache/AbstractCacheFledgedQuery.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/cache/AbstractCacheFledgedQuery.java
@@ -5,9 +5,6 @@ import net.sf.ehcache.CacheManager;
 import net.sf.ehcache.Element;
 import net.sf.ehcache.Status;
 import net.sf.ehcache.config.CacheConfiguration;
-import net.sf.ehcache.config.Configuration;
-import net.sf.ehcache.config.PersistenceConfiguration;
-import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metadata.realization.StreamSQLDigest;
@@ -21,6 +18,7 @@ import org.slf4j.LoggerFactory;
  */
 public abstract class AbstractCacheFledgedQuery implements IStorageQuery, TeeTupleItrListener {
     private static final Logger logger = LoggerFactory.getLogger(AbstractCacheFledgedQuery.class);
+
     private static final String storageCacheTemplate = "StorageCache";
 
     protected static CacheManager CACHE_MANAGER;
@@ -37,31 +35,6 @@ public abstract class AbstractCacheFledgedQuery implements IStorageQuery, TeeTup
         CACHE_MANAGER = cacheManager;
     }
 
-    /**
-     * This method is only useful non-spring injected test cases.
-     * When Kylin is normally ran as a spring app CACHE_MANAGER will be injected.
-     * and the configuration for cache lies in server/src/main/resources/ehcache.xml
-     * 
-     * the cache named "StorageCache" acts like a template for each realization to
-     * create its own cache.
-     */
-    private static void initCacheManger() {
-        Configuration conf = new Configuration();
-        conf.setMaxBytesLocalHeap("128M");
-        CACHE_MANAGER = CacheManager.create(conf);
-
-        //a fake template for test cases
-        Cache storageCache = new Cache(new CacheConfiguration(storageCacheTemplate, 0).//
-                memoryStoreEvictionPolicy(MemoryStoreEvictionPolicy.LRU).//
-                eternal(false).//
-                timeToIdleSeconds(86400).//
-                diskExpiryThreadIntervalSeconds(0).//
-                //maxBytesLocalHeap(10, MemoryUnit.MEGABYTES).//
-                persistence(new PersistenceConfiguration().strategy(PersistenceConfiguration.Strategy.NONE)));
-
-        CACHE_MANAGER.addCacheIfAbsent(storageCache);
-    }
-
     protected StreamSQLResult getStreamSQLResult(StreamSQLDigest streamSQLDigest) {
 
         Cache cache = CACHE_MANAGER.getCache(this.underlyingStorage.getStorageUUID());
@@ -87,8 +60,7 @@ public abstract class AbstractCacheFledgedQuery implements IStorageQuery, TeeTup
 
     private void makeCacheIfNecessary(String storageUUID) {
         if (CACHE_MANAGER == null || (!(CACHE_MANAGER.getStatus().equals(Status.STATUS_ALIVE)))) {
-            logger.warn("CACHE_MANAGER is not provided or not alive");
-            initCacheManger();
+            throw new RuntimeException("CACHE_MANAGER is not provided or not alive");
         }
 
         if (CACHE_MANAGER.getCache(storageUUID) == null) {
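
With the silent fallback removed, any non-Spring caller must wire the CacheManager itself before issuing
queries, exactly as the test classes updated below now do. A rough outline of that bootstrap (paths as in
those tests, error handling elided):

    CacheManager cacheManager = CacheManager.newInstance("../server/src/main/resources/ehcache-test.xml");
    AbstractCacheFledgedQuery.setCacheManager(cacheManager);
    try {
        // ... run storage queries ...
    } finally {
        cacheManager.shutdown();
        AbstractCacheFledgedQuery.setCacheManager(null);
    }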

http://git-wip-us.apache.org/repos/asf/kylin/blob/3f5074ee/core-storage/src/test/java/org/apache/kylin/storage/cache/DynamicCacheTest.java
----------------------------------------------------------------------
diff --git a/core-storage/src/test/java/org/apache/kylin/storage/cache/DynamicCacheTest.java b/core-storage/src/test/java/org/apache/kylin/storage/cache/DynamicCacheTest.java
index 53e5f5b..3193bbb 100644
--- a/core-storage/src/test/java/org/apache/kylin/storage/cache/DynamicCacheTest.java
+++ b/core-storage/src/test/java/org/apache/kylin/storage/cache/DynamicCacheTest.java
@@ -5,6 +5,7 @@ import java.util.IdentityHashMap;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import net.sf.ehcache.CacheManager;
 import org.apache.commons.lang.NotImplementedException;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.DateFormat;
@@ -20,6 +21,7 @@ import org.apache.kylin.metadata.tuple.Tuple;
 import org.apache.kylin.metadata.tuple.TupleInfo;
 import org.apache.kylin.storage.ICachableStorageQuery;
 import org.apache.kylin.storage.StorageContext;
+import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -32,10 +34,21 @@ import com.google.common.collect.Ranges;
  */
 public class DynamicCacheTest {
 
+    private static CacheManager cacheManager;
+    
     @BeforeClass
-    public static void setup() {
+    public static void setupResource() throws Exception {
         System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
         KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cache.threshold.duration", "0");
+        
+        cacheManager = CacheManager.newInstance("../server/src/main/resources/ehcache-test.xml");
+        AbstractCacheFledgedQuery.setCacheManager(cacheManager);
+    }
+
+    @AfterClass
+    public static void tearDownResource() {
+        cacheManager.shutdown();
+        AbstractCacheFledgedQuery.setCacheManager(null);
     }
 
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/3f5074ee/core-storage/src/test/java/org/apache/kylin/storage/cache/StaticCacheTest.java
----------------------------------------------------------------------
diff --git a/core-storage/src/test/java/org/apache/kylin/storage/cache/StaticCacheTest.java b/core-storage/src/test/java/org/apache/kylin/storage/cache/StaticCacheTest.java
index 182091b..b1665df 100644
--- a/core-storage/src/test/java/org/apache/kylin/storage/cache/StaticCacheTest.java
+++ b/core-storage/src/test/java/org/apache/kylin/storage/cache/StaticCacheTest.java
@@ -6,6 +6,7 @@ import java.util.IdentityHashMap;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import net.sf.ehcache.CacheManager;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.IdentityUtils;
 import org.apache.kylin.metadata.filter.TupleFilter;
@@ -20,6 +21,7 @@ import org.apache.kylin.metadata.tuple.Tuple;
 import org.apache.kylin.metadata.tuple.TupleInfo;
 import org.apache.kylin.storage.ICachableStorageQuery;
 import org.apache.kylin.storage.StorageContext;
+import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -30,12 +32,25 @@ import com.google.common.collect.Range;
 /**
  */
 public class StaticCacheTest {
+
+    private static CacheManager cacheManager;
+
     @BeforeClass
-    public static void setup() {
+    public static void setupResource() throws Exception {
+
         System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox");
-        
+
         KylinConfig config = KylinConfig.getInstanceFromEnv();
         config.setProperty("kylin.query.cache.threshold.duration", "0");
+        
+        cacheManager = CacheManager.newInstance("../server/src/main/resources/ehcache-test.xml");
+        AbstractCacheFledgedQuery.setCacheManager(cacheManager);
+    }
+
+    @AfterClass
+    public static void tearDownResource() {
+        cacheManager.shutdown();
+        AbstractCacheFledgedQuery.setCacheManager(null);
     }
 
     @Test

http://git-wip-us.apache.org/repos/asf/kylin/blob/3f5074ee/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
index d6443e7..c253770 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
@@ -18,12 +18,14 @@
 
 package org.apache.kylin.storage.hbase;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
 
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
+import net.sf.ehcache.CacheManager;
+
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.apache.kylin.cube.CubeInstance;
@@ -38,6 +40,7 @@ import org.apache.kylin.metadata.tuple.ITupleIterator;
 import org.apache.kylin.storage.IStorageQuery;
 import org.apache.kylin.storage.StorageContext;
 import org.apache.kylin.storage.StorageFactory;
+import org.apache.kylin.storage.cache.AbstractCacheFledgedQuery;
 import org.apache.kylin.storage.cache.StorageMockUtils;
 import org.apache.kylin.storage.exception.ScanOutOfLimitException;
 import org.junit.After;
@@ -54,12 +57,18 @@ public class ITStorageTest extends HBaseMetadataTestCase {
     private CubeInstance cube;
     private StorageContext context;
 
+    private static CacheManager cacheManager;
+
     @BeforeClass
     public static void setupResource() throws Exception {
+        cacheManager = CacheManager.newInstance("../server/src/main/resources/ehcache-test.xml");
+        AbstractCacheFledgedQuery.setCacheManager(cacheManager);
     }
 
     @AfterClass
     public static void tearDownResource() {
+        cacheManager.shutdown();
+        AbstractCacheFledgedQuery.setCacheManager(null);
     }
 
     @Before

http://git-wip-us.apache.org/repos/asf/kylin/blob/3f5074ee/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
index 294f399..01d3041 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
@@ -8,7 +8,7 @@ import org.apache.kylin.common.util.Dictionary;
 import org.apache.kylin.cube.kv.RowKeyColumnIO;
 import org.apache.kylin.dict.DictCodeSystem;
 import org.apache.kylin.dict.IDictionaryAware;
-import org.apache.kylin.dict.TupleFilterDictionaryTranslater;
+import org.apache.kylin.dict.TupleFilterFunctionTranslator;
 import org.apache.kylin.metadata.filter.ColumnTupleFilter;
 import org.apache.kylin.metadata.filter.CompareTupleFilter;
 import org.apache.kylin.metadata.filter.ConstantTupleFilter;
@@ -131,7 +131,7 @@ public class FilterDecorator implements TupleFilterSerializer.Decorator {
         if (filter == null)
             return null;
 
-        ITupleFilterTranslator translator = new TupleFilterDictionaryTranslater(columnIO.getIDictionaryAware());
+        ITupleFilterTranslator translator = new TupleFilterFunctionTranslator(columnIO.getIDictionaryAware());
         filter = translator.translate(filter);
 
         // un-evaluatable filter is replaced with TRUE

http://git-wip-us.apache.org/repos/asf/kylin/blob/3f5074ee/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
index abfb74d..e96c602 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
@@ -23,7 +23,7 @@ import org.apache.kylin.cube.gridtable.CubeGridTable;
 import org.apache.kylin.cube.gridtable.CuboidToGridTableMapping;
 import org.apache.kylin.cube.gridtable.NotEnoughGTInfoException;
 import org.apache.kylin.cube.model.CubeDesc;
-import org.apache.kylin.dict.TupleFilterDictionaryTranslater;
+import org.apache.kylin.dict.TupleFilterFunctionTranslator;
 import org.apache.kylin.gridtable.EmptyGTScanner;
 import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.gridtable.GTRecord;
@@ -64,7 +64,7 @@ public class CubeSegmentScanner implements IGTScanner {
         CuboidToGridTableMapping mapping = cuboid.getCuboidToGridTableMapping();
 
         // translate FunctionTupleFilter to IN clause
-        ITupleFilterTranslator translator = new TupleFilterDictionaryTranslater(this.cubeSeg);
+        ITupleFilterTranslator translator = new TupleFilterFunctionTranslator(this.cubeSeg);
         filter = translator.translate(filter);
 
         //replace the constant values in filter to dictionary codes 


[25/50] [abbrv] kylin git commit: KYLIN-1465 log improvement on query comparison test

Posted by li...@apache.org.
KYLIN-1465 log improvement on query comparison test


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c4d94f79
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c4d94f79
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c4d94f79

Branch: refs/heads/master
Commit: c4d94f793f22e1c86f3f23d5f83a790b24e17966
Parents: 61f3278
Author: Hongbin Ma <ma...@apache.org>
Authored: Thu Mar 3 10:31:48 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Thu Mar 3 11:09:26 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/query/ITCombinationTest.java   |  5 +-
 .../apache/kylin/query/ITKylinQueryTest.java    |  6 +-
 .../org/apache/kylin/query/KylinTestBase.java   | 69 ++++++++++----------
 3 files changed, 39 insertions(+), 41 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c4d94f79/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
index f4bbe5f..1845587 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
@@ -36,11 +36,12 @@ public class ITCombinationTest extends ITKylinQueryTest {
 
     @BeforeClass
     public static void setUp() throws SQLException {
-        System.out.println("setUp in ITCombinationTest");
+        printInfo("setUp in ITCombinationTest");
     }
 
     @AfterClass
     public static void tearDown() {
+        printInfo("tearDown in ITCombinationTest");
         clean();
         HBaseStorage.overwriteStorageQuery = null;
     }
@@ -58,6 +59,8 @@ public class ITCombinationTest extends ITKylinQueryTest {
 
     public ITCombinationTest(String joinType, String coprocessorToggle, String queryEngine) throws Exception {
 
+        printInfo("Into combination join type: " + joinType + ", coprocessor toggle: " + coprocessorToggle + ", query engine: " + queryEngine);
+
         ITKylinQueryTest.clean();
 
         ITKylinQueryTest.joinType = joinType;

http://git-wip-us.apache.org/repos/asf/kylin/blob/c4d94f79/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index fd88452..54abd4d 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -46,10 +46,9 @@ import org.junit.Test;
 public class ITKylinQueryTest extends KylinTestBase {
     private static CacheManager cacheManager;
 
-
     @BeforeClass
     public static void setUp() throws Exception {
-        printInfo("set up in KylinQueryTest");
+        printInfo("setUp in KylinQueryTest");
         joinType = "left";
 
         setupAll();
@@ -57,8 +56,7 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     @AfterClass
     public static void tearDown() throws Exception {
-        printInfo("tearDown");
-        printInfo("Closing connection...");
+        printInfo("tearDown in KylinQueryTest");
         clean();
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/c4d94f79/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index b5c6d10..c4a94ed 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -36,19 +36,24 @@ import java.sql.Statement;
 import java.sql.Timestamp;
 import java.sql.Types;
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
+import java.util.TreeSet;
 import java.util.logging.LogManager;
 
+import com.google.common.base.Throwables;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.exception.ExceptionContext;
 import org.apache.kylin.common.KylinConfig;
 import org.dbunit.Assertion;
 import org.dbunit.database.DatabaseConfig;
 import org.dbunit.database.DatabaseConnection;
 import org.dbunit.database.IDatabaseConnection;
 import org.dbunit.dataset.DataSetException;
+import org.dbunit.dataset.DefaultTable;
 import org.dbunit.dataset.ITable;
 import org.dbunit.dataset.SortedTable;
 import org.dbunit.dataset.datatype.DataType;
@@ -58,6 +63,8 @@ import org.dbunit.ext.h2.H2DataTypeFactory;
 import org.junit.Assert;
 
 import com.google.common.io.Files;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  */
@@ -96,39 +103,26 @@ public class KylinTestBase {
         }
     }
 
+    private static class FileByNameComparator implements Comparator<File> {
+        @Override
+        public int compare(File o1, File o2) {
+            return String.CASE_INSENSITIVE_ORDER.compare(o1.getName(), o2.getName());
+        }
+    }
+
     /**
      * @param folder
-     * @param fileType
-     *            specify the interested file type by file extension
+     * @param fileType specify the interested file type by file extension
      * @return
      */
     protected static List<File> getFilesFromFolder(final File folder, final String fileType) {
-        List<File> files = new ArrayList<File>();
+        Set<File> set = new TreeSet<>(new FileByNameComparator());
         for (final File fileEntry : folder.listFiles()) {
             if (fileEntry.getName().toLowerCase().endsWith(fileType.toLowerCase())) {
-                files.add(fileEntry);
+                set.add(fileEntry);
             }
         }
-        return files;
-    }
-
-    protected static void getFilesFromFolderR(final String directoryStr, List<File> files, final String fileType) {
-        File folder = new File(directoryStr);
-        for (final File fileEntry : folder.listFiles()) {
-            if (fileEntry.isDirectory()) {
-                getFilesFromFolderR(fileEntry.getAbsolutePath(), files, fileType);
-            } else if (fileEntry.isFile()) {
-                if (fileEntry.getName().toLowerCase().endsWith(fileType.toLowerCase())) {
-                    files.add(fileEntry);
-                }
-            }
-        }
-    }
-
-    protected static void putTextTofile(File file, String sql) throws IOException {
-        BufferedWriter writer = new BufferedWriter(new FileWriter(file));
-        writer.write(sql, 0, sql.length());
-        writer.close();
+        return new ArrayList<>(set);
     }
 
     protected static String getTextFromFile(File file) throws IOException {
@@ -293,14 +287,6 @@ public class KylinTestBase {
         return ret;
     }
 
-    protected static void batchChangeJoinType(String targetType) throws IOException {
-        List<File> files = new LinkedList<File>();
-        getFilesFromFolderR("src/test/resources/query", files, ".sql");
-        for (File file : files) {
-            String x = changeJoinType(getTextFromFile(file), targetType);
-            putTextTofile(file, x);
-        }
-    }
 
     protected void execQueryUsingH2(String queryFolder, boolean needSort) throws Exception {
         printInfo("---------- Running H2 queries: " + queryFolder);
@@ -363,8 +349,14 @@ public class KylinTestBase {
             h2Conn.getConfig().setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY, new TestH2DataTypeFactory());
             ITable h2Table = executeQuery(h2Conn, queryName, sql, needSort);
 
-            // compare the result
-            Assert.assertEquals(h2Table.getRowCount(), kylinTable.getRowCount());
+
+            try {
+                // compare the result
+                Assert.assertEquals(h2Table.getRowCount(), kylinTable.getRowCount());
+            } catch (Throwable t) {
+                printInfo("execAndCompResultSize failed on: " + sqlFile.getAbsolutePath());
+                throw t;
+            }
 
             compQueryCount++;
             if (kylinTable.getRowCount() == 0) {
@@ -396,8 +388,13 @@ public class KylinTestBase {
             h2Conn.getConfig().setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY, new TestH2DataTypeFactory());
             ITable h2Table = executeQuery(h2Conn, queryName, sql, needSort);
 
-            // compare the result
-            Assertion.assertEquals(h2Table, kylinTable);
+            try {
+                // compare the result
+                Assertion.assertEquals(h2Table, kylinTable);
+            } catch (Throwable t) {
+                printInfo("execAndCompQuery failed on: " + sqlFile.getAbsolutePath());
+                throw t;
+            }
 
             compQueryCount++;
             if (kylinTable.getRowCount() == 0) {


[08/50] [abbrv] kylin git commit: KYLIN-1366 simplify metadata version binding

Posted by li...@apache.org.
KYLIN-1366 simplify metadata version binding

kylin 1366


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ab9d5791
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ab9d5791
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ab9d5791

Branch: refs/heads/master
Commit: ab9d57914a119b5b426d2bfae712ca55ff35c92a
Parents: 4c08ded
Author: honma <ho...@ebay.com>
Authored: Wed Feb 24 15:58:07 2016 +0800
Committer: honma <ho...@ebay.com>
Committed: Fri Feb 26 17:54:37 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/common/KylinConfigBase.java   |  8 --------
 .../java/org/apache/kylin/common/KylinVersion.java |  2 +-
 .../common/persistence/RootPersistentEntity.java   | 17 +++++------------
 .../java/org/apache/kylin/cube/CubeInstance.java   |  5 ++---
 .../java/org/apache/kylin/cube/model/CubeDesc.java |  2 +-
 .../java/org/apache/kylin/dict/DictionaryInfo.java |  4 ++--
 .../apache/kylin/dict/lookup/SnapshotManager.java  |  2 +-
 .../apache/kylin/metadata/model/DataModelDesc.java |  2 +-
 .../kylin/metadata/project/ProjectInstance.java    |  2 +-
 .../kylin/metadata/project/ProjectManager.java     |  2 +-
 .../kylin/storage/hybrid/HybridInstance.java       |  2 +-
 .../org/apache/kylin/invertedindex/IIInstance.java |  2 +-
 .../kylin/rest/controller/CubeController.java      |  4 ++--
 .../storage/hbase/util/ExtendCubeToHybridCLI.java  |  4 ++--
 14 files changed, 21 insertions(+), 37 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 5f9983a..7707684 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -61,14 +61,6 @@ public class KylinConfigBase implements Serializable {
         return kylinHome;
     }
 
-    /**
-     * @see KylinVersion
-     * @return current kylin version
-     */
-    public static String getKylinVersion(){
-        return KylinVersion.getCurrentVersion();
-    }
-
     // ============================================================================
 
     private volatile Properties properties = new Properties();

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java b/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
index d711b38..42cf237 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
@@ -21,7 +21,7 @@ package org.apache.kylin.common;
  *
  * @since 2.1
  */
-class KylinVersion {
+public class KylinVersion {
     /**
      * Require MANUAL updating kylin version per ANY upgrading.
      */

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java b/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java
index 327ddcc..c46abe7 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java
@@ -26,11 +26,11 @@ import java.util.Date;
 import java.util.UUID;
 
 import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.kylin.common.KylinVersion;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import org.apache.kylin.common.KylinConfig;
 
 /**
  * Marks the root entity of JSON persistence. Unit of read, write, cache, and
@@ -80,14 +80,14 @@ abstract public class RootPersistentEntity implements AclEntity, Serializable {
      * For example: 2.1
      */
     @JsonProperty("version")
-    protected String version;
+    protected String version = KylinVersion.getCurrentVersion();
 
     public String getVersion() {
-      return version;
+        return version;
     }
 
     public void setVersion(String version) {
-      this.version = version;
+        this.version = version;
     }
 
     public String getUuid() {
@@ -110,14 +110,7 @@ abstract public class RootPersistentEntity implements AclEntity, Serializable {
         this.lastModified = lastModified;
     }
 
-    /**
-     * Update entity's "model_version" with current kylin version and "uuid" with random UUID
-     *
-     * @see KylinConfig#getKylinVersion()
-     * @see UUID#randomUUID()
-     */
-    public void updateVersionAndRandomUuid() {
-        setVersion(KylinConfig.getKylinVersion());
+    public void updateRandomUuid() {
         setUuid(UUID.randomUUID().toString());
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
index c26e2d2..2862d4f 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
@@ -63,7 +63,7 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
         cubeInstance.setCreateTimeUTC(System.currentTimeMillis());
         cubeInstance.setSegments(new ArrayList<CubeSegment>());
         cubeInstance.setStatus(RealizationStatusEnum.DISABLED);
-        cubeInstance.updateVersionAndRandomUuid();
+        cubeInstance.updateRandomUuid();
 
         return cubeInstance;
     }
@@ -427,10 +427,9 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
         newCube.setConfig(cubeInstance.getConfig());
         newCube.setStatus(cubeInstance.getStatus());
         newCube.setOwner(cubeInstance.getOwner());
-        newCube.setVersion(cubeInstance.getVersion());
         newCube.setCost(cubeInstance.getCost());
         newCube.setCreateTimeUTC(System.currentTimeMillis());
-        newCube.updateVersionAndRandomUuid();
+        newCube.updateRandomUuid();
         return newCube;
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
index de73399..49f70f8 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
@@ -998,7 +998,7 @@ public class CubeDesc extends RootPersistentEntity {
         newCubeDesc.setStorageType(cubeDesc.getStorageType());
         newCubeDesc.setAggregationGroups(cubeDesc.getAggregationGroups());
         newCubeDesc.setConfig(cubeDesc.getConfig());
-        newCubeDesc.updateVersionAndRandomUuid();
+        newCubeDesc.updateRandomUuid();
         return newCubeDesc;
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryInfo.java b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryInfo.java
index 8e41abf..4fba59a 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryInfo.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryInfo.java
@@ -53,7 +53,7 @@ public class DictionaryInfo extends RootPersistentEntity {
 
     public DictionaryInfo(String sourceTable, String sourceColumn, int sourceColumnIndex, String dataType, TableSignature input) {
 
-        this.updateVersionAndRandomUuid();
+        this.updateRandomUuid();
 
         this.sourceTable = sourceTable;
         this.sourceColumn = sourceColumn;
@@ -64,7 +64,7 @@ public class DictionaryInfo extends RootPersistentEntity {
 
     public DictionaryInfo(DictionaryInfo other) {
 
-        this.updateVersionAndRandomUuid();
+        this.updateRandomUuid();
 
         this.sourceTable = other.sourceTable;
         this.sourceColumn = other.sourceColumn;

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
index ccdc79d..53bf60d 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
@@ -87,7 +87,7 @@ public class SnapshotManager {
 
     public SnapshotTable buildSnapshot(ReadableTable table, TableDesc tableDesc) throws IOException {
         SnapshotTable snapshot = new SnapshotTable(table);
-        snapshot.updateVersionAndRandomUuid();
+        snapshot.updateRandomUuid();
 
         String dup = checkDupByInfo(snapshot);
         if (dup != null) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
index c042138..1647707 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
@@ -348,7 +348,7 @@ public class DataModelDesc extends RootPersistentEntity {
         newDataModelDesc.setLookups(dataModelDesc.getLookups());
         newDataModelDesc.setMetrics(dataModelDesc.getMetrics());
         newDataModelDesc.setPartitionDesc(PartitionDesc.getCopyOf(dataModelDesc.getPartitionDesc()));
-        newDataModelDesc.updateVersionAndRandomUuid();
+        newDataModelDesc.updateRandomUuid();
         return newDataModelDesc;
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
index 20741ee..e0ed3d9 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
@@ -92,7 +92,7 @@ public class ProjectInstance extends RootPersistentEntity {
     public static ProjectInstance create(String name, String owner, String description, List<RealizationEntry> realizationEntries, List<String> models) {
         ProjectInstance projectInstance = new ProjectInstance();
 
-        projectInstance.updateVersionAndRandomUuid();
+        projectInstance.updateRandomUuid();
         projectInstance.setName(name);
         projectInstance.setOwner(owner);
         projectInstance.setDescription(description);

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
index f73239c..b6e99b3 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
@@ -197,7 +197,7 @@ public class ProjectManager {
             project.setDescription(newDesc);
 
             if (project.getUuid() == null)
-                project.updateVersionAndRandomUuid();
+                project.updateRandomUuid();
 
             updateProject(project);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
index 090efce..251f7c9 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
@@ -85,7 +85,7 @@ public class HybridInstance extends RootPersistentEntity implements IRealization
         hybridInstance.setConfig(config);
         hybridInstance.setName(name);
         hybridInstance.setRealizationEntries(realizationEntries);
-        hybridInstance.updateVersionAndRandomUuid();
+        hybridInstance.updateRandomUuid();
 
         return hybridInstance;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/invertedindex/src/main/java/org/apache/kylin/invertedindex/IIInstance.java
----------------------------------------------------------------------
diff --git a/invertedindex/src/main/java/org/apache/kylin/invertedindex/IIInstance.java b/invertedindex/src/main/java/org/apache/kylin/invertedindex/IIInstance.java
index 117bc02..9b56c88 100644
--- a/invertedindex/src/main/java/org/apache/kylin/invertedindex/IIInstance.java
+++ b/invertedindex/src/main/java/org/apache/kylin/invertedindex/IIInstance.java
@@ -60,7 +60,7 @@ public class IIInstance extends RootPersistentEntity implements IRealization, IB
         iii.setDescName(iiDesc.getName());
         iii.setCreateTimeUTC(System.currentTimeMillis());
         iii.setStatus(RealizationStatusEnum.DISABLED);
-        iii.updateVersionAndRandomUuid();
+        iii.updateRandomUuid();
 
         return iii;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 4741cef..9afa750 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -308,7 +308,7 @@ public class CubeController extends BasicController {
                 isStreamingCube = true;
                 newStreamingConfig = streamingConfigs.get(0).clone();
                 newStreamingConfig.setName(newCubeName + "_STREAMING");
-                newStreamingConfig.updateVersionAndRandomUuid();
+                newStreamingConfig.updateRandomUuid();
                 newStreamingConfig.setLastModified(0);
                 newStreamingConfig.setCubeName(newCubeName);
                 try {
@@ -327,7 +327,7 @@ public class CubeController extends BasicController {
                         newKafkaConfig = kafkaConfig.clone();
                         newKafkaConfig.setName(newStreamingConfig.getName());
                         newKafkaConfig.setLastModified(0);
-                        newKafkaConfig.updateVersionAndRandomUuid();
+                        newKafkaConfig.updateRandomUuid();
                     }
                 } catch (IOException e) {
                     throw new InternalErrorException("Failed to get kafka config info. ", e);

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab9d5791/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
index 1f46369..c55bde4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
@@ -147,7 +147,7 @@ public class ExtendCubeToHybridCLI {
         CubeInstance newCubeInstance = CubeInstance.getCopyOf(cubeInstance);
         newCubeInstance.setName(newCubeInstanceName);
         newCubeInstance.setDescName(newCubeDescName);
-        newCubeInstance.updateVersionAndRandomUuid();
+        newCubeInstance.updateRandomUuid();
         Iterator<CubeSegment> segmentIterator = newCubeInstance.getSegments().iterator();
         CubeSegment currentSeg = null;
         while (segmentIterator.hasNext()) {
@@ -170,7 +170,7 @@ public class ExtendCubeToHybridCLI {
         // create new cube for old segments
         CubeDesc newCubeDesc = CubeDesc.getCopyOf(cubeDesc);
         newCubeDesc.setName(newCubeDescName);
-        newCubeDesc.updateVersionAndRandomUuid();
+        newCubeDesc.updateRandomUuid();
         newCubeDesc.init(kylinConfig, metadataManager.getAllTablesMap());
         newCubeDesc.setPartitionDateEnd(partitionDate);
         newCubeDesc.calculateSignature();


[15/50] [abbrv] kylin git commit: KYLIN-1445 Check HIVE_CONF directory before startup Kylin instance

Posted by li...@apache.org.
KYLIN-1445 Check HIVE_CONF directory before startup Kylin instance


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4d6043f1
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4d6043f1
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4d6043f1

Branch: refs/heads/master
Commit: 4d6043f1c66a358f3c5e57e9cb14861401f09b28
Parents: 9c77a5e
Author: lidongsjtu <li...@apache.org>
Authored: Wed Feb 24 15:42:52 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Mon Feb 29 19:27:13 2016 +0800

----------------------------------------------------------------------
 build/bin/find-hive-dependency.sh | 24 +++++++++++++++++++++---
 1 file changed, 21 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/4d6043f1/build/bin/find-hive-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hive-dependency.sh b/build/bin/find-hive-dependency.sh
index 171c5b1..87f18dd 100644
--- a/build/bin/find-hive-dependency.sh
+++ b/build/bin/find-hive-dependency.sh
@@ -32,6 +32,13 @@ hive_classpath=`echo $hive_env | grep 'env:CLASSPATH' | awk -F '=' '{print $2}'`
 arr=(`echo $hive_classpath | cut -d ":"  --output-delimiter=" " -f 1-`)
 hive_conf_path=
 hive_exec_path=
+
+if [ -n "$HIVE_CONF" ]
+then
+    echo "HIVE_CONF is set to: $HIVE_CONF, use it to locate hive configurations."
+    hive_conf_path=$HIVE_CONF
+fi
+
 for data in ${arr[@]}
 do
     result=`echo $data | grep -e 'hive-exec[a-z0-9A-Z\.-]*jar'`
@@ -39,13 +46,24 @@ do
     then
         hive_exec_path=$data
     fi
-    result=`echo $data | grep -e 'hive[^/]*/conf'`
-    if [ $result ]
+
+    # in some versions of hive config is not in hive's classpath, find it separately
+    if [ -z "$hive_conf_path" ]
     then
-        hive_conf_path=$data
+        result=`echo $data | grep -e 'hive[^/]*/conf'`
+        if [ $result ]
+        then
+            hive_conf_path=$data
+        fi
     fi
 done
 
+if [ -z "$hive_conf_path" ]
+then
+    echo "Couldn't find hive configuration directory. Please set HIVE_CONF to the path which contains hive-site.xml."
+    exit 1
+fi
+
 # in some versions of hive hcatalog is not in hive's classpath, find it separately
 if [ -z "$HCAT_HOME" ]
 then
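
A practical consequence of this check: on distributions where Hive's conf directory is not on Hive's
reported classpath, Kylin startup now fails fast unless HIVE_CONF is exported first (for example,
export HIVE_CONF=/etc/hive/conf, the exact path being installation-specific) instead of proceeding
without hive-site.xml.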


[31/50] [abbrv] kylin git commit: KYLIN-1465 log improvement on others

Posted by li...@apache.org.
KYLIN-1465 log improvement on others


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/b9a3418b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/b9a3418b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/b9a3418b

Branch: refs/heads/master
Commit: b9a3418b7f2121745090bb6463e7be754babfb73
Parents: b41c446
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri Mar 4 15:59:37 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Fri Mar 4 16:01:04 2016 +0800

----------------------------------------------------------------------
 .../kylin/cube/gridtable/CubeGridTable.java     |  6 ++---
 .../gridtable/NotEnoughGTInfoException.java     | 23 --------------------
 .../kylin/gridtable/GTScanReqSerDerTest.java    | 11 +++++-----
 .../kylin/storage/translate/HBaseKeyRange.java  |  4 +---
 .../streaming/cube/StreamingCubeBuilder.java    |  6 +++--
 .../hbase/cube/v1/CubeSegmentTupleIterator.java | 14 ++++++------
 .../storage/hbase/cube/v1/CubeStorageQuery.java |  6 ++---
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     | 18 +++++++--------
 .../storage/hbase/cube/v2/CubeHBaseRPC.java     |  8 +++----
 .../hbase/cube/v2/CubeSegmentScanner.java       |  3 +--
 .../storage/hbase/cube/v2/CubeStorageQuery.java | 10 +++------
 .../kylin/storage/hbase/cube/v2/RawScan.java    |  2 +-
 12 files changed, 41 insertions(+), 70 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeGridTable.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeGridTable.java b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeGridTable.java
index 05fc8a5..5f0bb07 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeGridTable.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeGridTable.java
@@ -29,18 +29,18 @@ public class CubeGridTable {
                 dictionaryMap.put(col, dictionary);
             }
         }
-        
+
         return dictionaryMap;
     }
 
-    public static GTInfo newGTInfo(CubeSegment cubeSeg, long cuboidId) throws NotEnoughGTInfoException {
+    public static GTInfo newGTInfo(CubeSegment cubeSeg, long cuboidId) {
         Map<TblColRef, Dictionary<String>> dictionaryMap = getDimensionToDictionaryMap(cubeSeg, cuboidId);
         Cuboid cuboid = Cuboid.findById(cubeSeg.getCubeDesc(), cuboidId);
         for (TblColRef dim : cuboid.getColumns()) {
             if (cubeSeg.getCubeDesc().getRowkey().isUseDictionary(dim)) {
                 Dictionary dict = dictionaryMap.get(dim);
                 if (dict == null) {
-                    throw new NotEnoughGTInfoException();
+                    throw new RuntimeException("Dictionary for " + dim + " is not found");
                 }
             }
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/NotEnoughGTInfoException.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/NotEnoughGTInfoException.java b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/NotEnoughGTInfoException.java
deleted file mode 100644
index 9bbcf75..0000000
--- a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/NotEnoughGTInfoException.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.cube.gridtable;
-
-@SuppressWarnings("serial")
-public class NotEnoughGTInfoException extends Exception {
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/core-cube/src/test/java/org/apache/kylin/gridtable/GTScanReqSerDerTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/GTScanReqSerDerTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/GTScanReqSerDerTest.java
index 6642d95..77cc2d8 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/GTScanReqSerDerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/GTScanReqSerDerTest.java
@@ -29,7 +29,6 @@ import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.cube.gridtable.CubeGridTable;
-import org.apache.kylin.cube.gridtable.NotEnoughGTInfoException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -73,7 +72,7 @@ public class GTScanReqSerDerTest extends LocalFileMetadataTestCase {
         buffer.flip();
 
         GTInfo sInfo = GTInfo.serializer.deserialize(buffer);
-        this.compareTwoGTInfo(info,sInfo);
+        this.compareTwoGTInfo(info, sInfo);
     }
 
     @Test
@@ -83,11 +82,11 @@ public class GTScanReqSerDerTest extends LocalFileMetadataTestCase {
         buffer.flip();
 
         GTInfo sInfo = GTInfo.serializer.deserialize(buffer);
-        this.compareTwoGTInfo(info,sInfo);
+        this.compareTwoGTInfo(info, sInfo);
     }
 
     @Test
-    public void testGTInfo() throws NotEnoughGTInfoException {
+    public void testGTInfo() {
         CubeInstance cube = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube("test_kylin_cube_with_slr_ready");
         CubeSegment segment = cube.getFirstSegment();
 
@@ -95,8 +94,8 @@ public class GTScanReqSerDerTest extends LocalFileMetadataTestCase {
         GTInfo.serializer.serialize(info, buffer);
         buffer.flip();
 
-        GTInfo sInfo = GTInfo.serializer.deserialize(buffer); 
-        this.compareTwoGTInfo(info,sInfo);
+        GTInfo sInfo = GTInfo.serializer.deserialize(buffer);
+        this.compareTwoGTInfo(info, sInfo);
     }
 
     private void compareTwoGTInfo(GTInfo info, GTInfo sInfo) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/core-storage/src/main/java/org/apache/kylin/storage/translate/HBaseKeyRange.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/translate/HBaseKeyRange.java b/core-storage/src/main/java/org/apache/kylin/storage/translate/HBaseKeyRange.java
index a5ca62b..4f39cf6 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/translate/HBaseKeyRange.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/translate/HBaseKeyRange.java
@@ -45,9 +45,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
 /**
- * 
  * @author xjiang
- * 
  */
 public class HBaseKeyRange implements Comparable<HBaseKeyRange> {
 
@@ -147,7 +145,7 @@ public class HBaseKeyRange implements Comparable<HBaseKeyRange> {
             buf.append(BytesUtil.toHex(fuzzyKey.getFirst()));
             buf.append(" ");
             buf.append(BytesUtil.toHex(fuzzyKey.getSecond()));
-            buf.append(System.lineSeparator());
+            buf.append(";");
         }
         this.fuzzyKeyString = buf.toString();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
index d7056cf..c4f2b7e 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
@@ -73,6 +73,7 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
     private static final Logger logger = LoggerFactory.getLogger(StreamingCubeBuilder.class);
 
     private final String cubeName;
+    private int processedRowCount = 0;
 
     public StreamingCubeBuilder(String cubeName) {
         this.cubeName = cubeName;
@@ -81,16 +82,16 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
     @Override
     public void build(StreamingBatch streamingBatch, Map<TblColRef, Dictionary<String>> dictionaryMap, ICuboidWriter cuboidWriter) {
         try {
-
             CubeManager cubeManager = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
             final CubeInstance cubeInstance = cubeManager.reloadCubeLocal(cubeName);
             LinkedBlockingQueue<List<String>> blockingQueue = new LinkedBlockingQueue<List<String>>();
             InMemCubeBuilder inMemCubeBuilder = new InMemCubeBuilder(cubeInstance.getDescriptor(), dictionaryMap);
             final Future<?> future = Executors.newCachedThreadPool().submit(inMemCubeBuilder.buildAsRunnable(blockingQueue, cuboidWriter));
+            processedRowCount = streamingBatch.getMessages().size();
             for (StreamingMessage streamingMessage : streamingBatch.getMessages()) {
                 blockingQueue.put(streamingMessage.getData());
             }
-            blockingQueue.put(Collections.<String> emptyList());
+            blockingQueue.put(Collections.<String>emptyList());
             future.get();
             cuboidWriter.flush();
 
@@ -157,6 +158,7 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
     public void commit(IBuildable buildable) {
         CubeSegment cubeSegment = (CubeSegment) buildable;
         cubeSegment.setStatus(SegmentStatusEnum.READY);
+        cubeSegment.setInputRecords(processedRowCount);
         CubeUpdate cubeBuilder = new CubeUpdate(cubeSegment.getCubeInstance());
         cubeBuilder.setToAddSegs(cubeSegment);
         try {

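The two hunks above add a simple bookkeeping pattern: remember how many rows the micro-batch contained before handing them to the asynchronous builder, then stamp that count onto the segment when it is committed, so empty segments become detectable downstream. A minimal sketch of the pattern follows; SegmentLike is a hypothetical stand-in for CubeSegment's setInputRecords setter.

    import java.util.Collections;
    import java.util.List;
    import java.util.concurrent.BlockingQueue;

    // Hypothetical stand-in for the relevant slice of CubeSegment.
    interface SegmentLike {
        void setInputRecords(long count);
    }

    class CountingBatchBuilder {
        private int processedRowCount = 0;

        void build(List<List<String>> messages, BlockingQueue<List<String>> queue) throws InterruptedException {
            // Capture the batch size up front; the queue is drained by another thread.
            processedRowCount = messages.size();
            for (List<String> row : messages) {
                queue.put(row);
            }
            queue.put(Collections.<String>emptyList()); // empty list marks end of batch
        }

        void commit(SegmentLike segment) {
            // Stamp the count on the segment so empty segments are detectable later.
            segment.setInputRecords(processedRowCount);
        }
    }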
http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
index 5e842f7..909de39 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
@@ -210,26 +210,26 @@ public class CubeSegmentTupleIterator implements ITupleIterator {
 
     private void logScan(HBaseKeyRange keyRange) {
         StringBuilder info = new StringBuilder();
-        info.append("\nScan hbase table ").append(tableName).append(": ");
+        info.append(" Scan hbase table ").append(tableName).append(": ");
         if (keyRange.getCuboid().requirePostAggregation()) {
-            info.append("cuboid require post aggregation, from ");
+            info.append(" cuboid require post aggregation, from ");
         } else {
-            info.append("cuboid exact match, from ");
+            info.append(" cuboid exact match, from ");
         }
         info.append(keyRange.getCuboid().getInputID());
         info.append(" to ");
         info.append(keyRange.getCuboid().getId());
-        info.append("\nStart: ");
+        info.append(" Start: ");
         info.append(keyRange.getStartKeyAsString());
         info.append(" - ");
         info.append(Bytes.toStringBinary(keyRange.getStartKey()));
-        info.append("\nStop:  ");
+        info.append(" Stop:  ");
         info.append(keyRange.getStopKeyAsString());
         info.append(" - ");
         info.append(Bytes.toStringBinary(keyRange.getStopKey()));
         if (this.scan.getFilter() != null) {
-            info.append("\nFuzzy key counts: " + keyRange.getFuzzyKeys().size());
-            info.append("\nFuzzy: ");
+            info.append(" Fuzzy key counts: " + keyRange.getFuzzyKeys().size());
+            info.append(" Fuzzy: ");
             info.append(keyRange.getFuzzyKeyAsString());
         }
         logger.info(info.toString());

http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
index 1b8b586..3d7f620 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
@@ -493,7 +493,7 @@ public class CubeStorageQuery implements ICachableStorageQuery {
         List<Collection<ColumnValueRange>> result = Lists.newArrayList();
 
         if (flatFilter == null) {
-            result.add(Collections.<ColumnValueRange> emptyList());
+            result.add(Collections.<ColumnValueRange>emptyList());
             return result;
         }
 
@@ -535,7 +535,7 @@ public class CubeStorageQuery implements ICachableStorageQuery {
         }
         if (globalAlwaysTrue) {
             orAndRanges.clear();
-            orAndRanges.add(Collections.<ColumnValueRange> emptyList());
+            orAndRanges.add(Collections.<ColumnValueRange>emptyList());
         }
         return orAndRanges;
     }
@@ -762,7 +762,7 @@ public class CubeStorageQuery implements ICachableStorageQuery {
     private void setLimit(TupleFilter filter, StorageContext context) {
         boolean goodAggr = context.isExactAggregation();
         boolean goodFilter = filter == null || (TupleFilter.isEvaluableRecursively(filter) && context.isCoprocessorEnabled());
-        boolean goodSort = context.hasSort() == false;
+        boolean goodSort = !context.hasSort();
         if (goodAggr && goodFilter && goodSort) {
             logger.info("Enable limit " + context.getLimit());
             context.enableLimit();

http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index f22964f..e6f9ac1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -293,15 +293,15 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
         final AtomicInteger totalScannedCount = new AtomicInteger(0);
         final ExpectedSizeIterator epResultItr = new ExpectedSizeIterator(scanRequests.size() * shardNum);
+        final String currentThreadName = Thread.currentThread().getName();
 
         for (final Pair<byte[], byte[]> epRange : getEPKeyRanges(cuboidBaseShard, shardNum, totalShards)) {
             executorService.submit(new Runnable() {
                 @Override
                 public void run() {
                     for (int i = 0; i < scanRequests.size(); ++i) {
-                        int scanIndex = i;
                         CubeVisitProtos.CubeVisitRequest.Builder builder = CubeVisitProtos.CubeVisitRequest.newBuilder();
-                        builder.setGtScanRequest(scanRequestByteStrings.get(scanIndex)).setHbaseRawScan(rawScanByteStrings.get(scanIndex));
+                        builder.setGtScanRequest(scanRequestByteStrings.get(i)).setHbaseRawScan(rawScanByteStrings.get(i));
                         for (IntList intList : hbaseColumnsToGTIntList) {
                             builder.addHbaseColumnsToGT(intList);
                         }
@@ -317,7 +317,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
                         for (Map.Entry<byte[], CubeVisitProtos.CubeVisitResponse> result : results.entrySet()) {
                             totalScannedCount.addAndGet(result.getValue().getStats().getScannedRowCount());
-                            logger.info(getStatsString(result));
+                            logger.info("<spawned by " + currentThreadName + ">" + getStatsString(result));
                             try {
                                 epResultItr.append(CompressionUtils.decompress(HBaseZeroCopyByteString.zeroCopyGetBytes(result.getValue().getCompressedRows())));
                             } catch (IOException | DataFormatException e) {
@@ -335,12 +335,12 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
     private String getStatsString(Map.Entry<byte[], CubeVisitProtos.CubeVisitResponse> result) {
         StringBuilder sb = new StringBuilder();
         Stats stats = result.getValue().getStats();
-        sb.append("Endpoint RPC returned from HTable " + cubeSeg.getStorageLocationIdentifier() + " Shard " + BytesUtil.toHex(result.getKey()) + " on host: " + stats.getHostname() + ".");
-        sb.append("Total scanned row: " + stats.getScannedRowCount() + ". ");
-        sb.append("Total filtered/aggred row: " + stats.getAggregatedRowCount() + ". ");
-        sb.append("Time elapsed in EP: " + (stats.getServiceEndTime() - stats.getServiceStartTime()) + "(ms). ");
-        sb.append("Server CPU usage: " + stats.getSystemCpuLoad() + ", server physical mem left: " + stats.getFreePhysicalMemorySize() + ", server swap mem left:" + stats.getFreeSwapSpaceSize() + ".");
-        sb.append("Etc message: " + stats.getEtcMsg() + ".");
+        sb.append("Endpoint RPC returned from HTable ").append(cubeSeg.getStorageLocationIdentifier()).append(" Shard ").append(BytesUtil.toHex(result.getKey())).append(" on host: ").append(stats.getHostname()).append(".");
+        sb.append("Total scanned row: ").append(stats.getScannedRowCount()).append(". ");
+        sb.append("Total filtered/aggred row: ").append(stats.getAggregatedRowCount()).append(". ");
+        sb.append("Time elapsed in EP: ").append(stats.getServiceEndTime() - stats.getServiceStartTime()).append("(ms). ");
+        sb.append("Server CPU usage: ").append(stats.getSystemCpuLoad()).append(", server physical mem left: ").append(stats.getFreePhysicalMemorySize()).append(", server swap mem left:").append(stats.getFreeSwapSpaceSize()).append(".");
+        sb.append("Etc message: ").append(stats.getEtcMsg()).append(".");
         return sb.toString();
 
     }

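One detail above deserves a note: the scan fan-out runs on executor threads, so a log line written inside the Runnable would carry the pool worker's name rather than the query thread that issued the scan. Capturing Thread.currentThread().getName() before submission keeps the endpoint stats correlatable with the originating query. A minimal sketch of the idiom:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    public class SpawnerTaggedLogs {
        public static void main(String[] args) throws InterruptedException {
            ExecutorService pool = Executors.newCachedThreadPool();
            // Capture the submitting thread's name now; inside run() the current
            // thread is a pool worker such as "pool-1-thread-1".
            final String spawner = Thread.currentThread().getName();
            pool.submit(new Runnable() {
                @Override
                public void run() {
                    System.out.println("<spawned by " + spawner + "> stats from " + Thread.currentThread().getName());
                }
            });
            pool.shutdown();
            pool.awaitTermination(5, TimeUnit.SECONDS);
        }
    }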
http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
index db39455..eb5ac9f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
@@ -273,20 +273,20 @@ public abstract class CubeHBaseRPC {
         info.append(cuboid.getInputID());
         info.append(" to ");
         info.append(cuboid.getId());
-        info.append("\nStart: ");
+        info.append(" Start: ");
         info.append(rawScan.getStartKeyAsString());
         info.append(" (");
         info.append(Bytes.toStringBinary(rawScan.startKey) + ")");
-        info.append("\nStop:  ");
+        info.append(" Stop:  ");
         info.append(rawScan.getEndKeyAsString());
         info.append(" (");
         info.append(Bytes.toStringBinary(rawScan.endKey) + ")");
         if (rawScan.fuzzyKeys != null && rawScan.fuzzyKeys.size() != 0) {
-            info.append("\nFuzzy key counts: " + rawScan.fuzzyKeys.size());
+            info.append(" Fuzzy key counts: " + rawScan.fuzzyKeys.size());
             info.append(". Fuzzy keys : ");
             info.append(rawScan.getFuzzyKeyAsString());
         } else {
-            info.append("\nNo Fuzzy Key");
+            info.append(", No Fuzzy Key");
         }
         logger.info(info.toString());
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
index 3f00566..ee5ae96 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
@@ -21,7 +21,6 @@ import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.cube.gridtable.CubeGridTable;
 import org.apache.kylin.cube.gridtable.CuboidToGridTableMapping;
-import org.apache.kylin.cube.gridtable.NotEnoughGTInfoException;
 import org.apache.kylin.cube.model.CubeDesc;
 import org.apache.kylin.dict.TupleFilterFunctionTransformer;
 import org.apache.kylin.gridtable.EmptyGTScanner;
@@ -56,7 +55,7 @@ public class CubeSegmentScanner implements IGTScanner {
     final Cuboid cuboid;
 
     public CubeSegmentScanner(CubeSegment cubeSeg, Cuboid cuboid, Set<TblColRef> dimensions, Set<TblColRef> groups, //
-            Collection<FunctionDesc> metrics, TupleFilter filter, boolean allowPreAggregate) throws NotEnoughGTInfoException {
+            Collection<FunctionDesc> metrics, TupleFilter filter, boolean allowPreAggregate) {
         this.cuboid = cuboid;
         this.cubeSeg = cubeSeg;
         this.info = CubeGridTable.newGTInfo(cubeSeg, cuboid.getId());

http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeStorageQuery.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeStorageQuery.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeStorageQuery.java
index ab8c80f..df0bb84 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeStorageQuery.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeStorageQuery.java
@@ -11,7 +11,6 @@ import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
-import org.apache.kylin.cube.gridtable.NotEnoughGTInfoException;
 import org.apache.kylin.cube.model.CubeDesc;
 import org.apache.kylin.cube.model.CubeDesc.DeriveInfo;
 import org.apache.kylin.dict.lookup.LookupStringTable;
@@ -96,14 +95,11 @@ public class CubeStorageQuery implements ICachableStorageQuery {
         List<CubeSegmentScanner> scanners = Lists.newArrayList();
         for (CubeSegment cubeSeg : cubeInstance.getSegments(SegmentStatusEnum.READY)) {
             CubeSegmentScanner scanner;
-            try {
-                scanner = new CubeSegmentScanner(cubeSeg, cuboid, dimensionsD, groupsD, metrics, filterD, !isExactAggregation);
-            } catch (NotEnoughGTInfoException e) {
-                //deal with empty cube segment
-                logger.info("Cannot construct Segment {}'s GTInfo, this may due to empty segment or broken metadata", cubeSeg);
-                logger.info("error stack", e);
+            if (cubeSeg.getInputRecords() == 0) {
+                logger.info("Skip cube segment {} because its input record is 0", cubeSeg);
                 continue;
             }
+            scanner = new CubeSegmentScanner(cubeSeg, cuboid, dimensionsD, groupsD, metrics, filterD, !isExactAggregation);
             scanners.add(scanner);
         }
 

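This hunk is the consumer side of the input-record count stamped at segment commit (see the StreamingCubeBuilder change earlier in this commit): rather than constructing a scanner and catching NotEnoughGTInfoException for an empty segment, the query path now filters such segments out up front. A minimal sketch of the guard, with Segment as a hypothetical stand-in for CubeSegment:

    import java.util.ArrayList;
    import java.util.List;

    public class EmptySegmentFilter {
        // Hypothetical minimal shape; CubeSegment exposes getInputRecords() similarly.
        static class Segment {
            final long inputRecords;
            Segment(long inputRecords) { this.inputRecords = inputRecords; }
            long getInputRecords() { return inputRecords; }
        }

        static List<Segment> scannable(List<Segment> segments) {
            List<Segment> result = new ArrayList<Segment>();
            for (Segment seg : segments) {
                if (seg.getInputRecords() == 0) {
                    continue; // skip empty segment instead of failing during scanner setup
                }
                result.add(seg);
            }
            return result;
        }
    }

Checking a cheap precomputed counter also avoids the cost of building a GTInfo for a segment that can never contribute rows.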
http://git-wip-us.apache.org/repos/asf/kylin/blob/b9a3418b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/RawScan.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/RawScan.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/RawScan.java
index 361b1dd..c2ffdba 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/RawScan.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/RawScan.java
@@ -61,7 +61,7 @@ public class RawScan {
             buf.append(BytesUtil.toHex(fuzzyKey.getFirst()));
             buf.append(" ");
             buf.append(BytesUtil.toHex(fuzzyKey.getSecond()));
-            buf.append(System.lineSeparator());
+            buf.append(";");
         }
         return buf.toString();
     }


[16/50] [abbrv] kylin git commit: minor, increase maxperm to avoid VM crash

Posted by li...@apache.org.
minor, increase maxperm to avoid VM crash


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/5474fe4e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/5474fe4e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/5474fe4e

Branch: refs/heads/master
Commit: 5474fe4e887db263df0da7837f3fd9b022f18a79
Parents: 4d6043f
Author: Hongbin Ma <ma...@apache.org>
Authored: Tue Mar 1 09:49:56 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Mar 1 09:51:12 2016 +0800

----------------------------------------------------------------------
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/5474fe4e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index cb1e49e..42a0c6d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -668,7 +668,7 @@
                                     <value>false</value>
                                 </property>
                             </systemProperties>
-                            <argLine>-Xmx4G -XX:MaxPermSize=256M</argLine>
+                            <argLine>-Xmx4G -XX:MaxPermSize=512M</argLine>
                         </configuration>
                     </plugin>
 


[03/50] [abbrv] kylin git commit: KYLIN-1337 fix Number sort issue

Posted by li...@apache.org.
KYLIN-1337 fix Number sort issue


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0b48e39c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0b48e39c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0b48e39c

Branch: refs/heads/master
Commit: 0b48e39cc8e7d9e462ff0da4f45091a587409104
Parents: 1fbc422
Author: janzhongi <ji...@ebay.com>
Authored: Fri Feb 26 13:19:51 2016 +0800
Committer: janzhongi <ji...@ebay.com>
Committed: Fri Feb 26 13:19:51 2016 +0800

----------------------------------------------------------------------
 webapp/app/js/filters/filter.js   | 6 +++++-
 webapp/app/js/model/cubeConfig.js | 2 +-
 2 files changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0b48e39c/webapp/app/js/filters/filter.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/filters/filter.js b/webapp/app/js/filters/filter.js
index f18b6f6..4e8d210 100755
--- a/webapp/app/js/filters/filter.js
+++ b/webapp/app/js/filters/filter.js
@@ -54,7 +54,11 @@ KylinApp
           result = 1;
         }
         else {
-          result = attriOfA.toLowerCase() > attriOfB.toLowerCase() ? 1 : attriOfA.toLowerCase() < attriOfB.toLowerCase() ? -1 : 0;
+          if(!isNaN(attriOfA)||!isNaN(attriOfB)){
+            result = attriOfA > attriOfB ? 1 : attriOfA < attriOfB ? -1 : 0;
+          }else{
+            result = attriOfA.toLowerCase() > attriOfB.toLowerCase() ? 1 : attriOfA.toLowerCase() < attriOfB.toLowerCase() ? -1 : 0;
+          }
         }
         return reverse ? -result : result;
       });

http://git-wip-us.apache.org/repos/asf/kylin/blob/0b48e39c/webapp/app/js/model/cubeConfig.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/model/cubeConfig.js b/webapp/app/js/model/cubeConfig.js
index fe7e193..b85789c 100644
--- a/webapp/app/js/model/cubeConfig.js
+++ b/webapp/app/js/model/cubeConfig.js
@@ -69,7 +69,7 @@ KylinApp.constant('cubeConfig', {
     {attr: 'input_records_count', name: 'Source Records'},
     {attr: 'last_build_time', name: 'Last Build Time'},
     {attr: 'owner', name: 'Owner'},
-    {attr: 'create_time', name: 'Create Time'}
+    {attr: 'create_time_utc', name: 'Create Time'}
   ],
   streamingAutoGenerateMeasure:[
     {name:"year_start",type:"date"},


[47/50] [abbrv] kylin git commit: minor, update find hive dependency script to avoid soft links of jars

Posted by li...@apache.org.
minor, update find hive dependency script to avoid soft links of jars


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/26233f79
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/26233f79
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/26233f79

Branch: refs/heads/master
Commit: 26233f79bf7fdc3fa1de9ce82e36d0baf47dcd20
Parents: f73abf6
Author: lidongsjtu <li...@apache.org>
Authored: Wed Mar 9 16:14:11 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Wed Mar 9 16:16:37 2016 +0800

----------------------------------------------------------------------
 build/bin/find-hive-dependency.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/26233f79/build/bin/find-hive-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hive-dependency.sh b/build/bin/find-hive-dependency.sh
index 87f18dd..1650ec0 100644
--- a/build/bin/find-hive-dependency.sh
+++ b/build/bin/find-hive-dependency.sh
@@ -41,7 +41,7 @@ fi
 
 for data in ${arr[@]}
 do
-    result=`echo $data | grep -e 'hive-exec[a-z0-9A-Z\.-]*jar'`
+    result=`echo $data | grep -e 'hive-exec[a-z0-9A-Z\.-]*.jar'`
     if [ $result ]
     then
         hive_exec_path=$data
@@ -82,7 +82,7 @@ else
     hcatalog_home=${HCAT_HOME}
 fi
 
-hcatalog=`find -L ${hcatalog_home} -name "hive-hcatalog-core[0-9\.-]*jar" 2>&1 | grep -m 1 -v 'Permission denied'`
+hcatalog=`find -L ${hcatalog_home} -name "hive-hcatalog-core[0-9\.-]*.jar" 2>&1 | grep -m 1 -v 'Permission denied'`
 
 if [ -z "$hcatalog" ]
 then


[28/50] [abbrv] kylin git commit: minor, UI fix kylin properties format

Posted by li...@apache.org.
minor, UI fix kylin properties format


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/8a3e0e0e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/8a3e0e0e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/8a3e0e0e

Branch: refs/heads/master
Commit: 8a3e0e0e869fd4ec52a847fa360ec3d3c3e5a5bf
Parents: daeaf08
Author: Jason <ji...@163.com>
Authored: Fri Mar 4 10:57:46 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Fri Mar 4 10:58:24 2016 +0800

----------------------------------------------------------------------
 examples/test_case_data/sandbox/kylin.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/8a3e0e0e/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 7c9919b..04681af 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -116,5 +116,5 @@ kylin.web.contact_mail=
 #env DEV|QA|PROD
 deploy.env=DEV
 
- kylin.web.hive.limit=20
+kylin.web.hive.limit=20
 


[20/50] [abbrv] kylin git commit: KYLIN-1074 support load hive table from listed tree.

Posted by li...@apache.org.
KYLIN-1074 support load hive table from listed tree.


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/bc7d4f58
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/bc7d4f58
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/bc7d4f58

Branch: refs/heads/master
Commit: bc7d4f5846d52a17873738047e117e9410d17823
Parents: cf05409
Author: Jason <ji...@163.com>
Authored: Wed Mar 2 15:18:31 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Wed Mar 2 15:18:55 2016 +0800

----------------------------------------------------------------------
 ...port-load-hive-table-from-listed-tree-.patch | 864 +++++++++++++++++++
 build/conf/kylin.properties                     |   2 +
 .../test_case_data/sandbox/kylin.properties     |   1 +
 pom.xml                                         |   1 +
 .../kylin/rest/controller/TableController.java  |  44 +
 .../apache/kylin/source/hive/HiveClient.java    |   8 +
 webapp/app/index.html                           |   1 +
 webapp/app/js/controllers/sourceMeta.js         | 185 +++-
 .../app/js/directives/angular-tree-control.js   | 363 ++++++++
 webapp/app/js/services/kylinProperties.js       |  12 +-
 webapp/app/js/services/tables.js                |   6 +-
 .../app/partials/tables/source_table_tree.html  |  26 +
 webapp/bower.json                               |   3 +-
 webapp/grunt.json                               |   1 -
 14 files changed, 1509 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


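The server-side core of this feature, visible in the patch below, is a pair of read-only REST endpoints (GET /tables/hive for databases, GET /tables/hive/{database} for tables in one database) that delegate to two new HiveClient methods backed by the Hive metastore client. A minimal sketch of the underlying metastore calls, assuming hive-site.xml is on the classpath so HiveConf can locate the metastore; error handling omitted:

    import java.util.List;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

    public class MetastoreBrowse {
        public static void main(String[] args) throws Exception {
            HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
            List<String> databases = client.getAllDatabases();   // backs GET /tables/hive
            for (String db : databases) {
                List<String> tables = client.getAllTables(db);   // backs GET /tables/hive/{database}
                System.out.println(db + ": " + tables.size() + " tables");
            }
            client.close();
        }
    }

The web UI then pages through these lists client-side, which is why the patch also introduces the kylin.web.hive.limit property.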
http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
----------------------------------------------------------------------
diff --git a/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch b/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
new file mode 100644
index 0000000..31cc017
--- /dev/null
+++ b/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
@@ -0,0 +1,864 @@
+From 1a79ef1aec557259f9611f5b3199c2e90400be77 Mon Sep 17 00:00:00 2001
+From: Jason <ji...@163.com>
+Date: Wed, 2 Mar 2016 14:40:19 +0800
+Subject: [PATCH] KYLIN-1074 support load hive table from listed tree, patch
+ from @nichunen
+
+---
+ build/conf/kylin.properties                        |   2 +
+ examples/test_case_data/sandbox/kylin.properties   |   1 +
+ pom.xml                                            |   2 +
+ .../kylin/rest/controller/TableController.java     |  44 +++
+ .../org/apache/kylin/source/hive/HiveClient.java   |   8 +
+ webapp/app/index.html                              |   1 +
+ webapp/app/js/controllers/sourceMeta.js            | 186 ++++++++++-
+ webapp/app/js/directives/angular-tree-control.js   | 363 +++++++++++++++++++++
+ webapp/app/js/services/kylinProperties.js          |  15 +-
+ webapp/app/js/services/tables.js                   |   7 +-
+ webapp/app/partials/tables/source_table_tree.html  |  26 ++
+ webapp/bower.json                                  |   3 +-
+ webapp/grunt.json                                  |   1 -
+ 13 files changed, 649 insertions(+), 10 deletions(-)
+ create mode 100644 webapp/app/js/directives/angular-tree-control.js
+
+diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
+index a4b8c3b..e8add7c 100644
+--- a/build/conf/kylin.properties
++++ b/build/conf/kylin.properties
+@@ -158,3 +158,5 @@ deploy.env=DEV
+ 
+ ###########################deprecated configs#######################
+ kylin.sandbox=true
++
++kylin.web.hive.limit=20
+\ No newline at end of file
+diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
+index 9451b78..1a74b80 100644
+--- a/examples/test_case_data/sandbox/kylin.properties
++++ b/examples/test_case_data/sandbox/kylin.properties
+@@ -131,3 +131,4 @@ kylin.web.contact_mail=
+ deploy.env=DEV
+ 
+ 
++kylin.web.hive.limit=20
+\ No newline at end of file
+diff --git a/pom.xml b/pom.xml
+index 9d9a54b..537693f 100644
+--- a/pom.xml
++++ b/pom.xml
+@@ -774,6 +774,8 @@
+                                 <!-- MIT license -->
+                                 <exclude>webapp/app/css/AdminLTE.css</exclude>
+                                 <exclude>webapp/app/js/directives/kylin_abn_tree_directive.js</exclude>
++                                <exclude>webapp/app/js/directives/angular-tree-control.js</exclude>
++
+ 
+                                 <!--configuration file -->
+                                 <exclude>webapp/app/routes.json</exclude>
+diff --git a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
+index 39af7db..ea5fdd4 100644
+--- a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
++++ b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
+@@ -33,6 +33,7 @@ import org.apache.kylin.rest.request.CardinalityRequest;
+ import org.apache.kylin.rest.request.StreamingRequest;
+ import org.apache.kylin.rest.response.TableDescResponse;
+ import org.apache.kylin.rest.service.CubeService;
++import org.apache.kylin.source.hive.HiveClient;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import org.springframework.beans.factory.annotation.Autowired;
+@@ -205,6 +206,49 @@ public class TableController extends BasicController {
+         return descs;
+     }
+ 
++    /**
++     * Show all databases in Hive
++     *
++     * @return Hive databases list
++     * @throws IOException
++     */
++    @RequestMapping(value = "/hive", method = { RequestMethod.GET })
++    @ResponseBody
++    private static List<String> showHiveDatabases() throws IOException {
++        HiveClient hiveClient = new HiveClient();
++        List<String> results = null;
++
++        try {
++            results = hiveClient.getHiveDbNames();
++        } catch (Exception e) {
++            e.printStackTrace();
++            throw new IOException(e);
++        }
++        return results;
++    }
++
++    /**
++     * Show all tables in a Hive database
++     *
++     * @return Hive table list
++     * @throws IOException
++     */
++    @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET })
++    @ResponseBody
++    private static List<String> showHiveTables(@PathVariable String database) throws IOException {
++        HiveClient hiveClient = new HiveClient();
++        List<String> results = null;
++
++        try {
++            results = hiveClient.getHiveTableNames(database);
++        } catch (Exception e) {
++            e.printStackTrace();
++            throw new IOException(e);
++        }
++        return results;
++    }
++
++
+     public void setCubeService(CubeService cubeService) {
+         this.cubeMgmtService = cubeService;
+     }
+diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
+index 178889e..a99b304 100644
+--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
++++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
+@@ -132,6 +132,14 @@ public class HiveClient {
+         return getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES);
+     }
+ 
++    public List<String> getHiveDbNames() throws Exception {
++        return getMetaStoreClient().getAllDatabases();
++    }
++
++    public List<String> getHiveTableNames(String database) throws Exception {
++        return getMetaStoreClient().getAllTables(database);
++    }
++
+     /**
+      * COPIED FROM org.apache.hadoop.hive.ql.stats.StatsUtil for backward compatibility
+      * 
+diff --git a/webapp/app/index.html b/webapp/app/index.html
+index 11ca283..b4eb9d7 100644
+--- a/webapp/app/index.html
++++ b/webapp/app/index.html
+@@ -113,6 +113,7 @@
+ <script src="js/filters/filter.js"></script>
+ <script src="js/directives/directives.js"></script>
+ <script src="js/directives/kylin_abn_tree_directive.js"></script>
++<script src="js/directives/angular-tree-control.js"></script>
+ <script src="js/factories/graph.js"></script>
+ <script src="js/services/cache.js"></script>
+ <script src="js/services/message.js"></script>
+diff --git a/webapp/app/js/controllers/sourceMeta.js b/webapp/app/js/controllers/sourceMeta.js
+index abdeeb8..c87d6ef 100755
+--- a/webapp/app/js/controllers/sourceMeta.js
++++ b/webapp/app/js/controllers/sourceMeta.js
+@@ -19,14 +19,14 @@
+ 'use strict';
+ 
+ KylinApp
+-  .controller('SourceMetaCtrl', function ($scope, $cacheFactory, $q, $window, $routeParams, CubeService, $modal, TableService, $route, loadingRequest, SweetAlert, tableConfig, TableModel,cubeConfig) {
++  .controller('SourceMetaCtrl', function ($scope, $cacheFactory, $q, $window, $routeParams, CubeService, $modal, TableService, $route, loadingRequest, SweetAlert, tableConfig, TableModel,cubeConfig,kylinConfig) {
+     var $httpDefaultCache = $cacheFactory.get('$http');
+     $scope.tableModel = TableModel;
+     $scope.tableModel.selectedSrcDb = [];
+     $scope.tableModel.selectedSrcTable = {};
+     $scope.window = 0.68 * $window.innerHeight;
+     $scope.tableConfig = tableConfig;
+-
++    $scope.kylinConfig = kylinConfig;
+ 
+     $scope.state = {
+       filterAttr: 'id', filterReverse: false, reverseColumn: 'id',
+@@ -100,13 +100,193 @@ KylinApp
+       });
+     };
+ 
+-    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope) {
++    $scope.openTreeModal = function () {
++      $modal.open({
++        templateUrl: 'addHiveTableFromTree.html',
++        controller: ModalInstanceCtrl,
++        resolve: {
++          tableNames: function () {
++            return $scope.tableNames;
++          },
++          projectName:function(){
++            return  $scope.projectModel.selectedProject;
++          },
++          scope: function () {
++            return $scope;
++          }
++        }
++      });
++    };
++
++    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope,kylinConfig) {
+       $scope.tableNames = "";
+       $scope.projectName = projectName;
+       $scope.cancel = function () {
+         $modalInstance.dismiss('cancel');
+       };
++
++      $scope.kylinConfig = kylinConfig;
++
++
++      $scope.treeOptions = {multiSelection: true};
++      $scope.selectedNodes = [];
++      $scope.hiveLimit =  kylinConfig.getHiveLimit();
++
++      $scope.loadHive = function () {
++        if($scope.hiveLoaded)
++          return;
++        TableService.showHiveDatabases({}, function (databases) {
++          $scope.dbNum = databases.length;
++          if (databases.length > 0) {
++            $scope.hiveMap = {};
++            for (var i = 0; i < databases.length; i++) {
++              var dbName = databases[i];
++              var hiveData = {"dbname":dbName,"tables":[],"expanded":false};
++              $scope.hive.push(hiveData);
++              $scope.hiveMap[dbName] = i;
++            }
++          }
++          $scope.hiveLoaded = true;
++          $scope.showMoreDatabases();
++        });
++      }
++
++      $scope.showMoreTables = function(hiveTables, node){
++        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
++        var from = $scope.hiveLimit * shownTimes;
++        var to = 0;
++        var hasMore = false;
++        if(from + $scope.hiveLimit > hiveTables.length) {
++          to = hiveTables.length - 1;
++        } else {
++          to = from + $scope.hiveLimit - 1;
++          hasMore = true;
++        }
++        if(!angular.isUndefined(node.children[from])){
++          node.children.pop();
++        }
++
++        for(var idx = from; idx <= to; idx++){
++          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
++        }
++
++        if(hasMore){
++          var loading = {"label":"","id":65535,"children":[]};
++          node.children.push(loading);
++        }
++      }
++
++      $scope.showAllTables = function(hiveTables, node){
++        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
++        var from = $scope.hiveLimit * shownTimes;
++        var to = hiveTables.length - 1;
++        if(!angular.isUndefined(node.children[from])){
++          node.children.pop();
++        }
++        for(var idx = from; idx <= to; idx++){
++          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
++        }
++      }
++
++      $scope.showMoreDatabases = function(){
++        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
++        var from = $scope.hiveLimit * shownTimes;
++        var to = 0;
++        var hasMore = false;
++        if(from + $scope.hiveLimit > $scope.hive.length) {
++          to = $scope.hive.length - 1;
++        } else {
++          to = from + $scope.hiveLimit - 1;
++          hasMore = true;
++        }
++        if(!angular.isUndefined($scope.treedata[from])){
++          $scope.treedata.pop();
++        }
++
++        for(var idx = from; idx <= to; idx++){
++          var children = [];
++          var loading = {"label":"","id":0,"children":[]};
++          children.push(loading);
++          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
++        }
++
++        if(hasMore){
++          var loading = {"label":"","id":65535,"children":[0]};
++          $scope.treedata.push(loading);
++        }
++      }
++
++      $scope.showAllDatabases = function(){
++        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
++        var from = $scope.hiveLimit * shownTimes;
++        var to = $scope.hive.length - 1;
++
++        if(!angular.isUndefined($scope.treedata[from])){
++          $scope.treedata.pop();
++        }
++
++        for(var idx = from; idx <= to; idx++){
++          var children = [];
++          var loading = {"label":"","id":0,"children":[]};
++          children.push(loading);
++          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
++        }
++      }
++
++      $scope.showMoreClicked = function($parentNode){
++        if($parentNode == null){
++          $scope.showMoreDatabases();
++        } else {
++          $scope.showMoreTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
++        }
++      }
++
++      $scope.showAllClicked = function($parentNode){
++        if($parentNode == null){
++          $scope.showAllDatabases();
++        } else {
++          $scope.showAllTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
++        }
++      }
++
++      $scope.showToggle = function(node) {
++        if(node.expanded == false){
++          TableService.showHiveTables({"database": node.label},function (hive_tables){
++            var tables = [];
++            for (var i = 0; i < hive_tables.length; i++) {
++              tables.push(hive_tables[i]);
++            }
++            $scope.hive[$scope.hiveMap[node.label]].tables = tables;
++            $scope.showMoreTables(tables,node);
++            node.expanded = true;
++          });
++        }
++      }
++
++      $scope.showSelected = function(node) {
++
++      }
++
++      if(angular.isUndefined($scope.hive) || angular.isUndefined($scope.hiveLoaded) || angular.isUndefined($scope.treedata) ){
++        $scope.hive = [];
++        $scope.hiveLoaded = false;
++        $scope.treedata = [];
++        $scope.loadHive();
++      }
++
++
++
++
+       $scope.add = function () {
++
++        if($scope.tableNames.length === 0 && $scope.selectedNodes.length > 0) {
++          for(var i = 0; i <  $scope.selectedNodes.length; i++){
++            if($scope.selectedNodes[i].label.indexOf(".") >= 0){
++              $scope.tableNames += ($scope.selectedNodes[i].label) += ',';
++            }
++          }
++        }
++
+         if ($scope.tableNames.trim() === "") {
+           SweetAlert.swal('', 'Please input table(s) you want to synchronize.', 'info');
+           return;
+diff --git a/webapp/app/js/directives/angular-tree-control.js b/webapp/app/js/directives/angular-tree-control.js
+new file mode 100644
+index 0000000..6fca987
+--- /dev/null
++++ b/webapp/app/js/directives/angular-tree-control.js
+@@ -0,0 +1,363 @@
++/*
++ * The MIT License (MIT)
++ *
++ * Copyright (c) 2013 Steve
++ *
++ * Permission is hereby granted, free of charge, to any person obtaining a copy of
++ * this software and associated documentation files (the "Software"), to deal in
++ * the Software without restriction, including without limitation the rights to
++ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
++ * the Software, and to permit persons to whom the Software is furnished to do so,
++ *   subject to the following conditions:
++ *
++ *   The above copyright notice and this permission notice shall be included in all
++ * copies or substantial portions of the Software.
++ *
++ *   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
++ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
++ * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
++ * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
++ * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
++ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
++ */
++
++(function ( angular ) {
++  'use strict';
++
++  angular.module( 'treeControl', [] )
++    .directive( 'treecontrol', ['$compile', function( $compile ) {
++      /**
++       * @param cssClass - the css class
++       * @param addClassProperty - should we wrap the class name with class=""
++       */
++      function classIfDefined(cssClass, addClassProperty) {
++        if (cssClass) {
++          if (addClassProperty)
++            return 'class="' + cssClass + '"';
++          else
++            return cssClass;
++        }
++        else
++          return "";
++      }
++
++      function ensureDefault(obj, prop, value) {
++        if (!obj.hasOwnProperty(prop))
++          obj[prop] = value;
++      }
++
++      return {
++        restrict: 'EA',
++        require: "treecontrol",
++        transclude: true,
++        scope: {
++          treeModel: "=",
++          selectedNode: "=?",
++          selectedNodes: "=?",
++          expandedNodes: "=?",
++          onSelection: "&",
++          onNodeToggle: "&",
++          options: "=?",
++          orderBy: "@",
++          reverseOrder: "@",
++          filterExpression: "=?",
++          filterComparator: "=?",
++          onDblclick: "&"
++        },
++        controller: ['$scope', function( $scope ) {
++
++          function defaultIsLeaf(node) {
++            return !node[$scope.options.nodeChildren] || node[$scope.options.nodeChildren].length === 0;
++          }
++
++          function shallowCopy(src, dst) {
++            if (angular.isArray(src)) {
++              dst = dst || [];
++
++              for ( var i = 0; i < src.length; i++) {
++                dst[i] = src[i];
++              }
++            } else if (angular.isObject(src)) {
++              dst = dst || {};
++
++              for (var key in src) {
++                if (hasOwnProperty.call(src, key) && !(key.charAt(0) === '$' && key.charAt(1) === '$')) {
++                  dst[key] = src[key];
++                }
++              }
++            }
++
++            return dst || src;
++          }
++          function defaultEquality(a, b) {
++            if (a === undefined || b === undefined)
++              return false;
++            a = shallowCopy(a);
++            a[$scope.options.nodeChildren] = [];
++            b = shallowCopy(b);
++            b[$scope.options.nodeChildren] = [];
++            return angular.equals(a, b);
++          }
++
++          $scope.options = $scope.options || {};
++          ensureDefault($scope.options, "multiSelection", false);
++          ensureDefault($scope.options, "nodeChildren", "children");
++          ensureDefault($scope.options, "dirSelectable", "true");
++          ensureDefault($scope.options, "injectClasses", {});
++          ensureDefault($scope.options.injectClasses, "ul", "");
++          ensureDefault($scope.options.injectClasses, "li", "");
++          ensureDefault($scope.options.injectClasses, "liSelected", "");
++          ensureDefault($scope.options.injectClasses, "iExpanded", "");
++          ensureDefault($scope.options.injectClasses, "iCollapsed", "");
++          ensureDefault($scope.options.injectClasses, "iLeaf", "");
++          ensureDefault($scope.options.injectClasses, "label", "");
++          ensureDefault($scope.options.injectClasses, "labelSelected", "");
++          ensureDefault($scope.options, "equality", defaultEquality);
++          ensureDefault($scope.options, "isLeaf", defaultIsLeaf);
++
++          $scope.selectedNodes = $scope.selectedNodes || [];
++          $scope.expandedNodes = $scope.expandedNodes || [];
++          $scope.expandedNodesMap = {};
++          for (var i=0; i < $scope.expandedNodes.length; i++) {
++            $scope.expandedNodesMap[""+i] = $scope.expandedNodes[i];
++          }
++          $scope.parentScopeOfTree = $scope.$parent;
++
++
++          function isSelectedNode(node) {
++            if (!$scope.options.multiSelection && ($scope.options.equality(node, $scope.selectedNode)))
++              return true;
++            else if ($scope.options.multiSelection && $scope.selectedNodes) {
++              for (var i = 0; (i < $scope.selectedNodes.length); i++) {
++                if ($scope.options.equality(node, $scope.selectedNodes[i])) {
++                  return true;
++                }
++              }
++              return false;
++            }
++          }
++
++          $scope.headClass = function(node) {
++            var liSelectionClass = classIfDefined($scope.options.injectClasses.liSelected, false);
++            var injectSelectionClass = "";
++            if (liSelectionClass && isSelectedNode(node))
++              injectSelectionClass = " " + liSelectionClass;
++            if ($scope.options.isLeaf(node))
++              return "tree-leaf" + injectSelectionClass;
++            if ($scope.expandedNodesMap[this.$id])
++              return "tree-expanded" + injectSelectionClass;
++            else
++              return "tree-collapsed" + injectSelectionClass;
++          };
++
++          $scope.iBranchClass = function() {
++            if ($scope.expandedNodesMap[this.$id])
++              return classIfDefined($scope.options.injectClasses.iExpanded);
++            else
++              return classIfDefined($scope.options.injectClasses.iCollapsed);
++          };
++
++          $scope.nodeExpanded = function() {
++            return !!$scope.expandedNodesMap[this.$id];
++          };
++
++          $scope.selectNodeHead = function() {
++            var expanding = $scope.expandedNodesMap[this.$id] === undefined;
++            $scope.expandedNodesMap[this.$id] = (expanding ? this.node : undefined);
++            if (expanding) {
++              $scope.expandedNodes.push(this.node);
++            }
++            else {
++              var index;
++              for (var i=0; (i < $scope.expandedNodes.length) && !index; i++) {
++                if ($scope.options.equality($scope.expandedNodes[i], this.node)) {
++                  index = i;
++                }
++              }
++              if (index != undefined)
++                $scope.expandedNodes.splice(index, 1);
++            }
++            if ($scope.onNodeToggle)
++              $scope.onNodeToggle({node: this.node, expanded: expanding});
++          };
++
++          $scope.selectNodeLabel = function( selectedNode ){
++            if(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0){
++              this.selectNodeHead();
++            }
++            if($scope.options.dirSelectable || !(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0) )
++             {
++              var selected = false;
++              if ($scope.options.multiSelection) {
++                var pos = $scope.selectedNodes.indexOf(selectedNode);
++                if (pos === -1) {
++                  $scope.selectedNodes.push(selectedNode);
++                  selected = true;
++                } else {
++                  $scope.selectedNodes.splice(pos, 1);
++                }
++              } else {
++                if ($scope.selectedNode != selectedNode) {
++                  $scope.selectedNode = selectedNode;
++                  selected = true;
++                }
++                else {
++                  $scope.selectedNode = undefined;
++                }
++              }
++              if ($scope.onSelection)
++                $scope.onSelection({node: selectedNode, selected: selected});
++            }
++          };
++
++
++          $scope.dblClickNode = function(selectedNode){
++            if($scope.onDblclick!=null){
++              $scope.onDblclick({node:selectedNode});
++            }
++          }
++
++          $scope.selectedClass = function() {
++            var isThisNodeSelected = isSelectedNode(this.node);
++            var labelSelectionClass = classIfDefined($scope.options.injectClasses.labelSelected, false);
++            var injectSelectionClass = "";
++            if (labelSelectionClass && isThisNodeSelected)
++              injectSelectionClass = " " + labelSelectionClass;
++
++            return isThisNodeSelected?"tree-selected" + injectSelectionClass:"";
++          };
++
++          //tree template
++          var orderBy = $scope.orderBy ? ' | orderBy:orderBy:reverseOrder' : '';
++          var template =
++            '<ul '+classIfDefined($scope.options.injectClasses.ul, true)+'>' +
++            '<li ng-repeat="node in node.' + $scope.options.nodeChildren + ' | filter:filterExpression:filterComparator ' + orderBy + '" ng-class="headClass(node)" '+classIfDefined($scope.options.injectClasses.li, true)+'>' +
++            '<i class="tree-branch-head" ng-class="iBranchClass()" ng-click="selectNodeHead(node)"></i>' +
++            '<i class="tree-leaf-head '+classIfDefined($scope.options.injectClasses.iLeaf, false)+'"></i>' +
++            '<div class="tree-label '+classIfDefined($scope.options.injectClasses.label, false)+'" ng-class="selectedClass()" ng-click="selectNodeLabel(node)" ng-dblclick="dblClickNode(node)" tree-transclude></div>' +
++            '<treeitem ng-if="nodeExpanded()"></treeitem>' +
++            '</li>' +
++            '</ul>';
++
++          this.template = $compile(template);
++        }],
++        compile: function(element, attrs, childTranscludeFn) {
++          return function ( scope, element, attrs, treemodelCntr ) {
++
++            scope.$watch("treeModel", function updateNodeOnRootScope(newValue) {
++              if (angular.isArray(newValue)) {
++                if (angular.isDefined(scope.node) && angular.equals(scope.node[scope.options.nodeChildren], newValue))
++                  return;
++                scope.node = {};
++                scope.synteticRoot = scope.node;
++                scope.node[scope.options.nodeChildren] = newValue;
++              }
++              else {
++                if (angular.equals(scope.node, newValue))
++                  return;
++                scope.node = newValue;
++              }
++            });
++
++            scope.$watchCollection('expandedNodes', function(newValue) {
++              var notFoundIds = 0;
++              var newExpandedNodesMap = {};
++              var $liElements = element.find('li');
++              var existingScopes = [];
++              // find all nodes visible on the tree and the scope $id of the scopes including them
++              angular.forEach($liElements, function(liElement) {
++                var $liElement = angular.element(liElement);
++                var liScope = $liElement.scope();
++                existingScopes.push(liScope);
++              });
++              // iterate over the newValue, the new expanded nodes, and for each find it in the existingNodesAndScopes
++              // if found, add the mapping $id -> node into newExpandedNodesMap
++              // if not found, add the mapping num -> node into newExpandedNodesMap
++              angular.forEach(newValue, function(newExNode) {
++                var found = false;
++                for (var i=0; (i < existingScopes.length) && !found; i++) {
++                  var existingScope = existingScopes[i];
++                  if (scope.options.equality(newExNode, existingScope.node)) {
++                    newExpandedNodesMap[existingScope.$id] = existingScope.node;
++                    found = true;
++                  }
++                }
++                if (!found)
++                  newExpandedNodesMap[notFoundIds++] = newExNode;
++              });
++              scope.expandedNodesMap = newExpandedNodesMap;
++            });
++
++//                        scope.$watch('expandedNodesMap', function(newValue) {
++//
++//                        });
++
++            //Rendering template for a root node
++            treemodelCntr.template( scope, function(clone) {
++              element.html('').append( clone );
++            });
++            // save the transclude function from compile (which is not bound to a scope as opposed to the one from link)
++            // we can fix this to work with the link transclude function with angular 1.2.6. as for angular 1.2.0 we need
++            // to keep using the compile function
++            scope.$treeTransclude = childTranscludeFn;
++          }
++        }
++      };
++    }])
++    .directive("treeitem", function() {
++      return {
++        restrict: 'E',
++        require: "^treecontrol",
++        link: function( scope, element, attrs, treemodelCntr) {
++          // Rendering template for the current node
++          treemodelCntr.template(scope, function(clone) {
++            element.html('').append(clone);
++          });
++        }
++      }
++    })
++    .directive("treeTransclude", function() {
++      return {
++        link: function(scope, element, attrs, controller) {
++          if (!scope.options.isLeaf(scope.node)) {
++            angular.forEach(scope.expandedNodesMap, function (node, id) {
++              if (scope.options.equality(node, scope.node)) {
++                scope.expandedNodesMap[scope.$id] = scope.node;
++                scope.expandedNodesMap[id] = undefined;
++              }
++            });
++          }
++          if (!scope.options.multiSelection && scope.options.equality(scope.node, scope.selectedNode)) {
++            scope.selectedNode = scope.node;
++          } else if (scope.options.multiSelection) {
++            var newSelectedNodes = [];
++            for (var i = 0; (i < scope.selectedNodes.length); i++) {
++              if (scope.options.equality(scope.node, scope.selectedNodes[i])) {
++                newSelectedNodes.push(scope.node);
++              }
++            }
++            scope.selectedNodes = newSelectedNodes;
++          }
++
++          // create a scope for the transclusion, whose parent is the parent of the tree control
++          scope.transcludeScope = scope.parentScopeOfTree.$new();
++          scope.transcludeScope.node = scope.node;
++          scope.transcludeScope.$parentNode = (scope.$parent.node === scope.synteticRoot)?null:scope.$parent.node;
++          scope.transcludeScope.$index = scope.$index;
++          scope.transcludeScope.$first = scope.$first;
++          scope.transcludeScope.$middle = scope.$middle;
++          scope.transcludeScope.$last = scope.$last;
++          scope.transcludeScope.$odd = scope.$odd;
++          scope.transcludeScope.$even = scope.$even;
++          scope.$on('$destroy', function() {
++            scope.transcludeScope.$destroy();
++          });
++
++          scope.$treeTransclude(scope.transcludeScope, function(clone) {
++            element.empty();
++            element.append(clone);
++          });
++        }
++      }
++    });
++})( angular );
+diff --git a/webapp/app/js/services/kylinProperties.js b/webapp/app/js/services/kylinProperties.js
+index a03403b..b1f04c0 100644
+--- a/webapp/app/js/services/kylinProperties.js
++++ b/webapp/app/js/services/kylinProperties.js
+@@ -20,6 +20,7 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
+   var _config;
+   var timezone;
+   var deployEnv;
++  var hiveLimit;
+ 
+ 
+   this.init = function () {
+@@ -56,12 +57,22 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
+   }
+ 
+   this.getDeployEnv = function () {
++    this.deployEnv = this.getProperty("deploy.env");
+     if (!this.deployEnv) {
+-      this.deployEnv = this.getProperty("deploy.env").trim();
++      return "DEV";
+     }
+-    return this.deployEnv.toUpperCase();
++    return this.deployEnv.toUpperCase().trim();
+   }
+ 
++  this.getHiveLimit = function () {
++    this.hiveLimit = this.getProperty("kylin.web.hive.limit");
++    if (!this.hiveLimit) {
++      return 20;
++    }
++    return this.hiveLimit;
++  }
++
++
+   //fill config info for Config from backend
+   this.initWebConfigInfo = function () {
+ 
+diff --git a/webapp/app/js/services/tables.js b/webapp/app/js/services/tables.js
+index 3b5e9f4..9b2d376 100755
+--- a/webapp/app/js/services/tables.js
++++ b/webapp/app/js/services/tables.js
+@@ -17,13 +17,14 @@
+  */
+ 
+ KylinApp.factory('TableService', ['$resource', function ($resource, config) {
+-  return $resource(Config.service.url + 'tables/:tableName/:action', {}, {
++  return $resource(Config.service.url + 'tables/:tableName/:action/:database', {}, {
+     list: {method: 'GET', params: {}, cache: true, isArray: true},
+     get: {method: 'GET', params: {}, isArray: false},
+     getExd: {method: 'GET', params: {action: 'exd-map'}, isArray: false},
+     reload: {method: 'PUT', params: {action: 'reload'}, isArray: false},
+     loadHiveTable: {method: 'POST', params: {}, isArray: false},
+     addStreamingSrc: {method: 'POST', params: {action:'addStreamingSrc'}, isArray: false},
+-    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false}
+-  });
++    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false},
++    showHiveDatabases: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true},
++    showHiveTables: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true}  });
+ }]);
+diff --git a/webapp/app/partials/tables/source_table_tree.html b/webapp/app/partials/tables/source_table_tree.html
+index 767eb43..c091dca 100755
+--- a/webapp/app/partials/tables/source_table_tree.html
++++ b/webapp/app/partials/tables/source_table_tree.html
+@@ -26,6 +26,7 @@
+         <div class="col-xs-5" style="padding-left: 0px;margin-top: 20px;">
+             <div class="pull-right">
+                 <a class="btn btn-xs btn-primary" tooltip="Load Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openModal()"><i class="fa fa-download"></i></a>
++                <a class="btn btn-xs btn-info" tooltip="Load Hive Table From Tree"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openTreeModal()"><i class="fa fa-download"></i></a>
+                 <a class="btn btn-xs btn-primary" tooltip="Add Streaming Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openStreamingSourceModal()"><i class="fa fa-area-chart"></i></a>
+             </div>
+         </div>
+@@ -47,3 +48,28 @@
+ </div>
+ 
+ <div ng-include="'partials/tables/table_load.html'"></div>
++
++<script type="text/ng-template" id="addHiveTableFromTree.html">
++  <div class="modal-header"><button class="close" type="button" data-dismiss="modal" ng-click="cancel()">×</button>
++    <h4>Load Hive Table Metadata From Tree</h4>
++  </div>
++  <div class="modal-body">
++    <span><strong>Project: </strong>{{ $parent.projectName!=null?$parent.projectName:'NULL'}}</span>
++    <div class="form-group searchBox">
++      <input type="text" placeholder="Filter ..." class="nav-search-input" ng-model="predicate" />
++    </div>
++    <loading ng-if="!hiveLoaded" text="Loading Databases..."></loading>
++    <treecontrol class="tree-light check" tree-model="treedata" selected-nodes="selectedNodes" filter-expression="predicate" on-selection="showSelected(node)" on-node-toggle="showToggle(node)" options="treeOptions">
++      <div ng-if="node.label==''&&node.id==0"><img src="image/ajax-loader.gif">Loading Tables...</div>
++      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showMoreClicked($parentNode)">Show More</button>
++      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showAllClicked($parentNode)">Show All</button>
++      {{node.label}}
++    </treecontrol>
++  </div>
++
++  <div class="modal-footer">
++    <button class="btn btn-primary" ng-click="add()">Sync</button>
++    <button class="btn btn-primary" ng-click="cancel()">Cancel</button>
++  </div>
++
++</script>
+diff --git a/webapp/bower.json b/webapp/bower.json
+index 41144f9..bba4a52 100755
+--- a/webapp/bower.json
++++ b/webapp/bower.json
+@@ -32,7 +32,8 @@
+     "bootstrap-sweetalert": "~0.4.3",
+     "angular-toggle-switch":"1.3.0",
+     "angular-ui-select": "0.13.2",
+-    "angular-sanitize": "1.2.18"
++    "angular-sanitize": "1.2.18",
++    "angular-tree-control": "0.2.8"
+   },
+   "devDependencies": {
+     "less.js": "~1.4.0",
+diff --git a/webapp/grunt.json b/webapp/grunt.json
+index 3219b5e..86ad1dc 100755
+--- a/webapp/grunt.json
++++ b/webapp/grunt.json
+@@ -19,7 +19,6 @@
+                 "app/components/angularLocalStorage/src/angularLocalStorage.js",
+                 "app/components/angular-base64/angular-base64.min.js",
+                 "app/components/ng-grid/build/ng-grid.js",
+-                "app/components/angular-tree-control/angular-tree-control.js",
+                 "app/components/ace-builds/src-min-noconflict/ace.js",
+                 "app/components/ace-builds/src-min-noconflict/ext-language_tools.js",
+                 "app/components/ace-builds/src-min-noconflict/mode-json.js",
+-- 
+2.5.4 (Apple Git-61)
+

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 5532339..78a564d 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -148,3 +148,5 @@ deploy.env=DEV
 
 ###########################deprecated configs#######################
 kylin.sandbox=true
+
+ kylin.web.hive.limit=20
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 0c68a7e..7c9919b 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -116,4 +116,5 @@ kylin.web.contact_mail=
 #env DEV|QA|PROD
 deploy.env=DEV
 
+ kylin.web.hive.limit=20
 

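The new kylin.web.hive.limit key caps how many databases or tables the load-from-tree dialog renders per "Show More" click, and the web tier falls back to 20 when the key is unset (see getHiveLimit() in kylinProperties.js below). A minimal sketch of that lookup-with-default, assuming a plain java.util.Properties handle for illustration rather than Kylin's own config API:

    import java.util.Properties;

    final class WebConfigDefaults {
        // Read kylin.web.hive.limit, falling back to 20 when the key is
        // absent or unparsable; the same default the UI's getHiveLimit() uses.
        static int hiveLimit(Properties props) {
            String raw = props.getProperty("kylin.web.hive.limit");
            if (raw == null) {
                return 20;
            }
            try {
                return Integer.parseInt(raw.trim());
            } catch (NumberFormatException e) {
                return 20;
            }
        }

        public static void main(String[] args) {
            Properties p = new Properties();
            System.out.println(hiveLimit(p));        // 20 (default)
            p.setProperty("kylin.web.hive.limit", " 50 ");
            System.out.println(hiveLimit(p));        // 50
        }
    }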
http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 42a0c6d..2e42841 100644
--- a/pom.xml
+++ b/pom.xml
@@ -781,6 +781,7 @@
                                 <!-- MIT license -->
                                 <exclude>webapp/app/css/AdminLTE.css</exclude>
                                 <exclude>webapp/app/js/directives/kylin_abn_tree_directive.js</exclude>
+                                <exclude>webapp/app/js/directives/angular-tree-control.js</exclude>
 
                                 <!--configuration file -->
                                 <exclude>webapp/app/routes.json</exclude>

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
index 98e8d58..bd04ad8 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
@@ -36,6 +36,7 @@ import org.apache.kylin.rest.response.TableDescResponse;
 import org.apache.kylin.rest.service.CubeService;
 import org.apache.kylin.rest.service.ModelService;
 import org.apache.kylin.rest.service.ProjectService;
+import org.apache.kylin.source.hive.HiveClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -258,6 +259,49 @@ public class TableController extends BasicController {
         return descs;
     }
 
+
+    /**
+     * Show all databases in Hive
+     *
+     * @return Hive databases list
+     * @throws IOException
+     */
+    @RequestMapping(value = "/hive", method = { RequestMethod.GET })
+    @ResponseBody
+    private static List<String> showHiveDatabases() throws IOException {
+        HiveClient hiveClient = new HiveClient();
+        List<String> results = null;
+
+        try {
+            results = hiveClient.getHiveDbNames();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IOException(e);
+        }
+        return results;
+    }
+
+    /**
+     * Show all tables in a Hive database
+     *
+     * @return Hive table list
+     * @throws IOException
+     */
+    @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET })
+    @ResponseBody
+    private static List<String> showHiveTables(@PathVariable String database) throws IOException {
+        HiveClient hiveClient = new HiveClient();
+        List<String> results = null;
+
+        try {
+            results = hiveClient.getHiveTableNames(database);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IOException(e);
+        }
+        return results;
+    }
+
     public void setCubeService(CubeService cubeService) {
         this.cubeMgmtService = cubeService;
     }

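The two handlers above return plain JSON arrays of database and table names. A rough client-side sketch, assuming the controller's class-level mapping is /tables, the usual /kylin/api servlet prefix, and Kylin's default ADMIN/KYLIN basic-auth credentials (all deployment-specific assumptions, not shown in this hunk):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class HiveMetaBrowser {
        // Base URL is an assumption for illustration; adjust to your deployment.
        private static final String BASE = "http://localhost:7070/kylin/api/tables/hive";

        static String get(String url) throws Exception {
            HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
            conn.setRequestMethod("GET");
            // "ADMIN:KYLIN" base64-encoded; replace with real credentials.
            conn.setRequestProperty("Authorization", "Basic QURNSU46S1lMSU4=");
            try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
                StringBuilder sb = new StringBuilder();
                for (String line; (line = in.readLine()) != null; ) sb.append(line);
                return sb.toString();
            }
        }

        public static void main(String[] args) throws Exception {
            System.out.println(get(BASE));              // JSON array of database names
            System.out.println(get(BASE + "/default")); // JSON array of tables in 'default'
        }
    }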
http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
index 178889e..a99b304 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
@@ -132,6 +132,14 @@ public class HiveClient {
         return getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES);
     }
 
+    public List<String> getHiveDbNames() throws Exception {
+        return getMetaStoreClient().getAllDatabases();
+    }
+
+    public List<String> getHiveTableNames(String database) throws Exception {
+        return getMetaStoreClient().getAllTables(database);
+    }
+
     /**
      * COPIED FROM org.apache.hadoop.hive.ql.stats.StatsUtil for backward compatibility
      * 

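getHiveDbNames() and getHiveTableNames() delegate straight to the Hive metastore client, so enumerating the whole catalog is a two-level loop. A minimal sketch, assuming a hive-site.xml on the classpath that points at a reachable metastore:

    import java.util.List;

    import org.apache.kylin.source.hive.HiveClient;

    public class HiveCatalogDump {
        public static void main(String[] args) throws Exception {
            HiveClient client = new HiveClient();
            // Walk every database and print its tables as "db.table",
            // the same label format the web UI's tree nodes use.
            for (String db : client.getHiveDbNames()) {
                List<String> tables = client.getHiveTableNames(db);
                for (String t : tables) {
                    System.out.println(db + "." + t);
                }
            }
        }
    }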
http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/index.html
----------------------------------------------------------------------
diff --git a/webapp/app/index.html b/webapp/app/index.html
index 11ca283..b4eb9d7 100644
--- a/webapp/app/index.html
+++ b/webapp/app/index.html
@@ -113,6 +113,7 @@
 <script src="js/filters/filter.js"></script>
 <script src="js/directives/directives.js"></script>
 <script src="js/directives/kylin_abn_tree_directive.js"></script>
+<script src="js/directives/angular-tree-control.js"></script>
 <script src="js/factories/graph.js"></script>
 <script src="js/services/cache.js"></script>
 <script src="js/services/message.js"></script>

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/js/controllers/sourceMeta.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/sourceMeta.js b/webapp/app/js/controllers/sourceMeta.js
index cbd9f52..69f1a44 100755
--- a/webapp/app/js/controllers/sourceMeta.js
+++ b/webapp/app/js/controllers/sourceMeta.js
@@ -100,6 +100,24 @@ KylinApp
       });
     };
 
+    $scope.openTreeModal = function () {
+      $modal.open({
+        templateUrl: 'addHiveTableFromTree.html',
+        controller: ModalInstanceCtrl,
+        resolve: {
+          tableNames: function () {
+            return $scope.tableNames;
+          },
+          projectName:function(){
+            return  $scope.projectModel.selectedProject;
+          },
+          scope: function () {
+            return $scope;
+          }
+        }
+      });
+    };
+
     $scope.openUnLoadModal = function () {
       $modal.open({
         templateUrl: 'removeHiveTable.html',
@@ -119,13 +137,175 @@ KylinApp
       });
     };
 
-    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope) {
+    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope, kylinConfig) {
       $scope.tableNames = "";
       $scope.projectName = projectName;
       $scope.cancel = function () {
         $modalInstance.dismiss('cancel');
       };
+
+      $scope.kylinConfig = kylinConfig;
+
+
+      $scope.treeOptions = {multiSelection: true};
+      $scope.selectedNodes = [];
+      $scope.hiveLimit =  kylinConfig.getHiveLimit();
+
+      $scope.loadHive = function () {
+        if($scope.hiveLoaded)
+          return;
+        TableService.showHiveDatabases({}, function (databases) {
+          $scope.dbNum = databases.length;
+          if (databases.length > 0) {
+            $scope.hiveMap = {};
+            for (var i = 0; i < databases.length; i++) {
+              var dbName = databases[i];
+              var hiveData = {"dbname":dbName,"tables":[],"expanded":false};
+              $scope.hive.push(hiveData);
+              $scope.hiveMap[dbName] = i;
+            }
+          }
+          $scope.hiveLoaded = true;
+          $scope.showMoreDatabases();
+        });
+      }
+
+      $scope.showMoreTables = function(hiveTables, node){
+        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
+        var from = $scope.hiveLimit * shownTimes;
+        var to = 0;
+        var hasMore = false;
+        if(from + $scope.hiveLimit > hiveTables.length) {
+          to = hiveTables.length - 1;
+        } else {
+          to = from + $scope.hiveLimit - 1;
+          hasMore = true;
+        }
+        if(!angular.isUndefined(node.children[from])){
+          node.children.pop();
+        }
+
+        for(var idx = from; idx <= to; idx++){
+          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
+        }
+
+        if(hasMore){
+          var loading = {"label":"","id":65535,"children":[]};
+          node.children.push(loading);
+        }
+      }
+
+      $scope.showAllTables = function(hiveTables, node){
+        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
+        var from = $scope.hiveLimit * shownTimes;
+        var to = hiveTables.length - 1;
+        if(!angular.isUndefined(node.children[from])){
+          node.children.pop();
+        }
+        for(var idx = from; idx <= to; idx++){
+          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
+        }
+      }
+
+      $scope.showMoreDatabases = function(){
+        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
+        var from = $scope.hiveLimit * shownTimes;
+        var to = 0;
+        var hasMore = false;
+        if(from + $scope.hiveLimit > $scope.hive.length) {
+          to = $scope.hive.length - 1;
+        } else {
+          to = from + $scope.hiveLimit - 1;
+          hasMore = true;
+        }
+        if(!angular.isUndefined($scope.treedata[from])){
+          $scope.treedata.pop();
+        }
+
+        for(var idx = from; idx <= to; idx++){
+          var children = [];
+          var loading = {"label":"","id":0,"children":[]};
+          children.push(loading);
+          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
+        }
+
+        if(hasMore){
+          var loading = {"label":"","id":65535,"children":[0]};
+          $scope.treedata.push(loading);
+        }
+      }
+
+      $scope.showAllDatabases = function(){
+        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
+        var from = $scope.hiveLimit * shownTimes;
+        var to = $scope.hive.length - 1;
+
+        if(!angular.isUndefined($scope.treedata[from])){
+          $scope.treedata.pop();
+        }
+
+        for(var idx = from; idx <= to; idx++){
+          var children = [];
+          var loading = {"label":"","id":0,"children":[]};
+          children.push(loading);
+          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
+        }
+      }
+
+      $scope.showMoreClicked = function($parentNode){
+        if($parentNode == null){
+          $scope.showMoreDatabases();
+        } else {
+          $scope.showMoreTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
+        }
+      }
+
+      $scope.showAllClicked = function($parentNode){
+        if($parentNode == null){
+          $scope.showAllDatabases();
+        } else {
+          $scope.showAllTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
+        }
+      }
+
+      $scope.showToggle = function(node) {
+        if(node.expanded == false){
+          TableService.showHiveTables({"database": node.label},function (hive_tables){
+            var tables = [];
+            for (var i = 0; i < hive_tables.length; i++) {
+              tables.push(hive_tables[i]);
+            }
+            $scope.hive[$scope.hiveMap[node.label]].tables = tables;
+            $scope.showMoreTables(tables,node);
+            node.expanded = true;
+          });
+        }
+      }
+
+      $scope.showSelected = function(node) {
+
+      }
+
+      if(angular.isUndefined($scope.hive) || angular.isUndefined($scope.hiveLoaded) || angular.isUndefined($scope.treedata) ){
+        $scope.hive = [];
+        $scope.hiveLoaded = false;
+        $scope.treedata = [];
+        $scope.loadHive();
+      }
+
+
+
+
       $scope.add = function () {
+
+        if($scope.tableNames.length === 0 && $scope.selectedNodes.length > 0) {
+          for(var i = 0; i <  $scope.selectedNodes.length; i++){
+            if($scope.selectedNodes[i].label.indexOf(".") >= 0){
+              $scope.tableNames += $scope.selectedNodes[i].label + ',';
+            }
+          }
+        }
+
         if ($scope.tableNames.trim() === "") {
           SweetAlert.swal('', 'Please input table(s) you want to synchronize.', 'info');
           return;
@@ -172,7 +352,8 @@ KylinApp
         })
       }
 
-      $scope.remove = function () {
+
+    $scope.remove = function () {
         if ($scope.tableNames.trim() === "") {
           SweetAlert.swal('', 'Please input table(s) you want to synchronize.', 'info');
           return;

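The show-more logic pages both database and table lists in windows of hiveLimit: each click derives the next [from, to] slice from how many children are already rendered, pops the trailing placeholder node, appends the slice, and re-appends a "Show More" placeholder only when items remain. The window arithmetic restated as a stand-alone sketch (class and field names are illustrative, not the controller's):

    // Computes the next page [from, to] over a list of `total` items when
    // `shown` items are already on screen, mirroring showMoreTables().
    final class PageWindow {
        final int from, to;
        final boolean hasMore;

        PageWindow(int shown, int total, int limit) {
            int pages = shown / limit;        // how many full pages are already rendered
            this.from = limit * pages;
            if (from + limit > total) {       // last page: take the remainder
                this.to = total - 1;
                this.hasMore = false;
            } else {                          // full page: keep a "Show More" node
                this.to = from + limit - 1;
                this.hasMore = true;
            }
        }
    }

For example, with 45 tables and a limit of 20 the successive windows are [0,19], [20,39] and [40,44]; only the first two leave a placeholder behind.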
http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/js/directives/angular-tree-control.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/directives/angular-tree-control.js b/webapp/app/js/directives/angular-tree-control.js
new file mode 100644
index 0000000..6fca987
--- /dev/null
+++ b/webapp/app/js/directives/angular-tree-control.js
@@ -0,0 +1,363 @@
+/*
+ * The MIT License (MIT)
+ *
+ * Copyright (c) 2013 Steve
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+ * the Software, and to permit persons to whom the Software is furnished to do so,
+ *   subject to the following conditions:
+ *
+ *   The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ *   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+ * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+ * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+(function ( angular ) {
+  'use strict';
+
+  angular.module( 'treeControl', [] )
+    .directive( 'treecontrol', ['$compile', function( $compile ) {
+      /**
+       * @param cssClass - the css class
+       * @param addClassProperty - should we wrap the class name with class=""
+       */
+      function classIfDefined(cssClass, addClassProperty) {
+        if (cssClass) {
+          if (addClassProperty)
+            return 'class="' + cssClass + '"';
+          else
+            return cssClass;
+        }
+        else
+          return "";
+      }
+
+      function ensureDefault(obj, prop, value) {
+        if (!obj.hasOwnProperty(prop))
+          obj[prop] = value;
+      }
+
+      return {
+        restrict: 'EA',
+        require: "treecontrol",
+        transclude: true,
+        scope: {
+          treeModel: "=",
+          selectedNode: "=?",
+          selectedNodes: "=?",
+          expandedNodes: "=?",
+          onSelection: "&",
+          onNodeToggle: "&",
+          options: "=?",
+          orderBy: "@",
+          reverseOrder: "@",
+          filterExpression: "=?",
+          filterComparator: "=?",
+          onDblclick: "&"
+        },
+        controller: ['$scope', function( $scope ) {
+
+          function defaultIsLeaf(node) {
+            return !node[$scope.options.nodeChildren] || node[$scope.options.nodeChildren].length === 0;
+          }
+
+          function shallowCopy(src, dst) {
+            if (angular.isArray(src)) {
+              dst = dst || [];
+
+              for ( var i = 0; i < src.length; i++) {
+                dst[i] = src[i];
+              }
+            } else if (angular.isObject(src)) {
+              dst = dst || {};
+
+              for (var key in src) {
+                if (hasOwnProperty.call(src, key) && !(key.charAt(0) === '$' && key.charAt(1) === '$')) {
+                  dst[key] = src[key];
+                }
+              }
+            }
+
+            return dst || src;
+          }
+          function defaultEquality(a, b) {
+            if (a === undefined || b === undefined)
+              return false;
+            a = shallowCopy(a);
+            a[$scope.options.nodeChildren] = [];
+            b = shallowCopy(b);
+            b[$scope.options.nodeChildren] = [];
+            return angular.equals(a, b);
+          }
+
+          $scope.options = $scope.options || {};
+          ensureDefault($scope.options, "multiSelection", false);
+          ensureDefault($scope.options, "nodeChildren", "children");
+          ensureDefault($scope.options, "dirSelectable", "true");
+          ensureDefault($scope.options, "injectClasses", {});
+          ensureDefault($scope.options.injectClasses, "ul", "");
+          ensureDefault($scope.options.injectClasses, "li", "");
+          ensureDefault($scope.options.injectClasses, "liSelected", "");
+          ensureDefault($scope.options.injectClasses, "iExpanded", "");
+          ensureDefault($scope.options.injectClasses, "iCollapsed", "");
+          ensureDefault($scope.options.injectClasses, "iLeaf", "");
+          ensureDefault($scope.options.injectClasses, "label", "");
+          ensureDefault($scope.options.injectClasses, "labelSelected", "");
+          ensureDefault($scope.options, "equality", defaultEquality);
+          ensureDefault($scope.options, "isLeaf", defaultIsLeaf);
+
+          $scope.selectedNodes = $scope.selectedNodes || [];
+          $scope.expandedNodes = $scope.expandedNodes || [];
+          $scope.expandedNodesMap = {};
+          for (var i=0; i < $scope.expandedNodes.length; i++) {
+            $scope.expandedNodesMap[""+i] = $scope.expandedNodes[i];
+          }
+          $scope.parentScopeOfTree = $scope.$parent;
+
+
+          function isSelectedNode(node) {
+            if (!$scope.options.multiSelection && ($scope.options.equality(node, $scope.selectedNode)))
+              return true;
+            else if ($scope.options.multiSelection && $scope.selectedNodes) {
+              for (var i = 0; (i < $scope.selectedNodes.length); i++) {
+                if ($scope.options.equality(node, $scope.selectedNodes[i])) {
+                  return true;
+                }
+              }
+              return false;
+            }
+          }
+
+          $scope.headClass = function(node) {
+            var liSelectionClass = classIfDefined($scope.options.injectClasses.liSelected, false);
+            var injectSelectionClass = "";
+            if (liSelectionClass && isSelectedNode(node))
+              injectSelectionClass = " " + liSelectionClass;
+            if ($scope.options.isLeaf(node))
+              return "tree-leaf" + injectSelectionClass;
+            if ($scope.expandedNodesMap[this.$id])
+              return "tree-expanded" + injectSelectionClass;
+            else
+              return "tree-collapsed" + injectSelectionClass;
+          };
+
+          $scope.iBranchClass = function() {
+            if ($scope.expandedNodesMap[this.$id])
+              return classIfDefined($scope.options.injectClasses.iExpanded);
+            else
+              return classIfDefined($scope.options.injectClasses.iCollapsed);
+          };
+
+          $scope.nodeExpanded = function() {
+            return !!$scope.expandedNodesMap[this.$id];
+          };
+
+          $scope.selectNodeHead = function() {
+            var expanding = $scope.expandedNodesMap[this.$id] === undefined;
+            $scope.expandedNodesMap[this.$id] = (expanding ? this.node : undefined);
+            if (expanding) {
+              $scope.expandedNodes.push(this.node);
+            }
+            else {
+              var index;
+              for (var i=0; (i < $scope.expandedNodes.length) && !index; i++) {
+                if ($scope.options.equality($scope.expandedNodes[i], this.node)) {
+                  index = i;
+                }
+              }
+              if (index != undefined)
+                $scope.expandedNodes.splice(index, 1);
+            }
+            if ($scope.onNodeToggle)
+              $scope.onNodeToggle({node: this.node, expanded: expanding});
+          };
+
+          $scope.selectNodeLabel = function( selectedNode ){
+            if(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0){
+              this.selectNodeHead();
+            }
+            if($scope.options.dirSelectable || !(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0) )
+             {
+              var selected = false;
+              if ($scope.options.multiSelection) {
+                var pos = $scope.selectedNodes.indexOf(selectedNode);
+                if (pos === -1) {
+                  $scope.selectedNodes.push(selectedNode);
+                  selected = true;
+                } else {
+                  $scope.selectedNodes.splice(pos, 1);
+                }
+              } else {
+                if ($scope.selectedNode != selectedNode) {
+                  $scope.selectedNode = selectedNode;
+                  selected = true;
+                }
+                else {
+                  $scope.selectedNode = undefined;
+                }
+              }
+              if ($scope.onSelection)
+                $scope.onSelection({node: selectedNode, selected: selected});
+            }
+          };
+
+
+          $scope.dblClickNode = function(selectedNode){
+            if($scope.onDblclick!=null){
+              $scope.onDblclick({node:selectedNode});
+            }
+          }
+
+          $scope.selectedClass = function() {
+            var isThisNodeSelected = isSelectedNode(this.node);
+            var labelSelectionClass = classIfDefined($scope.options.injectClasses.labelSelected, false);
+            var injectSelectionClass = "";
+            if (labelSelectionClass && isThisNodeSelected)
+              injectSelectionClass = " " + labelSelectionClass;
+
+            return isThisNodeSelected?"tree-selected" + injectSelectionClass:"";
+          };
+
+          //tree template
+          var orderBy = $scope.orderBy ? ' | orderBy:orderBy:reverseOrder' : '';
+          var template =
+            '<ul '+classIfDefined($scope.options.injectClasses.ul, true)+'>' +
+            '<li ng-repeat="node in node.' + $scope.options.nodeChildren + ' | filter:filterExpression:filterComparator ' + orderBy + '" ng-class="headClass(node)" '+classIfDefined($scope.options.injectClasses.li, true)+'>' +
+            '<i class="tree-branch-head" ng-class="iBranchClass()" ng-click="selectNodeHead(node)"></i>' +
+            '<i class="tree-leaf-head '+classIfDefined($scope.options.injectClasses.iLeaf, false)+'"></i>' +
+            '<div class="tree-label '+classIfDefined($scope.options.injectClasses.label, false)+'" ng-class="selectedClass()" ng-click="selectNodeLabel(node)" ng-dblclick="dblClickNode(node)" tree-transclude></div>' +
+            '<treeitem ng-if="nodeExpanded()"></treeitem>' +
+            '</li>' +
+            '</ul>';
+
+          this.template = $compile(template);
+        }],
+        compile: function(element, attrs, childTranscludeFn) {
+          return function ( scope, element, attrs, treemodelCntr ) {
+
+            scope.$watch("treeModel", function updateNodeOnRootScope(newValue) {
+              if (angular.isArray(newValue)) {
+                if (angular.isDefined(scope.node) && angular.equals(scope.node[scope.options.nodeChildren], newValue))
+                  return;
+                scope.node = {};
+                scope.synteticRoot = scope.node;
+                scope.node[scope.options.nodeChildren] = newValue;
+              }
+              else {
+                if (angular.equals(scope.node, newValue))
+                  return;
+                scope.node = newValue;
+              }
+            });
+
+            scope.$watchCollection('expandedNodes', function(newValue) {
+              var notFoundIds = 0;
+              var newExpandedNodesMap = {};
+              var $liElements = element.find('li');
+              var existingScopes = [];
+              // find all nodes visible on the tree and the scope $id of the scopes including them
+              angular.forEach($liElements, function(liElement) {
+                var $liElement = angular.element(liElement);
+                var liScope = $liElement.scope();
+                existingScopes.push(liScope);
+              });
+              // iterate over the newValue, the new expanded nodes, and for each find it in the existingNodesAndScopes
+              // if found, add the mapping $id -> node into newExpandedNodesMap
+              // if not found, add the mapping num -> node into newExpandedNodesMap
+              angular.forEach(newValue, function(newExNode) {
+                var found = false;
+                for (var i=0; (i < existingScopes.length) && !found; i++) {
+                  var existingScope = existingScopes[i];
+                  if (scope.options.equality(newExNode, existingScope.node)) {
+                    newExpandedNodesMap[existingScope.$id] = existingScope.node;
+                    found = true;
+                  }
+                }
+                if (!found)
+                  newExpandedNodesMap[notFoundIds++] = newExNode;
+              });
+              scope.expandedNodesMap = newExpandedNodesMap;
+            });
+
+//                        scope.$watch('expandedNodesMap', function(newValue) {
+//
+//                        });
+
+            //Rendering template for a root node
+            treemodelCntr.template( scope, function(clone) {
+              element.html('').append( clone );
+            });
+            // save the transclude function from compile (which is not bound to a scope, as opposed to the one from link)
+            // we can fix this to work with the link transclude function with angular 1.2.6; as for angular 1.2.0 we need
+            // to keep using the compile function
+            scope.$treeTransclude = childTranscludeFn;
+          }
+        }
+      };
+    }])
+    .directive("treeitem", function() {
+      return {
+        restrict: 'E',
+        require: "^treecontrol",
+        link: function( scope, element, attrs, treemodelCntr) {
+          // Rendering template for the current node
+          treemodelCntr.template(scope, function(clone) {
+            element.html('').append(clone);
+          });
+        }
+      }
+    })
+    .directive("treeTransclude", function() {
+      return {
+        link: function(scope, element, attrs, controller) {
+          if (!scope.options.isLeaf(scope.node)) {
+            angular.forEach(scope.expandedNodesMap, function (node, id) {
+              if (scope.options.equality(node, scope.node)) {
+                scope.expandedNodesMap[scope.$id] = scope.node;
+                scope.expandedNodesMap[id] = undefined;
+              }
+            });
+          }
+          if (!scope.options.multiSelection && scope.options.equality(scope.node, scope.selectedNode)) {
+            scope.selectedNode = scope.node;
+          } else if (scope.options.multiSelection) {
+            var newSelectedNodes = [];
+            for (var i = 0; (i < scope.selectedNodes.length); i++) {
+              if (scope.options.equality(scope.node, scope.selectedNodes[i])) {
+                newSelectedNodes.push(scope.node);
+              }
+            }
+            scope.selectedNodes = newSelectedNodes;
+          }
+
+          // create a scope for the transclusion, whose parent is the parent of the tree control
+          scope.transcludeScope = scope.parentScopeOfTree.$new();
+          scope.transcludeScope.node = scope.node;
+          scope.transcludeScope.$parentNode = (scope.$parent.node === scope.synteticRoot)?null:scope.$parent.node;
+          scope.transcludeScope.$index = scope.$index;
+          scope.transcludeScope.$first = scope.$first;
+          scope.transcludeScope.$middle = scope.$middle;
+          scope.transcludeScope.$last = scope.$last;
+          scope.transcludeScope.$odd = scope.$odd;
+          scope.transcludeScope.$even = scope.$even;
+          scope.$on('$destroy', function() {
+            scope.transcludeScope.$destroy();
+          });
+
+          scope.$treeTransclude(scope.transcludeScope, function(clone) {
+            element.empty();
+            element.append(clone);
+          });
+        }
+      }
+    });
+})( angular );

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/js/services/kylinProperties.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/services/kylinProperties.js b/webapp/app/js/services/kylinProperties.js
index a03403b..546db2b 100644
--- a/webapp/app/js/services/kylinProperties.js
+++ b/webapp/app/js/services/kylinProperties.js
@@ -56,12 +56,20 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
   }
 
   this.getDeployEnv = function () {
+    this.deployEnv = this.getProperty("deploy.env");
     if (!this.deployEnv) {
-      this.deployEnv = this.getProperty("deploy.env").trim();
+      return "DEV";
     }
-    return this.deployEnv.toUpperCase();
+    return this.deployEnv.toUpperCase().trim();
   }
 
+  this.getHiveLimit = function () {
+    this.hiveLimit = this.getProperty("kylin.web.hive.limit");
+    if (!this.hiveLimit) {
+      return 20;
+    }
+    return this.hiveLimit;
+  }
   //fill config info for Config from backend
   this.initWebConfigInfo = function () {
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/js/services/tables.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/services/tables.js b/webapp/app/js/services/tables.js
index ca7fc42..4199d6c 100755
--- a/webapp/app/js/services/tables.js
+++ b/webapp/app/js/services/tables.js
@@ -17,7 +17,7 @@
  */
 
 KylinApp.factory('TableService', ['$resource', function ($resource, config) {
-  return $resource(Config.service.url + 'tables/:tableName/:action', {}, {
+  return $resource(Config.service.url + 'tables/:tableName/:action/:database', {}, {
     list: {method: 'GET', params: {}, cache: true, isArray: true},
     get: {method: 'GET', params: {}, isArray: false},
     getExd: {method: 'GET', params: {action: 'exd-map'}, isArray: false},
@@ -25,6 +25,8 @@ KylinApp.factory('TableService', ['$resource', function ($resource, config) {
     loadHiveTable: {method: 'POST', params: {}, isArray: false},
     unLoadHiveTable: {method: 'DELETE', params: {}, isArray: false},
     addStreamingSrc: {method: 'POST', params: {action:'addStreamingSrc'}, isArray: false},
-    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false}
+    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false},
+    showHiveDatabases: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true},
+    showHiveTables: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true}
   });
 }]);

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/partials/tables/source_table_tree.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/tables/source_table_tree.html b/webapp/app/partials/tables/source_table_tree.html
index 4eddc4f..c2dc219 100755
--- a/webapp/app/partials/tables/source_table_tree.html
+++ b/webapp/app/partials/tables/source_table_tree.html
@@ -26,6 +26,7 @@
         <div class="col-xs-5" style="padding-left: 0px;margin-top: 20px;">
             <div class="pull-right">
               <a class="btn btn-xs btn-primary" tooltip="Load Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openModal()"><i class="fa fa-download"></i></a>
+              <a class="btn btn-xs btn-info" tooltip="Load Hive Table From Tree"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openTreeModal()"><i class="fa fa-download"></i></a>
               <a class="btn btn-xs btn-info" tooltip="UnLoad Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openUnLoadModal()"><i class="fa fa-remove"></i></a>
               <a class="btn btn-xs btn-primary" tooltip="Add Streaming Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openStreamingSourceModal()"><i class="fa fa-area-chart"></i></a>
             </div>
@@ -47,5 +48,30 @@
     </div>
 </div>
 
+<script type="text/ng-template" id="addHiveTableFromTree.html">
+  <div class="modal-header"><button class="close" type="button" data-dismiss="modal" ng-click="cancel()">×</button>
+    <h4>Load Hive Table Metadata From Tree</h4>
+  </div>
+  <div class="modal-body">
+    <span><strong>Project: </strong>{{ $parent.projectName!=null?$parent.projectName:'NULL'}}</span>
+    <div class="form-group searchBox">
+      <input type="text" placeholder="Filter ..." class="nav-search-input" ng-model="predicate" />
+    </div>
+    <loading ng-if="!hiveLoaded" text="Loading Databases..."></loading>
+    <treecontrol class="tree-light check" tree-model="treedata" selected-nodes="selectedNodes" filter-expression="predicate" on-selection="showSelected(node)" on-node-toggle="showToggle(node)" options="treeOptions">
+      <div ng-if="node.label==''&&node.id==0"><img src="image/ajax-loader.gif">Loading Tables...</div>
+      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showMoreClicked($parentNode)">Show More</button>
+      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showAllClicked($parentNode)">Show All</button>
+      {{node.label}}
+    </treecontrol>
+  </div>
+
+  <div class="modal-footer">
+    <button class="btn btn-primary" ng-click="add()">Sync</button>
+    <button class="btn btn-primary" ng-click="cancel()">Cancel</button>
+  </div>
+
+</script>
+
 <div ng-include="'partials/tables/table_load.html'"></div>
 <div ng-include="'partials/tables/table_unload.html'"></div>

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/bower.json
----------------------------------------------------------------------
diff --git a/webapp/bower.json b/webapp/bower.json
index 41144f9..bba4a52 100755
--- a/webapp/bower.json
+++ b/webapp/bower.json
@@ -32,7 +32,8 @@
     "bootstrap-sweetalert": "~0.4.3",
     "angular-toggle-switch":"1.3.0",
     "angular-ui-select": "0.13.2",
-    "angular-sanitize": "1.2.18"
+    "angular-sanitize": "1.2.18",
+    "angular-tree-control": "0.2.8"
   },
   "devDependencies": {
     "less.js": "~1.4.0",

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/grunt.json
----------------------------------------------------------------------
diff --git a/webapp/grunt.json b/webapp/grunt.json
index 3219b5e..86ad1dc 100755
--- a/webapp/grunt.json
+++ b/webapp/grunt.json
@@ -19,7 +19,6 @@
                 "app/components/angularLocalStorage/src/angularLocalStorage.js",
                 "app/components/angular-base64/angular-base64.min.js",
                 "app/components/ng-grid/build/ng-grid.js",
-                "app/components/angular-tree-control/angular-tree-control.js",
                 "app/components/ace-builds/src-min-noconflict/ace.js",
                 "app/components/ace-builds/src-min-noconflict/ext-language_tools.js",
                 "app/components/ace-builds/src-min-noconflict/mode-json.js",


[23/50] [abbrv] kylin git commit: KYLIN-1456 do not display date as 1970-01-01 as default value in datepicker-popup

Posted by li...@apache.org.
KYLIN-1456 do not display date as 1970-01-01 as default value in datepicker-popup


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/a5410681
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/a5410681
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/a5410681

Branch: refs/heads/master
Commit: a54106810a40c2a1065ee9b781f96fd2f1df298d
Parents: 1ea781f
Author: Jason <ji...@163.com>
Authored: Wed Mar 2 17:51:24 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Wed Mar 2 17:51:36 2016 +0800

----------------------------------------------------------------------
 webapp/app/js/directives/directives.js                 | 6 ++++++
 webapp/app/less/app.less                               | 4 ++++
 webapp/app/partials/cubeDesigner/refresh_settings.html | 2 +-
 3 files changed, 11 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/a5410681/webapp/app/js/directives/directives.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/directives/directives.js b/webapp/app/js/directives/directives.js
index d07cee1..a9cd956 100644
--- a/webapp/app/js/directives/directives.js
+++ b/webapp/app/js/directives/directives.js
@@ -228,6 +228,12 @@ KylinApp.directive('kylinPagination', function ($parse, $q) {
       require: 'ngModel',
       link: function (scope, element, attrs, ctrl) {
         ctrl.$formatters.push(function (value) {
+
+          //set null for 0
+          if(value===0){
+            return null;
+          }
+
           //return value;
           var date = new Date(value + (60000 * new Date().getTimezoneOffset()));
           return date;

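Two behaviors are folded into this $formatters entry: an epoch value of 0 (the "no partition start date" sentinel) now renders as an empty field instead of 1970-01-01, and non-zero values are shifted so the local datepicker displays the stored GMT wall-clock time. The same conversion in Java terms, as a rough sketch (note that JavaScript's getTimezoneOffset() is the negation, in minutes, of Java's zone offset):

    import java.util.Date;
    import java.util.TimeZone;

    final class GmtDisplay {
        // Returns null for the 0 sentinel, matching the directive's guard;
        // otherwise a Date whose local rendering equals the GMT wall-clock.
        static Date toDisplayDate(long epochMillisGmt) {
            if (epochMillisGmt == 0L) {
                return null;
            }
            int offsetMillis = TimeZone.getDefault().getOffset(epochMillisGmt);
            // JS does value + 60000 * getTimezoneOffset(); in Java the sign flips.
            return new Date(epochMillisGmt - offsetMillis);
        }
    }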
http://git-wip-us.apache.org/repos/asf/kylin/blob/a5410681/webapp/app/less/app.less
----------------------------------------------------------------------
diff --git a/webapp/app/less/app.less b/webapp/app/less/app.less
index 40d3280..b6b3131 100644
--- a/webapp/app/less/app.less
+++ b/webapp/app/less/app.less
@@ -784,3 +784,7 @@ input[placeholder] {
 input:-moz-placeholder {
   text-overflow: ellipsis;
 }
+
+.dropdown-menu{
+  z-index:9999;
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/a5410681/webapp/app/partials/cubeDesigner/refresh_settings.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/cubeDesigner/refresh_settings.html b/webapp/app/partials/cubeDesigner/refresh_settings.html
index f371c33..15dd4af 100755
--- a/webapp/app/partials/cubeDesigner/refresh_settings.html
+++ b/webapp/app/partials/cubeDesigner/refresh_settings.html
@@ -131,7 +131,7 @@
                       Please input start date when partition date column is defined in model.
                     </small>
                     <!--vier model will convert use filter-->
-                    <span ng-if="state.mode=='view'&&metaModel.model.partition_desc.partition_date_column">{{(cubeMetaFrame.partition_date_start)|reverseToGMT0 }}</span>
+                    <span ng-if="state.mode=='view' && metaModel.model.partition_desc.partition_date_column!=null && metaModel.model.partition_desc.partition_date_column">{{(cubeMetaFrame.partition_date_start)|reverseToGMT0 }}</span>
                   </div>
                 </div>
               </div>


[48/50] [abbrv] kylin git commit: KYLIN-1464 prevent NullPointerException when CubeDesc init failed due to IllegalStateException

Posted by li...@apache.org.
KYLIN-1464 prevent NullPointerException when CubeDesc init failed due to IllegalStateException

Signed-off-by: Hongbin Ma <ma...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ed57c3b3
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ed57c3b3
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ed57c3b3

Branch: refs/heads/master
Commit: ed57c3b30c8680c2bcb73b6d8b889d05b952d596
Parents: 26233f7
Author: John Zhao <yu...@ebay.com>
Authored: Wed Mar 2 14:51:58 2016 -0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Thu Mar 10 09:27:10 2016 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/cube/CubeInstance.java     | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ed57c3b3/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
index 2862d4f..d89e736 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
@@ -125,7 +125,12 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
 
     @Override
     public DataModelDesc getDataModelDesc() {
-        return this.getDescriptor().getModel();
+        CubeDesc cubeDesc = this.getDescriptor();
+        if (cubeDesc != null) {
+            return cubeDesc.getModel();
+        } else {
+            return null;
+        }
     }
 
     public boolean isReady() {

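The committed guard keeps getDataModelDesc() from throwing when getDescriptor() returns null after a failed CubeDesc init. The same pattern generalizes to a small null-safe mapper; a sketch assuming the build targets Java 8, where Optional is available (the explicit if/else above is the conservative choice for earlier JDKs):

    import java.util.Optional;
    import java.util.function.Function;

    final class NullSafe {
        // Generic form of the committed guard: map through a possibly-null
        // intermediate object without risking a NullPointerException.
        static <A, B> B map(A maybeNull, Function<A, B> f) {
            return Optional.ofNullable(maybeNull).map(f).orElse(null);
        }

        public static void main(String[] args) {
            // getDataModelDesc() would become: map(getDescriptor(), CubeDesc::getModel)
            System.out.println(map("cube-desc", String::length)); // 9
            System.out.println(map((Object) null, Object::toString)); // null
        }
    }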

[05/50] [abbrv] kylin git commit: KYLIN-579 Unload Hive table from kylin

Posted by li...@apache.org.
KYLIN-579 Unload Hive table from kylin

Signed-off-by: wangxianbin1987 <wa...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/2e1d2f6b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/2e1d2f6b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/2e1d2f6b

Branch: refs/heads/master
Commit: 2e1d2f6b62903b16b17bd2442e3456107dc0aa6a
Parents: 6ee409c
Author: wangxianbin1987 <wa...@gmail.com>
Authored: Thu Feb 25 19:22:34 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Feb 26 16:18:50 2016 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/cube/CubeManager.java |  2 +
 .../apache/kylin/metadata/MetadataManager.java  | 30 +++++++++
 .../kylin/metadata/project/ProjectManager.java  | 12 ++++
 .../kylin/rest/controller/TableController.java  | 59 ++++++++++++++++-
 .../apache/kylin/rest/service/CubeService.java  | 14 +++++
 .../apache/kylin/rest/service/ModelService.java | 15 +++++
 .../kylin/rest/service/ProjectService.java      | 20 ++++++
 .../source/hive/HiveSourceTableLoader.java      |  6 ++
 webapp/app/js/controllers/sourceMeta.js         | 66 ++++++++++++++++++++
 webapp/app/js/services/tables.js                |  1 +
 .../app/partials/tables/source_table_tree.html  |  6 +-
 webapp/app/partials/tables/table_unload.html    | 33 ++++++++++
 12 files changed, 259 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index 84dd30a..4951ce6 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -46,7 +46,9 @@ import org.apache.kylin.metadata.MetadataManager;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.project.ProjectManager;
+import org.apache.kylin.metadata.project.RealizationEntry;
 import org.apache.kylin.metadata.realization.IRealization;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.apache.kylin.metadata.realization.IRealizationProvider;

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
index 80ee8b3..9f2a934 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
@@ -197,6 +197,12 @@ public class MetadataManager {
         srcTableMap.put(srcTable.getIdentity(), srcTable);
     }
 
+    public void removeSourceTable(String tableIdentity) throws IOException {
+        String path = TableDesc.concatResourcePath(tableIdentity);
+        getStore().deleteResource(path);
+        srcTableMap.remove(tableIdentity);
+    }
+
     private void init(KylinConfig config) throws IOException {
         this.config = config;
         this.srcTableMap = new CaseInsensitiveStringCache<TableDesc>(config, Broadcaster.TYPE.TABLE);
@@ -336,6 +342,24 @@ public class MetadataManager {
         return new ArrayList<>(ret);
     }
 
+    public boolean isTableInModel(String tableName, String projectName) throws IOException {
+        for(DataModelDesc modelDesc : getModels(projectName)) {
+            if(modelDesc.getAllTables().contains(tableName.toUpperCase())) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    public boolean isTableInAnyModel(String tableName) {
+        for(DataModelDesc modelDesc : getModels()) {
+            if(modelDesc.getAllTables().contains(tableName.toUpperCase())){
+                return true;
+            }
+        }
+        return false;
+    }
+
     private void reloadAllDataModel() throws IOException {
         ResourceStore store = getStore();
         logger.debug("Reloading DataModel from folder " + store.getReadableResourcePath(ResourceStore.DATA_MODEL_DESC_RESOURCE_ROOT));
@@ -441,6 +465,12 @@ public class MetadataManager {
         srcTableExdMap.put(tableId, tableExdProperties);
     }
 
+    public void removeTableExd(String tableIdentity) throws IOException {
+        String path = TableDesc.concatExdResourcePath(tableIdentity);
+        getStore().deleteResource(path);
+        srcTableExdMap.remove(tableIdentity);
+    }
+
     public String appendDBName(String table) {
 
         if (table.indexOf(".") > 0)

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
index 45bbb1b..f73239c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
@@ -286,6 +286,18 @@ public class ProjectManager {
         return projectInstance;
     }
 
+    public void removeTableDescFromProject(String tableIdentities, String projectName) throws IOException {
+        MetadataManager metaMgr = getMetadataManager();
+        ProjectInstance projectInstance = getProject(projectName);
+        TableDesc table = metaMgr.getTableDesc(tableIdentities);
+        if (table == null) {
+            throw new IllegalStateException("Cannot find table '" + tableIdentities + "' in metadata manager");
+        }
+
+        projectInstance.removeTable(table.getIdentity());
+        updateProject(projectInstance);
+    }
+
     public List<ProjectInstance> findProjects(RealizationType type, String realizationName) {
         List<ProjectInstance> result = Lists.newArrayList();
         for (ProjectInstance prj : projectMap.values()) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
index 39af7db..98e8d58 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
@@ -21,6 +21,7 @@ package org.apache.kylin.rest.controller;
 import java.io.IOException;
 import java.util.*;
 
+import com.google.common.collect.Sets;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.JsonUtil;
@@ -33,6 +34,8 @@ import org.apache.kylin.rest.request.CardinalityRequest;
 import org.apache.kylin.rest.request.StreamingRequest;
 import org.apache.kylin.rest.response.TableDescResponse;
 import org.apache.kylin.rest.service.CubeService;
+import org.apache.kylin.rest.service.ModelService;
+import org.apache.kylin.rest.service.ProjectService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -55,6 +58,10 @@ public class TableController extends BasicController {
 
     @Autowired
     private CubeService cubeMgmtService;
+    @Autowired
+    private ProjectService projectService;
+    @Autowired
+    private ModelService modelService;
 
     /**
      * Get available table list of the input database
@@ -124,21 +131,67 @@ public class TableController extends BasicController {
         cubeMgmtService.syncTableToProject(loaded, project);
         Map<String, String[]> result = new HashMap<String, String[]>();
         result.put("result.loaded", loaded);
-        result.put("result.unloaded", new String[] {});
+        result.put("result.unloaded", new String[]{});
+        return result;
+    }
+
+    @RequestMapping(value = "/{tables}/{project}", method = { RequestMethod.DELETE })
+    @ResponseBody
+    public Map<String, String[]> unLoadHiveTables(@PathVariable String tables, @PathVariable String project) {
+        Set<String> unLoadSuccess = Sets.newHashSet();
+        Set<String> unLoadFail = Sets.newHashSet();
+        Map<String, String[]> result = new HashMap<String, String[]>();
+        for (String tableName : tables.split(",")) {
+            if (unLoadHiveTable(tableName, project)) {
+                unLoadSuccess.add(tableName);
+            } else {
+                unLoadFail.add(tableName);
+            }
+        }
+        result.put("result.unload.success", (String[]) unLoadSuccess.toArray(new String[unLoadSuccess.size()]));
+        result.put("result.unload.fail", (String[]) unLoadFail.toArray(new String[unLoadFail.size()]));
         return result;
     }
 
+    /**
+     * A table may be referenced by several projects, and Kylin keeps only one copy of
+     * the metadata for each table; that's why there are two if statements here.
+     * @param tableName
+     * @param project
+     * @return
+     */
+    private boolean unLoadHiveTable(String tableName, String project) {
+        boolean rtn = false;
+        try {
+            if (!modelService.isTableInModel(tableName, project)) {
+                cubeMgmtService.removeTableFromProject(tableName, project);
+                rtn = true;
+            }
+        } catch (IOException e) {
+            logger.error(e.getMessage(), e);
+        }
+        if(!projectService.isTableInAnyProject(tableName) && !modelService.isTableInAnyModel(tableName)) {
+            try {
+                cubeMgmtService.unLoadHiveTable(tableName);
+                rtn = true;
+            } catch (IOException e) {
+                logger.error(e.getMessage(), e);
+                rtn = false;
+            }
+        }
+        return rtn;
+    }
 
     @RequestMapping(value = "/addStreamingSrc", method = { RequestMethod.POST })
     @ResponseBody
     public Map<String, String> addStreamingTable(@RequestBody StreamingRequest request) throws IOException {
         Map<String, String> result = new HashMap<String, String>();
         String project = request.getProject();
-        TableDesc desc = JsonUtil.readValue(request.getTableData(),TableDesc.class);
+        TableDesc desc = JsonUtil.readValue(request.getTableData(), TableDesc.class);
         desc.setUuid(UUID.randomUUID().toString());
         MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
         metaMgr.saveSourceTable(desc);
-        cubeMgmtService.syncTableToProject(new String[]{desc.getName()},project);
+        cubeMgmtService.syncTableToProject(new String[]{desc.getName()}, project);
         result.put("success","true");
         return result;
     }
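
For reference, the new endpoint can be exercised with any HTTP client; below is a minimal Java sketch. The /kylin/api prefix, host, port and ADMIN credentials are assumptions (only the "/{tables}/{project}" suffix comes from the mapping above):

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class UnloadTableClientSketch {
    public static void main(String[] args) throws IOException {
        // Assumed full path: class-level mapping (not shown in this diff) plus
        // the "/{tables}/{project}" suffix declared on unLoadHiveTables().
        URL url = new URL("http://localhost:7070/kylin/api/tables/DEFAULT.MY_TABLE/my_project");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("DELETE");
        String auth = Base64.getEncoder().encodeToString("ADMIN:KYLIN".getBytes(StandardCharsets.UTF_8));
        conn.setRequestProperty("Authorization", "Basic " + auth);
        System.out.println("HTTP " + conn.getResponseCode());
        conn.disconnect();
    }
}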

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 5d2776f..0c57d00 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -557,10 +557,24 @@ public class CubeService extends BasicService {
     }
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
+    public void unLoadHiveTable(String tableName) throws IOException {
+        String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
+        tableName = dbTableName[0] + "." + dbTableName[1];
+        HiveSourceTableLoader.unLoadHiveTable(tableName.toUpperCase());
+    }
+
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
     public void syncTableToProject(String[] tables, String project) throws IOException {
         getProjectManager().addTableDescToProject(tables, project);
     }
 
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
+    public void removeTableFromProject(String tableName, String projectName) throws IOException {
+        String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
+        tableName = dbTableName[0] + "." + dbTableName[1];
+        getProjectManager().removeTableDescFromProject(tableName, projectName);
+    }
+
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_MODELER + " or " + Constant.ACCESS_HAS_ROLE_ADMIN)
     public void calculateCardinalityIfNotPresent(String[] tables, String submitter) throws IOException {
         MetadataManager metaMgr = getMetadataManager();
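
The same db.table normalization recurs in several of the services touched by this commit. A minimal sketch of what it does, assuming HadoopUtil.parseHiveTableName() returns a {database, table} pair and falls back to the default database when the name has no "db." prefix:

import org.apache.kylin.engine.mr.HadoopUtil;

public class TableNameNormalizationSketch {
    public static void main(String[] args) {
        // Assumption: with no "db." prefix the database element defaults
        // (e.g. to "DEFAULT"); with a prefix the name is split on the dot.
        String[] dbTableName = HadoopUtil.parseHiveTableName("my_table");
        String normalized = dbTableName[0] + "." + dbTableName[1];
        System.out.println(normalized);
    }
}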

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/server/src/main/java/org/apache/kylin/rest/service/ModelService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/ModelService.java b/server/src/main/java/org/apache/kylin/rest/service/ModelService.java
index 9dae312..9d8ccfb 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/ModelService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/ModelService.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.engine.mr.HadoopUtil;
 import org.apache.kylin.invertedindex.model.IIDesc;
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.project.ProjectInstance;
@@ -128,4 +129,18 @@ public class ModelService extends BasicService {
 
         accessService.clean(desc, true);
     }
+
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
+    public boolean isTableInAnyModel(String tableName) {
+        String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
+        tableName = dbTableName[0] + "." + dbTableName[1];
+        return getMetadataManager().isTableInAnyModel(tableName);
+    }
+
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
+    public boolean isTableInModel(String tableName, String projectName) throws IOException {
+        String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
+        tableName = dbTableName[0] + "." + dbTableName[1];
+        return getMetadataManager().isTableInModel(tableName, projectName);
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/server/src/main/java/org/apache/kylin/rest/service/ProjectService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/ProjectService.java b/server/src/main/java/org/apache/kylin/rest/service/ProjectService.java
index be70534..ad5a982 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/ProjectService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/ProjectService.java
@@ -23,6 +23,7 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.project.ProjectManager;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.InternalErrorException;
 import org.apache.kylin.rest.request.CreateProjectRequest;
@@ -104,4 +105,23 @@ public class ProjectService extends BasicService {
         accessService.clean(project, true);
     }
 
+    public boolean isTableInAnyProject(String tableName) {
+        for(ProjectInstance projectInstance : ProjectManager.getInstance(getConfig()).listAllProjects()) {
+            if(projectInstance.containsTable(tableName.toUpperCase())) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    public boolean isTableInProject(String tableName, String projectName) {
+        ProjectInstance projectInstance = ProjectManager.getInstance(getConfig()).getProject(projectName);
+        if(projectInstance != null) {
+            if(projectInstance.containsTable(tableName.toUpperCase())) {
+                return true;
+            }
+        }
+        return false;
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
index bc722b3..f2f2d2a 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
@@ -78,6 +78,12 @@ public class HiveSourceTableLoader {
         return loadedTables;
     }
 
+    public static void unLoadHiveTable(String hiveTable) throws IOException {
+        MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
+        metaMgr.removeSourceTable(hiveTable);
+        metaMgr.removeTableExd(hiveTable);
+    }
+
     private static List<String> extractHiveTables(String database, Set<String> tables, KylinConfig config) throws IOException {
 
         List<String> loadedTables = Lists.newArrayList();

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/webapp/app/js/controllers/sourceMeta.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/sourceMeta.js b/webapp/app/js/controllers/sourceMeta.js
index abdeeb8..cbd9f52 100755
--- a/webapp/app/js/controllers/sourceMeta.js
+++ b/webapp/app/js/controllers/sourceMeta.js
@@ -100,6 +100,25 @@ KylinApp
       });
     };
 
+    $scope.openUnLoadModal = function () {
+      $modal.open({
+        templateUrl: 'removeHiveTable.html',
+        controller: ModalInstanceCtrl,
+        backdrop : 'static',
+        resolve: {
+          tableNames: function () {
+            return $scope.tableNames;
+          },
+          projectName: function () {
+            return $scope.projectModel.selectedProject;
+          },
+          scope: function () {
+            return $scope;
+          }
+        }
+      });
+    };
+
     var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope) {
       $scope.tableNames = "";
       $scope.projectName = projectName;
@@ -152,6 +171,53 @@ KylinApp
           loadingRequest.hide();
         })
       }
+
+      $scope.remove = function () {
+        if ($scope.tableNames.trim() === "") {
+          SweetAlert.swal('', 'Please input table(s) you want to unload.', 'info');
+          return;
+        }
+
+        if (!$scope.projectName) {
+          SweetAlert.swal('', 'Please choose your project first.', 'info');
+          return;
+        }
+
+        $scope.cancel();
+        loadingRequest.show();
+        TableService.unLoadHiveTable({tableName: $scope.tableNames, action: projectName}, {}, function (result) {
+          var removedTableInfo = "";
+          angular.forEach(result['result.unload.success'], function (table) {
+            removedTableInfo += "\n" + table;
+          })
+          var unRemovedTableInfo = "";
+          angular.forEach(result['result.unload.fail'], function (table) {
+            unRemovedTableInfo += "\n" + table;
+          })
+
+          if (result['result.unload.fail'].length != 0 && result['result.unload.success'].length == 0) {
+            SweetAlert.swal('Failed!', 'Failed to unload the following table(s): ' + unRemovedTableInfo, 'error');
+          }
+          if (result['result.unload.success'].length != 0 && result['result.unload.fail'].length == 0) {
+            SweetAlert.swal('Success!', 'The following table(s) have been successfully unloaded: ' + removedTableInfo, 'success');
+          }
+          if (result['result.unload.success'].length != 0 && result['result.unload.fail'].length != 0) {
+            SweetAlert.swal('Partially unloaded!', 'The following table(s) have been successfully unloaded: ' + removedTableInfo + "\n\nFailed to unload the following table(s):" + unRemovedTableInfo, 'warning');
+          }
+          loadingRequest.hide();
+          scope.aceSrcTbLoaded(true);
+
+        }, function (e) {
+          if (e.data && e.data.exception) {
+            var message = e.data.exception;
+            var msg = !!(message) ? message : 'Failed to take action.';
+            SweetAlert.swal('Oops...', msg, 'error');
+          } else {
+            SweetAlert.swal('Oops...', "Failed to take action.", 'error');
+          }
+          loadingRequest.hide();
+        })
+      }
     };
 
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/webapp/app/js/services/tables.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/services/tables.js b/webapp/app/js/services/tables.js
index 3b5e9f4..ca7fc42 100755
--- a/webapp/app/js/services/tables.js
+++ b/webapp/app/js/services/tables.js
@@ -23,6 +23,7 @@ KylinApp.factory('TableService', ['$resource', function ($resource, config) {
     getExd: {method: 'GET', params: {action: 'exd-map'}, isArray: false},
     reload: {method: 'PUT', params: {action: 'reload'}, isArray: false},
     loadHiveTable: {method: 'POST', params: {}, isArray: false},
+    unLoadHiveTable: {method: 'DELETE', params: {}, isArray: false},
     addStreamingSrc: {method: 'POST', params: {action:'addStreamingSrc'}, isArray: false},
     genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false}
   });

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/webapp/app/partials/tables/source_table_tree.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/tables/source_table_tree.html b/webapp/app/partials/tables/source_table_tree.html
index 767eb43..4eddc4f 100755
--- a/webapp/app/partials/tables/source_table_tree.html
+++ b/webapp/app/partials/tables/source_table_tree.html
@@ -25,8 +25,9 @@
         <!--button-->
         <div class="col-xs-5" style="padding-left: 0px;margin-top: 20px;">
             <div class="pull-right">
-                <a class="btn btn-xs btn-primary" tooltip="Load Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openModal()"><i class="fa fa-download"></i></a>
-                <a class="btn btn-xs btn-primary" tooltip="Add Streaming Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openStreamingSourceModal()"><i class="fa fa-area-chart"></i></a>
+              <a class="btn btn-xs btn-primary" tooltip="Load Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openModal()"><i class="fa fa-download"></i></a>
+              <a class="btn btn-xs btn-info" tooltip="UnLoad Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openUnLoadModal()"><i class="fa fa-remove"></i></a>
+              <a class="btn btn-xs btn-primary" tooltip="Add Streaming Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openStreamingSourceModal()"><i class="fa fa-area-chart"></i></a>
             </div>
         </div>
 
@@ -47,3 +48,4 @@
 </div>
 
 <div ng-include="'partials/tables/table_load.html'"></div>
+<div ng-include="'partials/tables/table_unload.html'"></div>

http://git-wip-us.apache.org/repos/asf/kylin/blob/2e1d2f6b/webapp/app/partials/tables/table_unload.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/tables/table_unload.html b/webapp/app/partials/tables/table_unload.html
new file mode 100644
index 0000000..a1fcf6f
--- /dev/null
+++ b/webapp/app/partials/tables/table_unload.html
@@ -0,0 +1,33 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+  <script type="text/ng-template" id="removeHiveTable.html">
+    <div class="modal-header">
+      <h4>UnLoad Hive Table Metadata</h4>
+    </div>
+    <div class="modal-body">
+      <span><strong>Project: </strong>{{ $parent.projectName!=null?$parent.projectName:'NULL'}}</span>
+      <label for="tables">Table Names: (separate with commas)</label>
+            <textarea ng-model="$parent.tableNames" class="form-control" id="tables"
+                      placeholder="table1,table2. By default the system will use 'Default' as the database; you can specify a database like 'database.table'"></textarea>
+    </div>
+    <div class="modal-footer">
+      <button class="btn btn-primary" ng-click="remove()">Unload</button>
+      <button class="btn btn-primary" ng-click="cancel()">Cancel</button>
+    </div>
+  </script>


[33/50] [abbrv] kylin git commit: very minor, eclipse settings update

Posted by li...@apache.org.
very minor, eclipse settings update


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/11dc04b6
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/11dc04b6
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/11dc04b6

Branch: refs/heads/master
Commit: 11dc04b6117c9a146dc54f919cee0354ceac1e24
Parents: ecd1266
Author: Yang Li <li...@apache.org>
Authored: Sat Mar 5 08:05:37 2016 +0800
Committer: Yang Li <li...@apache.org>
Committed: Sat Mar 5 08:05:37 2016 +0800

----------------------------------------------------------------------
 core-cube/.settings/org.eclipse.core.resources.prefs | 1 -
 1 file changed, 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/11dc04b6/core-cube/.settings/org.eclipse.core.resources.prefs
----------------------------------------------------------------------
diff --git a/core-cube/.settings/org.eclipse.core.resources.prefs b/core-cube/.settings/org.eclipse.core.resources.prefs
index 04cfa2c..365bbd6 100644
--- a/core-cube/.settings/org.eclipse.core.resources.prefs
+++ b/core-cube/.settings/org.eclipse.core.resources.prefs
@@ -2,5 +2,4 @@ eclipse.preferences.version=1
 encoding//src/main/java=UTF-8
 encoding//src/main/resources=UTF-8
 encoding//src/test/java=UTF-8
-encoding//src/test/resources=UTF-8
 encoding/<project>=UTF-8


[42/50] [abbrv] kylin git commit: KYLIN-1421 fix the “Last build time” is always empty issue

Posted by li...@apache.org.
KYLIN-1421 fix the “Last build time” is always empty issue


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0ec3ed0e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0ec3ed0e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0ec3ed0e

Branch: refs/heads/master
Commit: 0ec3ed0e899eeab1497667ed28f10226aec520e7
Parents: d1a574b
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 16 14:07:00 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Mar 8 12:21:56 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java    | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0ec3ed0e/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
index b4182fe..f5cb66e 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
@@ -118,6 +118,7 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
             CubeSegment segment = cubeManager.appendSegments(cubeInstance, streamingBatch.getTimeRange().getFirst(), streamingBatch.getTimeRange().getSecond(), false, false);
             segment.setLastBuildJobID(segment.getUuid()); // give a fake job id
             segment.setInputRecords(streamingBatch.getMessages().size());
+            segment.setLastBuildTime(System.currentTimeMillis());
             return segment;
         } catch (IOException e) {
             throw new RuntimeException("failed to create IBuildable", e);


[19/50] [abbrv] kylin git commit: KYLIN-1054 Update beeline params in testcases

Posted by li...@apache.org.
KYLIN-1054 Update beeline params in testcases


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/cf05409c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/cf05409c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/cf05409c

Branch: refs/heads/master
Commit: cf05409c75339356fe8af1661a6b6c6790a7192c
Parents: 098a853
Author: lidongsjtu <li...@apache.org>
Authored: Tue Mar 1 19:13:24 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Tue Mar 1 19:13:24 2016 +0800

----------------------------------------------------------------------
 examples/test_case_data/sandbox/hive-site.xml    | 2 +-
 examples/test_case_data/sandbox/kylin.properties | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/cf05409c/examples/test_case_data/sandbox/hive-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hive-site.xml b/examples/test_case_data/sandbox/hive-site.xml
index f4c7738..1e78107 100644
--- a/examples/test_case_data/sandbox/hive-site.xml
+++ b/examples/test_case_data/sandbox/hive-site.xml
@@ -533,7 +533,7 @@
 
     <property>
         <name>hive.server2.enable.doAs</name>
-        <value>false</value>
+        <value>true</value>
     </property>
 
     <property>

http://git-wip-us.apache.org/repos/asf/kylin/blob/cf05409c/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index a304cab..0c68a7e 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -21,7 +21,7 @@ kylin.storage.url=hbase
 kylin.hdfs.working.dir=/kylin
 
 # Parameters for beeline client
-kylin.hive.beeline.params=--hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://localhost:10000'
+kylin.hive.beeline.params=-n root --hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://localhost:10000'
 
 kylin.job.mapreduce.default.reduce.input.mb=500
 


[38/50] [abbrv] kylin git commit: KYLIN-1387 Streaming cubing doesn't generate cuboids files on HDFS, cause cube merge failure

Posted by li...@apache.org.
KYLIN-1387 Streaming cubing doesn't generate cuboids files on HDFS, cause cube merge failure

Conflicts:
	engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/929c7a49
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/929c7a49
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/929c7a49

Branch: refs/heads/master
Commit: 929c7a4908a3cd655ab31e71eb6453971f3acd36
Parents: 250978d
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 2 17:34:46 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Mar 8 12:19:36 2016 +0800

----------------------------------------------------------------------
 .../cube/inmemcubing/CompoundCuboidWriter.java  | 57 ++++++++++++++
 .../kylin/cube/inmemcubing/ICuboidWriter.java   |  4 +-
 .../kylin/job/constant/ExecutableConstants.java |  1 +
 .../kylin/engine/mr/steps/KVGTRecordWriter.java | 81 ++++++++++++++++++++
 .../mr/steps/MapContextGTRecordWriter.java      | 68 ++--------------
 .../streaming/cube/StreamingCubeBuilder.java    | 12 ++-
 .../storage/hbase/steps/HBaseCuboidWriter.java  | 24 +++---
 .../hbase/steps/HBaseMROutput2Transition.java   |  2 +-
 .../kylin/storage/hbase/steps/HBaseMRSteps.java |  2 +-
 .../hbase/steps/HBaseStreamingOutput.java       |  8 +-
 .../hbase/steps/SequenceFileCuboidWriter.java   | 75 ++++++++++++++++++
 11 files changed, 254 insertions(+), 80 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/CompoundCuboidWriter.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/CompoundCuboidWriter.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/CompoundCuboidWriter.java
new file mode 100644
index 0000000..46eef50
--- /dev/null
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/CompoundCuboidWriter.java
@@ -0,0 +1,57 @@
+/*
+ *  Licensed to the Apache Software Foundation (ASF) under one or more
+ *  contributor license agreements. See the NOTICE file distributed with
+ *  this work for additional information regarding copyright ownership.
+ *  The ASF licenses this file to You under the Apache License, Version 2.0
+ *  (the "License"); you may not use this file except in compliance with
+ *  the License. You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.kylin.cube.inmemcubing;
+
+import org.apache.kylin.gridtable.GTRecord;
+
+import java.io.IOException;
+
+/**
+ */
+public class CompoundCuboidWriter implements ICuboidWriter {
+
+    private Iterable<ICuboidWriter> cuboidWriters;
+
+    public CompoundCuboidWriter(Iterable<ICuboidWriter> cuboidWriters) {
+        this.cuboidWriters = cuboidWriters;
+
+    }
+
+    @Override
+    public void write(long cuboidId, GTRecord record) throws IOException {
+        for (ICuboidWriter writer : cuboidWriters) {
+            writer.write(cuboidId, record);
+        }
+    }
+
+    @Override
+    public void flush() throws IOException {
+        for (ICuboidWriter writer : cuboidWriters) {
+            writer.flush();
+        }
+
+    }
+
+    @Override
+    public void close() throws IOException {
+        for (ICuboidWriter writer : cuboidWriters) {
+            writer.close();
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ICuboidWriter.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ICuboidWriter.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ICuboidWriter.java
index 9e26e5e..e6cfa02 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ICuboidWriter.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ICuboidWriter.java
@@ -27,7 +27,7 @@ public interface ICuboidWriter {
 
     void write(long cuboidId, GTRecord record) throws IOException;
 
-    void flush();
+    void flush() throws IOException;
     
-    void close();
+    void close() throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java b/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
index ba50880..d370b0d 100644
--- a/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
+++ b/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
@@ -56,6 +56,7 @@ public final class ExecutableConstants {
     public static final String STEP_NAME_MERGE_CUBOID = "Merge Cuboid Data";
     public static final String STEP_NAME_UPDATE_CUBE_INFO = "Update Cube Info";
     public static final String STEP_NAME_GARBAGE_COLLECTION = "Garbage Collection";
+    public static final String STEP_NAME_GARBAGE_COLLECTION_HDFS = "Garbage Collection on HDFS";
 
     public static final String STEP_NAME_BUILD_II = "Build Inverted Index";
     public static final String STEP_NAME_CONVERT_II_TO_HFILE = "Convert Inverted Index Data to HFile";

http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/KVGTRecordWriter.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/KVGTRecordWriter.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/KVGTRecordWriter.java
new file mode 100644
index 0000000..e201705
--- /dev/null
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/KVGTRecordWriter.java
@@ -0,0 +1,81 @@
+package org.apache.kylin.engine.mr.steps;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.cuboid.Cuboid;
+import org.apache.kylin.cube.inmemcubing.ICuboidWriter;
+import org.apache.kylin.cube.kv.AbstractRowKeyEncoder;
+import org.apache.kylin.cube.kv.RowConstants;
+import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.engine.mr.ByteArrayWritable;
+import org.apache.kylin.gridtable.GTRecord;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ */
+public abstract class KVGTRecordWriter implements ICuboidWriter {
+
+    private static final Log logger = LogFactory.getLog(KVGTRecordWriter.class);
+    private Long lastCuboidId;
+    protected CubeSegment cubeSegment;
+    protected CubeDesc cubeDesc;
+
+    private AbstractRowKeyEncoder rowKeyEncoder;
+    private int dimensions;
+    private int measureCount;
+    private byte[] keyBuf;
+    private int[] measureColumnsIndex;
+    private ByteBuffer valueBuf = ByteBuffer.allocate(RowConstants.ROWVALUE_BUFFER_SIZE);
+    private ByteArrayWritable outputKey = new ByteArrayWritable();
+    private ByteArrayWritable outputValue = new ByteArrayWritable();
+    private long cuboidRowCount = 0;
+
+    //for shard
+
+    public KVGTRecordWriter(CubeDesc cubeDesc, CubeSegment cubeSegment) {
+        this.cubeDesc = cubeDesc;
+        this.cubeSegment = cubeSegment;
+        this.measureCount = cubeDesc.getMeasures().size();
+    }
+
+    @Override
+    public void write(long cuboidId, GTRecord record) throws IOException {
+
+        if (lastCuboidId == null || !lastCuboidId.equals(cuboidId)) {
+            if (lastCuboidId != null) {
+                logger.info("Cuboid " + lastCuboidId + " has " + cuboidRowCount + " rows");
+                cuboidRowCount = 0;
+            }
+            // output another cuboid
+            initVariables(cuboidId);
+            lastCuboidId = cuboidId;
+        }
+
+        cuboidRowCount++;
+        rowKeyEncoder.encode(record, record.getInfo().getPrimaryKey(), keyBuf);
+
+        //output measures
+        valueBuf.clear();
+        record.exportColumns(measureColumnsIndex, valueBuf);
+
+        outputKey.set(keyBuf, 0, keyBuf.length);
+        outputValue.set(valueBuf.array(), 0, valueBuf.position());
+        writeAsKeyValue(outputKey, outputValue);
+    }
+
+    protected abstract void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException;
+
+    private void initVariables(Long cuboidId) {
+        rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, Cuboid.findById(cubeDesc, cuboidId));
+        keyBuf = rowKeyEncoder.createBuf();
+
+        dimensions = Long.bitCount(cuboidId);
+        measureColumnsIndex = new int[measureCount];
+        for (int i = 0; i < measureCount; i++) {
+            measureColumnsIndex[i] = dimensions + i;
+        }
+    }
+}
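
KVGTRecordWriter factors the rowkey/measure encoding out of MapContextGTRecordWriter (refactored below) so that other sinks can reuse it. A minimal sketch of a custom sink under that contract; it is illustrative only and just counts the encoded pairs:

import java.io.IOException;

import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.cube.model.CubeDesc;
import org.apache.kylin.engine.mr.ByteArrayWritable;
import org.apache.kylin.engine.mr.steps.KVGTRecordWriter;

public class CountingGTRecordWriter extends KVGTRecordWriter {

    private long pairs = 0;

    public CountingGTRecordWriter(CubeDesc cubeDesc, CubeSegment cubeSegment) {
        super(cubeDesc, cubeSegment);
    }

    @Override
    protected void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException {
        pairs++; // a real sink would persist the encoded pair here
    }

    @Override
    public void flush() throws IOException {
        // nothing buffered in this sketch
    }

    @Override
    public void close() throws IOException {
        System.out.println("encoded " + pairs + " key/value pairs");
    }
}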

http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
index 8416d95..6b4d07d 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
@@ -1,76 +1,32 @@
 package org.apache.kylin.engine.mr.steps;
 
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.BitSet;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.MapContext;
 import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.cuboid.Cuboid;
-import org.apache.kylin.cube.inmemcubing.ICuboidWriter;
-import org.apache.kylin.cube.kv.AbstractRowKeyEncoder;
-import org.apache.kylin.cube.kv.RowConstants;
 import org.apache.kylin.cube.model.CubeDesc;
 import org.apache.kylin.engine.mr.ByteArrayWritable;
-import org.apache.kylin.gridtable.GTRecord;
+
+import java.io.IOException;
 
 /**
  */
-public class MapContextGTRecordWriter implements ICuboidWriter {
+public class MapContextGTRecordWriter extends KVGTRecordWriter {
 
     private static final Log logger = LogFactory.getLog(MapContextGTRecordWriter.class);
     protected MapContext<?, ?, ByteArrayWritable, ByteArrayWritable> mapContext;
-    private Long lastCuboidId;
-    protected CubeSegment cubeSegment;
-    protected CubeDesc cubeDesc;
-
-    private AbstractRowKeyEncoder rowKeyEncoder;
-    private int dimensions;
-    private int measureCount;
-    private byte[] keyBuf;
-    private int[] measureColumnsIndex;
-    private ByteBuffer valueBuf = ByteBuffer.allocate(RowConstants.ROWVALUE_BUFFER_SIZE);
-    private ByteArrayWritable outputKey = new ByteArrayWritable();
-    private ByteArrayWritable outputValue = new ByteArrayWritable();
-    private long cuboidRowCount = 0;
-
-    //for shard
 
     public MapContextGTRecordWriter(MapContext<?, ?, ByteArrayWritable, ByteArrayWritable> mapContext, CubeDesc cubeDesc, CubeSegment cubeSegment) {
+        super(cubeDesc, cubeSegment);
         this.mapContext = mapContext;
-        this.cubeDesc = cubeDesc;
-        this.cubeSegment = cubeSegment;
-        this.measureCount = cubeDesc.getMeasures().size();
     }
 
     @Override
-    public void write(long cuboidId, GTRecord record) throws IOException {
-
-        if (lastCuboidId == null || !lastCuboidId.equals(cuboidId)) {
-            if (lastCuboidId != null) {
-                logger.info("Cuboid " + lastCuboidId + " has " + cuboidRowCount + " rows");
-                cuboidRowCount = 0;
-            }
-            // output another cuboid
-            initVariables(cuboidId);
-            lastCuboidId = cuboidId;
-        }
-
-        cuboidRowCount++;
-        rowKeyEncoder.encode(record, record.getInfo().getPrimaryKey(), keyBuf);
-
-        //output measures
-        valueBuf.clear();
-        record.exportColumns(measureColumnsIndex, valueBuf);
-
-        outputKey.set(keyBuf, 0, keyBuf.length);
-        outputValue.set(valueBuf.array(), 0, valueBuf.position());
+    protected void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException {
         try {
-            mapContext.write(outputKey, outputValue);
+            mapContext.write(key, value);
         } catch (InterruptedException e) {
-            throw new RuntimeException(e);
+            throw new IOException(e);
         }
     }
 
@@ -84,14 +40,4 @@ public class MapContextGTRecordWriter implements ICuboidWriter {
 
     }
 
-    private void initVariables(Long cuboidId) {
-        rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, Cuboid.findById(cubeDesc, cuboidId));
-        keyBuf = rowKeyEncoder.createBuf();
-
-        dimensions = Long.bitCount(cuboidId);
-        measureColumnsIndex = new int[measureCount];
-        for (int i = 0; i < measureCount; i++) {
-            measureColumnsIndex[i] = dimensions + i;
-        }
-    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
index c4f2b7e..ec2ad91 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
@@ -99,6 +99,14 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
             throw new RuntimeException(e);
         } catch (ExecutionException e) {
             throw new RuntimeException("error build cube from StreamingBatch", e.getCause());
+        } catch (IOException e) {
+            throw new RuntimeException("error build cube from StreamingBatch", e.getCause());
+        } finally {
+            try {
+                cuboidWriter.close();
+            } catch (IOException e) {
+                throw new RuntimeException("error build cube from StreamingBatch", e.getCause());
+            }
         }
     }
 
@@ -107,7 +115,9 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
         CubeManager cubeManager = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
         final CubeInstance cubeInstance = cubeManager.reloadCubeLocal(cubeName);
         try {
-            return cubeManager.appendSegments(cubeInstance, streamingBatch.getTimeRange().getFirst(), streamingBatch.getTimeRange().getSecond(), false, false);
+            CubeSegment segment = cubeManager.appendSegments(cubeInstance, streamingBatch.getTimeRange().getFirst(), streamingBatch.getTimeRange().getSecond(), false, false);
+            segment.setLastBuildJobID(segment.getUuid()); // give a fake job id
+            return segment;
         } catch (IOException e) {
             throw new RuntimeException("failed to create IBuildable", e);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
index c4dc0b5..ddc868d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
@@ -33,9 +33,8 @@
  */
 package org.apache.kylin.storage.hbase.steps;
 
-import java.io.IOException;
-import java.util.List;
-
+import com.google.common.collect.Lists;
+import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
@@ -51,13 +50,14 @@ import org.apache.kylin.gridtable.GTRecord;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
+import java.io.IOException;
+import java.util.List;
 
 /**
  */
-public final class HBaseCuboidWriter implements ICuboidWriter {
+public class HBaseCuboidWriter implements ICuboidWriter {
 
-    private static final Logger logger = LoggerFactory.getLogger(HBaseStreamingOutput.class);
+    private static final Logger logger = LoggerFactory.getLogger(HBaseCuboidWriter.class);
 
     private static final int BATCH_PUT_THRESHOLD = 10000;
 
@@ -125,8 +125,8 @@ public final class HBaseCuboidWriter implements ICuboidWriter {
         }
     }
 
-    public final void flush() {
-        try {
+    @Override
+    public final void flush() throws IOException {
             if (!puts.isEmpty()) {
                 long t = System.currentTimeMillis();
                 if (hTable != null) {
@@ -136,14 +136,12 @@ public final class HBaseCuboidWriter implements ICuboidWriter {
                 logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
                 puts.clear();
             }
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
     }
 
     @Override
-    public void close() {
-
+    public void close() throws IOException {
+        flush();
+        IOUtils.closeQuietly(hTable);
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
index 4c2737d..7bb3647 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
@@ -80,7 +80,7 @@ public class HBaseMROutput2Transition implements IMROutput2 {
 
             @Override
             public void addStepPhase3_Cleanup(DefaultChainedExecutable jobFlow) {
-                jobFlow.addTask(steps.createMergeGCStep());
+                steps.addMergingGarbageCollectionSteps(jobFlow);
             }
         };
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
index 2a21640..a828728 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
@@ -161,7 +161,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
         toDeletePaths.addAll(getMergingHDFSPaths());
 
         HDFSPathGarbageCollectionStep step = new HDFSPathGarbageCollectionStep();
-        step.setName(ExecutableConstants.STEP_NAME_GARBAGE_COLLECTION);
+        step.setName(ExecutableConstants.STEP_NAME_GARBAGE_COLLECTION_HDFS);
         step.setDeletePaths(toDeletePaths);
         step.setJobId(jobId);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
index 770be3c..4cc4794 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
@@ -18,9 +18,11 @@
 package org.apache.kylin.storage.hbase.steps;
 
 import java.io.IOException;
+import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -31,6 +33,7 @@ import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.measure.hllc.HyperLogLogPlusCounter;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.inmemcubing.CompoundCuboidWriter;
 import org.apache.kylin.cube.inmemcubing.ICuboidWriter;
 import org.apache.kylin.engine.mr.HadoopUtil;
 import org.apache.kylin.engine.mr.common.BatchConstants;
@@ -54,7 +57,10 @@ public class HBaseStreamingOutput implements IStreamingOutput {
 
             final HTableInterface hTable;
             hTable = createHTable(cubeSegment);
-            return new HBaseCuboidWriter(cubeSegment, hTable);
+            List<ICuboidWriter> cuboidWriters = Lists.newArrayList();
+            cuboidWriters.add(new HBaseCuboidWriter(cubeSegment, hTable));
+            cuboidWriters.add(new SequenceFileCuboidWriter(cubeSegment.getCubeDesc(), cubeSegment));
+            return new CompoundCuboidWriter(cuboidWriters);
         } catch (IOException e) {
             throw new RuntimeException("failed to get ICuboidWriter", e);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/929c7a49/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
new file mode 100644
index 0000000..4d76522
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
@@ -0,0 +1,75 @@
+package org.apache.kylin.storage.hbase.steps;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.engine.mr.ByteArrayWritable;
+import org.apache.kylin.engine.mr.HadoopUtil;
+import org.apache.kylin.engine.mr.JobBuilderSupport;
+import org.apache.kylin.engine.mr.steps.KVGTRecordWriter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+/**
+ */
+public class SequenceFileCuboidWriter extends KVGTRecordWriter {
+
+    private static final Logger logger = LoggerFactory.getLogger(SequenceFileCuboidWriter.class);
+    private volatile SequenceFile.Writer writer = null;
+
+    public SequenceFileCuboidWriter(CubeDesc cubeDesc, CubeSegment segment) {
+        super(cubeDesc, segment);
+    }
+
+
+    @Override
+    protected void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException {
+        if (writer == null) {
+            synchronized (SequenceFileCuboidWriter.class) {
+                if (writer == null) {
+                    JobBuilderSupport jobBuilderSupport = new JobBuilderSupport(cubeSegment, "SYSTEM");
+                    String cuboidRoot = jobBuilderSupport.getCuboidRootPath(cubeSegment);
+                    Path cuboidPath = new Path(cuboidRoot);
+                    FileSystem fs = HadoopUtil.getFileSystem(cuboidRoot);
+                    try {
+                        if (fs.exists(cuboidPath)) {
+                            fs.delete(cuboidPath, true);
+                        }
+
+                        fs.mkdirs(cuboidPath);
+                    } finally {
+                        IOUtils.closeQuietly(fs);
+                    }
+
+                    Path cuboidFile = new Path(cuboidPath, "data.seq");
+                    logger.debug("Cuboid is written to " + cuboidFile);
+                    writer = SequenceFile.createWriter(HadoopUtil.getCurrentConfiguration(), SequenceFile.Writer.file(cuboidFile), SequenceFile.Writer.keyClass(Text.class), SequenceFile.Writer.valueClass(Text.class));
+                }
+            }
+        }
+
+        Text outputValue = new Text();
+        Text outputKey = new Text();
+        outputKey.set(key.array(), key.offset(), key.length());
+        outputValue.set(value.array(), value.offset(), value.length());
+        writer.append(outputKey, outputValue);
+    }
+
+    @Override
+    public void flush() throws IOException {
+        if (writer != null) {
+            writer.hflush();
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        IOUtils.closeQuietly(writer);
+    }
+}


[49/50] [abbrv] kylin git commit: KYLIN-1483 Command tool to visualize all cuboids in a cube/segment

Posted by li...@apache.org.
KYLIN-1483 Command tool to visualize all cuboids in a cube/segment


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c4be5461
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c4be5461
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c4be5461

Branch: refs/heads/master
Commit: c4be5461f070b073c12388d0ad9d8d9d3b7519f0
Parents: ed57c3b
Author: Hongbin Ma <ma...@apache.org>
Authored: Thu Mar 10 14:21:57 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Thu Mar 10 14:21:57 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/cube/cuboid/Cuboid.java    | 39 +++++++++++---------
 .../kylin/engine/mr/common/CubeStatsReader.java |  7 ++--
 2 files changed, 25 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c4be5461/core-cube/src/main/java/org/apache/kylin/cube/cuboid/Cuboid.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/Cuboid.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/Cuboid.java
index 513513c..2cd96a6 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/Cuboid.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/Cuboid.java
@@ -18,14 +18,11 @@
 
 package org.apache.kylin.cube.cuboid;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
+import com.google.common.base.Function;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.ComparisonChain;
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.cube.gridtable.CuboidToGridTableMapping;
 import org.apache.kylin.cube.model.AggregationGroup;
@@ -34,10 +31,13 @@ import org.apache.kylin.cube.model.CubeDesc;
 import org.apache.kylin.cube.model.RowKeyColDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
-import com.google.common.collect.ComparisonChain;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 public class Cuboid implements Comparable<Cuboid> {
 
@@ -222,7 +222,8 @@ public class Cuboid implements Comparable<Cuboid> {
             return true;
         }
 
-        hier: for (HierarchyMask hierarchyMasks : hierarchyMaskList) {
+        hier:
+        for (HierarchyMask hierarchyMasks : hierarchyMaskList) {
             long result = cuboidID & hierarchyMasks.fullMask;
             if (result > 0) {
                 // if match one of the hierarchy constrains, return true;
@@ -385,11 +386,13 @@ public class Cuboid implements Comparable<Cuboid> {
 
     public static String getDisplayName(long cuboidID, int dimensionCount) {
         StringBuilder sb = new StringBuilder();
-        String temp = Long.toString(cuboidID);
-        for (int i = 0; i < dimensionCount - temp.length(); i++) {
-            sb.append("0");
+        for (int i = 0; i < dimensionCount; ++i) {
+            if ((cuboidID & (1L << i)) == 0) {
+                sb.append('0');
+            } else {
+                sb.append('1');
+            }
         }
-        sb.append(temp);
-        return sb.toString();
+        return StringUtils.reverse(sb.toString());
     }
 }
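
With this change getDisplayName() renders a cuboid ID as a dimension bitmask (highest-order rowkey column first) instead of a zero-padded decimal. A small worked example based on the code above:

import org.apache.kylin.cube.cuboid.Cuboid;

public class CuboidDisplayNameExample {
    public static void main(String[] args) {
        // Cuboid ID 6 is binary 110: with 3 dimensions, the two high-order
        // columns are present and the low-order one is aggregated away.
        System.out.println(Cuboid.getDisplayName(6L, 3)); // prints "110"
        // The previous implementation printed the zero-padded decimal "006".
    }
}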

http://git-wip-us.apache.org/repos/asf/kylin/blob/c4be5461/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
index e842c01..57e93c3 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
@@ -241,7 +241,7 @@ public class CubeStatsReader {
         CuboidScheduler scheduler = new CuboidScheduler(cubeDesc);
         long baseCuboid = Cuboid.getBaseCuboidId(cubeDesc);
         int dimensionCount = Long.bitCount(baseCuboid);
-        printCuboidInfoTree(0L, baseCuboid, scheduler, cuboidRows, cuboidSizes, dimensionCount, 0, out);
+        printCuboidInfoTree(-1L, baseCuboid, scheduler, cuboidRows, cuboidSizes, dimensionCount, 0, out);
     }
 
     private static void printCuboidInfoTree(long parent, long cuboidID, final CuboidScheduler scheduler, Map<Long, Long> cuboidRows, Map<Long, Double> cuboidSizes, int dimensionCount, int depth, PrintWriter out) {
@@ -251,7 +251,7 @@ public class CubeStatsReader {
         Collections.sort(children);
 
         for (Long child : children) {
-            printCuboidInfoTree(parent, child, scheduler, cuboidRows, cuboidSizes, dimensionCount, depth + 1, out);
+            printCuboidInfoTree(cuboidID, child, scheduler, cuboidRows, cuboidSizes, dimensionCount, depth + 1, out);
         }
     }
 
@@ -268,7 +268,7 @@ public class CubeStatsReader {
         sb.append(", est row: ").append(rowCount).append(", est MB: ").append(formatDouble(size));
 
         if (parent != -1) {
-            sb.append(", shrink: ").append(formatDouble(1.0 * cuboidRows.get(cuboidID) / cuboidRows.get(parent))).append("%");
+            sb.append(", shrink: ").append(formatDouble(100.0 * cuboidRows.get(cuboidID) / cuboidRows.get(parent))).append("%");
         }
 
         out.println(sb.toString());
@@ -279,6 +279,7 @@ public class CubeStatsReader {
     }
 
     public static void main(String[] args) throws IOException {
+        System.out.println("CubeStatsReader is used to read cube statistic saved in metadata store");
         KylinConfig config = KylinConfig.getInstanceFromEnv();
         CubeInstance cube = CubeManager.getInstance(config).getCube(args[0]);
         List<CubeSegment> segments = cube.getSegments(SegmentStatusEnum.READY);
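
With this patch the shrink column becomes a true percentage: shrink = 100.0 * childRows / parentRows. For example, a child cuboid with 2,500 estimated rows under a parent with 10,000 rows now prints 25.0%, where the old 1.0 multiplier printed 0.25%. The companion hunks pass the current cuboidID (instead of the caller's stale parent) into the recursive call, and seed the root call with -1 so the base cuboid, having no parent, prints no shrink figure at all.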


[09/50] [abbrv] kylin git commit: KYLIN-1340 CubeMetaExtractor support streaming case and skip segments

Posted by li...@apache.org.
KYLIN-1340 CubeMetaExtractor support streaming case and skip segments


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4c08ded6
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4c08ded6
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4c08ded6

Branch: refs/heads/master
Commit: 4c08ded63f78aad93eefa9814d48af2486725967
Parents: 2e1d2f6
Author: honma <ho...@ebay.com>
Authored: Wed Feb 24 15:45:38 2016 +0800
Committer: honma <ho...@ebay.com>
Committed: Fri Feb 26 17:54:37 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/job/CubeMetaExtractor.java | 327 +++++++++++++++++++
 .../kylin/common/persistence/ResourceTool.java  |   2 +-
 .../engine/streaming/StreamingManager.java      | 100 +-----
 .../kylin/source/kafka/KafkaConfigManager.java  |  47 +--
 .../kylin/source/kafka/config/KafkaConfig.java  |   4 +-
 .../storage/hbase/util/CubeMetaExtractor.java   | 284 ----------------
 6 files changed, 345 insertions(+), 419 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/4c08ded6/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java b/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
new file mode 100644
index 0000000..527ef0a
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
@@ -0,0 +1,327 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.job;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.OptionGroup;
+import org.apache.commons.cli.Options;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.persistence.ResourceTool;
+import org.apache.kylin.common.util.AbstractApplication;
+import org.apache.kylin.common.util.OptionsHelper;
+import org.apache.kylin.cube.CubeDescManager;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.engine.streaming.StreamingConfig;
+import org.apache.kylin.engine.streaming.StreamingManager;
+import org.apache.kylin.invertedindex.IIInstance;
+import org.apache.kylin.job.dao.ExecutableDao;
+import org.apache.kylin.job.dao.ExecutablePO;
+import org.apache.kylin.job.exception.PersistentException;
+import org.apache.kylin.metadata.MetadataManager;
+import org.apache.kylin.metadata.model.DataModelDesc;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.project.ProjectManager;
+import org.apache.kylin.metadata.project.RealizationEntry;
+import org.apache.kylin.metadata.realization.IRealization;
+import org.apache.kylin.metadata.realization.RealizationRegistry;
+import org.apache.kylin.metadata.realization.RealizationType;
+import org.apache.kylin.source.kafka.KafkaConfigManager;
+import org.apache.kylin.source.kafka.config.KafkaConfig;
+import org.apache.kylin.storage.hybrid.HybridInstance;
+import org.apache.kylin.storage.hybrid.HybridManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+/**
+ * extract cube related info for debugging/distributing purpose
+ * TODO: deal with II case
+ */
+public class CubeMetaExtractor extends AbstractApplication {
+
+    private static final Logger logger = LoggerFactory.getLogger(CubeMetaExtractor.class);
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify which cube to extract").create("cube");
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_HYBRID = OptionBuilder.withArgName("hybrid").hasArg().isRequired(false).withDescription("Specify which hybrid to extract").create("hybrid");
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_INCLUDE_SEGMENTS = OptionBuilder.withArgName("includeSegments").hasArg().isRequired(false).withDescription("set this to true if you want to extract the segments info, related dicts, etc. Default true").create("includeSegments");
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false).withDescription("set this to true if you want to extract job info/outputs too. Default true").create("includeJobs");
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(false).withDescription("specify the dest dir to save the related metadata").create("destDir");
+
+    private Options options = null;
+    private KylinConfig kylinConfig;
+    private MetadataManager metadataManager;
+    private ProjectManager projectManager;
+    private HybridManager hybridManager;
+    private CubeManager cubeManager;
+    private StreamingManager streamingManager;
+    private KafkaConfigManager kafkaConfigManager;
+    private CubeDescManager cubeDescManager;
+    private ExecutableDao executableDao;
+    private RealizationRegistry realizationRegistry;
+
+    boolean includeSegments;
+    boolean includeJobs;
+
+    List<String> requiredResources = Lists.newArrayList();
+    List<String> optionalResources = Lists.newArrayList();
+    List<CubeInstance> cubesToTrimAndSave = Lists.newArrayList(); // these cubes need to be saved with their segments trimmed off
+
+    public CubeMetaExtractor() {
+        options = new Options();
+
+        OptionGroup realizationOrProject = new OptionGroup();
+        realizationOrProject.addOption(OPTION_CUBE);
+        realizationOrProject.addOption(OPTION_PROJECT);
+        realizationOrProject.addOption(OPTION_HYBRID);
+        realizationOrProject.setRequired(true);
+
+        options.addOptionGroup(realizationOrProject);
+        options.addOption(OPTION_INCLUDE_SEGMENTS);
+        options.addOption(OPTION_INCLUDE_JOB);
+        options.addOption(OPTION_DEST);
+
+    }
+
+    @Override
+    protected Options getOptions() {
+        return options;
+    }
+
+    @Override
+    protected void execute(OptionsHelper optionsHelper) throws Exception {
+        includeSegments = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENTS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENTS)) : true;
+        includeJobs = optionsHelper.hasOption(OPTION_INCLUDE_JOB) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : true;
+        String dest = null;
+        if (optionsHelper.hasOption(OPTION_DEST)) {
+            dest = optionsHelper.getOptionValue(OPTION_DEST);
+        }
+
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        metadataManager = MetadataManager.getInstance(kylinConfig);
+        projectManager = ProjectManager.getInstance(kylinConfig);
+        hybridManager = HybridManager.getInstance(kylinConfig);
+        cubeManager = CubeManager.getInstance(kylinConfig);
+        cubeDescManager = CubeDescManager.getInstance(kylinConfig);
+        streamingManager = StreamingManager.getInstance(kylinConfig);
+        kafkaConfigManager = KafkaConfigManager.getInstance(kylinConfig);
+        executableDao = ExecutableDao.getInstance(kylinConfig);
+        realizationRegistry = RealizationRegistry.getInstance(kylinConfig);
+
+        if (optionsHelper.hasOption(OPTION_PROJECT)) {
+            ProjectInstance projectInstance = projectManager.getProject(optionsHelper.getOptionValue(OPTION_PROJECT));
+            if (projectInstance == null) {
+                throw new IllegalArgumentException("Project " + optionsHelper.getOptionValue(OPTION_PROJECT) + " does not exist");
+            }
+            addRequired(requiredResources, ProjectInstance.concatResourcePath(projectInstance.getName()));
+            List<RealizationEntry> realizationEntries = projectInstance.getRealizationEntries();
+            for (RealizationEntry realizationEntry : realizationEntries) {
+                retrieveResourcePath(getRealization(realizationEntry));
+            }
+        } else if (optionsHelper.hasOption(OPTION_CUBE)) {
+            String cubeName = optionsHelper.getOptionValue(OPTION_CUBE);
+            IRealization realization;
+
+            if ((realization = cubeManager.getRealization(cubeName)) != null) {
+                retrieveResourcePath(realization);
+            } else {
+                throw new IllegalArgumentException("No cube found with name of " + cubeName);
+            }
+        } else if (optionsHelper.hasOption(OPTION_HYBRID)) {
+            String hybridName = optionsHelper.getOptionValue(OPTION_HYBRID);
+            IRealization realization;
+
+            if ((realization = hybridManager.getRealization(hybridName)) != null) {
+                retrieveResourcePath(realization);
+            } else {
+                throw new IllegalArgumentException("No hybrid found with name of" + hybridName);
+            }
+        }
+
+        executeExtraction(dest);
+    }
+
+    private void executeExtraction(String dest) {
+        logger.info("The resource paths going to be extracted:");
+        for (String s : requiredResources) {
+            logger.info(s + "(required)");
+        }
+        for (String s : optionalResources) {
+            logger.info(s + "(optional)");
+        }
+        for (CubeInstance cube : cubesToTrimAndSave) {
+            logger.info("Cube {} will be trimmed and extracted", cube);
+        }
+
+        if (dest == null) {
+            logger.info("Dest is not set, exit directly without extracting");
+        } else {
+            try {
+                ResourceStore src = ResourceStore.getStore(KylinConfig.getInstanceFromEnv());
+                ResourceStore dst = ResourceStore.getStore(KylinConfig.createInstanceFromUri(dest));
+
+                for (String path : requiredResources) {
+                    ResourceTool.copyR(src, dst, path);
+                }
+
+                for (String path : optionalResources) {
+                    try {
+                        ResourceTool.copyR(src, dst, path);
+                    } catch (Exception e) {
+                        logger.warn("Exception when copying optional resource {}. May be caused by resource missing. Ignore it.");
+                    }
+                }
+
+                for (CubeInstance cube : cubesToTrimAndSave) {
+                    CubeInstance trimmedCube = CubeInstance.getCopyOf(cube);
+                    trimmedCube.getSegments().clear();
+                    trimmedCube.setUuid(cube.getUuid());
+                    dst.putResource(trimmedCube.getResourcePath(), trimmedCube, CubeManager.CUBE_SERIALIZER);
+                }
+
+            } catch (IOException e) {
+                throw new RuntimeException("IOException", e);
+            }
+        }
+    }
+
+    private IRealization getRealization(RealizationEntry realizationEntry) {
+        return realizationRegistry.getRealization(realizationEntry.getType(), realizationEntry.getRealization());
+    }
+
+    private void dealWithStreaming(CubeInstance cube) {
+        for (StreamingConfig streamingConfig : streamingManager.listAllStreaming()) {
+            if (streamingConfig.getCubeName() != null && streamingConfig.getCubeName().equalsIgnoreCase(cube.getName())) {
+                requiredResources.add(StreamingConfig.concatResourcePath(streamingConfig.getName()));
+                requiredResources.add(KafkaConfig.concatResourcePath(streamingConfig.getName()));
+            }
+        }
+    }
+
+    private void retrieveResourcePath(IRealization realization) {
+
+        logger.info("Deal with realization {} of type {}", realization.getName(), realization.getType());
+
+        if (realization instanceof CubeInstance) {
+            CubeInstance cube = (CubeInstance) realization;
+            String descName = cube.getDescName();
+            CubeDesc cubeDesc = cubeDescManager.getCubeDesc(descName);
+            String modelName = cubeDesc.getModelName();
+            DataModelDesc modelDesc = metadataManager.getDataModelDesc(modelName);
+
+            dealWithStreaming(cube);
+
+            for (String tableName : modelDesc.getAllTables()) {
+                addRequired(requiredResources, TableDesc.concatResourcePath(tableName));
+                addOptional(optionalResources, TableDesc.concatExdResourcePath(tableName));
+            }
+
+            addRequired(requiredResources, DataModelDesc.concatResourcePath(modelDesc.getName()));
+            addRequired(requiredResources, CubeDesc.concatResourcePath(cubeDesc.getName()));
+
+            if (includeSegments) {
+                addRequired(requiredResources, CubeInstance.concatResourcePath(cube.getName()));
+                for (CubeSegment segment : cube.getSegments(SegmentStatusEnum.READY)) {
+                    for (String dictPath : segment.getDictionaryPaths()) {
+                        addRequired(requiredResources, dictPath);
+                    }
+                    for (String snapshotPath : segment.getSnapshotPaths()) {
+                        addRequired(requiredResources, snapshotPath);
+                    }
+                    addRequired(requiredResources, segment.getStatisticsResourcePath());
+
+                    if (includeJobs) {
+                        String lastJobId = segment.getLastBuildJobID();
+                        if (StringUtils.isEmpty(lastJobId)) {
+                            throw new RuntimeException("No job exists for segment: " + segment);
+                        } else {
+                            try {
+                                ExecutablePO executablePO = executableDao.getJob(lastJobId);
+                                addRequired(requiredResources, ExecutableDao.pathOfJob(lastJobId));
+                                addRequired(requiredResources, ExecutableDao.pathOfJobOutput(lastJobId));
+                                for (ExecutablePO task : executablePO.getTasks()) {
+                                    addRequired(requiredResources, ExecutableDao.pathOfJob(task.getUuid()));
+                                    addRequired(requiredResources, ExecutableDao.pathOfJobOutput(task.getUuid()));
+                                }
+                            } catch (PersistentException e) {
+                                throw new RuntimeException("PersistentException", e);
+                            }
+                        }
+                    } else {
+                        logger.info("Job info will not be extracted");
+                    }
+                }
+            } else {
+                if (includeJobs) {
+                    logger.warn("It's useless to set includeJobs to true when includeSegments is set to false");
+                }
+
+                cubesToTrimAndSave.add(cube);
+            }
+        } else if (realization instanceof HybridInstance) {
+            HybridInstance hybridInstance = (HybridInstance) realization;
+            addRequired(requiredResources, HybridInstance.concatResourcePath(hybridInstance.getName()));
+            for (IRealization iRealization : hybridInstance.getRealizations()) {
+                if (iRealization.getType() != RealizationType.CUBE) {
+                    throw new RuntimeException("Hybrid " + iRealization.getName() + " contains non cube child " + iRealization.getName() + " with type " + iRealization.getType());
+                }
+                retrieveResourcePath(iRealization);
+            }
+        } else if (realization instanceof IIInstance) {
+            throw new IllegalStateException("Does not support extract II instance or hybrid that contains II");
+        } else {
+            throw new IllegalStateException("Unknown realization type: " + realization.getType());
+        }
+    }
+
+    private void addRequired(List<String> resourcePaths, String record) {
+        logger.info("adding required resource {}", record);
+        resourcePaths.add(record);
+    }
+
+    private void addOptional(List<String> optionalPaths, String record) {
+        logger.info("adding optional resource {}", record);
+        optionalPaths.add(record);
+    }
+
+    public static void main(String[] args) {
+        CubeMetaExtractor extractor = new CubeMetaExtractor();
+        extractor.execute(args);
+    }
+}
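
Since the tool extends AbstractApplication, its main() simply delegates to execute(String[]). The sketch below is a hypothetical invocation, not part of this commit: "sample_cube" and "/tmp/cube_meta" are placeholder values, and a kylin.properties reachable through KylinConfig.getInstanceFromEnv() is assumed to be on the environment.

// Hypothetical driver for the extractor above; all literal values are
// placeholders for illustration only.
import org.apache.kylin.job.CubeMetaExtractor;

public class ExtractCubeMetaDemo {
    public static void main(String[] args) {
        CubeMetaExtractor extractor = new CubeMetaExtractor();
        // equivalent to the command line:
        //   -cube sample_cube -includeSegments false -destDir /tmp/cube_meta
        // includeSegments=false exercises the new trim-and-save path, which
        // copies the cube instance with its segment list cleared.
        extractor.execute(new String[] {
                "-cube", "sample_cube",
                "-includeSegments", "false",
                "-destDir", "/tmp/cube_meta" });
    }
}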

http://git-wip-us.apache.org/repos/asf/kylin/blob/4c08ded6/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
index 3b8e0c1..489e45a 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
@@ -115,7 +115,7 @@ public class ResourceTool {
         copyR(src, dst, "/");
     }
 
-    private static void copyR(ResourceStore src, ResourceStore dst, String path) throws IOException {
+    public static void copyR(ResourceStore src, ResourceStore dst, String path) throws IOException {
         ArrayList<String> children = src.listResources(path);
 
         // case of resource (not a folder)

http://git-wip-us.apache.org/repos/asf/kylin/blob/4c08ded6/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
index af04a11..e0b086d 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
@@ -34,24 +34,14 @@
 
 package org.apache.kylin.engine.streaming;
 
-import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
-import org.apache.kylin.common.persistence.RawResource;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.Serializer;
 import org.apache.kylin.common.restclient.Broadcaster;
@@ -60,13 +50,6 @@ import org.apache.kylin.metadata.MetadataConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.databind.JavaType;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.type.MapType;
-import com.fasterxml.jackson.databind.type.SimpleType;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-
 /**
  */
 public class StreamingManager {
@@ -121,18 +104,6 @@ public class StreamingManager {
         }
     }
 
-    private String formatStreamingConfigPath(String name) {
-        return ResourceStore.STREAMING_RESOURCE_ROOT + "/" + name + ".json";
-    }
-
-    private String formatStreamingOutputPath(String streaming, int partition) {
-        return ResourceStore.STREAMING_OUTPUT_RESOURCE_ROOT + "/" + streaming + "_" + partition + ".json";
-    }
-
-    private String formatStreamingOutputPath(String streaming, List<Integer> partitions) {
-        return ResourceStore.STREAMING_OUTPUT_RESOURCE_ROOT + "/" + streaming + "_" + StringUtils.join(partitions, "_") + ".json";
-    }
-
     public StreamingConfig getStreamingConfig(String name) {
         return streamingMap.get(name);
     }
@@ -214,77 +185,12 @@ public class StreamingManager {
         if (streamingMap.containsKey(streamingConfig.getName()))
             throw new IllegalArgumentException("StreamingConfig '" + streamingConfig.getName() + "' already exists");
 
-        String path = formatStreamingConfigPath(streamingConfig.getName());
+        String path = StreamingConfig.concatResourcePath(streamingConfig.getName());
         getStore().putResource(path, streamingConfig, StreamingConfig.SERIALIZER);
         streamingMap.put(streamingConfig.getName(), streamingConfig);
         return streamingConfig;
     }
 
-    public long getOffset(String streaming, int shard) {
-        final String resPath = formatStreamingOutputPath(streaming, shard);
-        InputStream inputStream = null; 
-        try {
-            final RawResource res = getStore().getResource(resPath);
-            if (res == null) {
-                return 0;
-            } else {
-            	inputStream = res.inputStream;
-                final BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
-                return Long.parseLong(br.readLine());
-            }
-        } catch (Exception e) {
-            logger.error("error get offset, path:" + resPath, e);
-            throw new RuntimeException("error get offset, path:" + resPath, e);
-        } finally {
-        	IOUtils.closeQuietly(inputStream);
-        }
-    }
-
-    public void updateOffset(String streaming, int shard, long offset) {
-        Preconditions.checkArgument(offset >= 0, "offset cannot be smaller than 0");
-        final String resPath = formatStreamingOutputPath(streaming, shard);
-        try {
-            getStore().putResource(resPath, new ByteArrayInputStream(Long.valueOf(offset).toString().getBytes()), getStore().getResourceTimestamp(resPath));
-        } catch (IOException e) {
-            logger.error("error update offset, path:" + resPath, e);
-            throw new RuntimeException("error update offset, path:" + resPath, e);
-        }
-    }
-
-    public Map<Integer, Long> getOffset(String streaming, List<Integer> partitions) {
-        Collections.sort(partitions);
-        final String resPath = formatStreamingOutputPath(streaming, partitions);
-        InputStream inputStream = null;
-        try {
-        	RawResource res = getStore().getResource(resPath);
-        	if (res == null)
-        		return Collections.emptyMap();
-        	
-        	inputStream = res.inputStream;
-            final HashMap<Integer, Long> result = mapper.readValue(inputStream, mapType);
-            return result;
-        } catch (IOException e) {
-            logger.error("error get offset, path:" + resPath, e);
-            throw new RuntimeException("error get offset, path:" + resPath, e);
-        } finally {
-        	IOUtils.closeQuietly(inputStream);
-        }
-    }
-
-    public void updateOffset(String streaming, HashMap<Integer, Long> offset) {
-        List<Integer> partitions = Lists.newLinkedList(offset.keySet());
-        Collections.sort(partitions);
-        final String resPath = formatStreamingOutputPath(streaming, partitions);
-        try {
-            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-            mapper.writeValue(baos, offset);
-            getStore().putResource(resPath, new ByteArrayInputStream(baos.toByteArray()), getStore().getResourceTimestamp(resPath));
-        } catch (IOException e) {
-            logger.error("error update offset, path:" + resPath, e);
-            throw new RuntimeException("error update offset, path:" + resPath, e);
-        }
-    }
-
     private StreamingConfig loadStreamingConfigAt(String path) throws IOException {
         ResourceStore store = getStore();
         StreamingConfig streamingDesc = store.getResource(path, StreamingConfig.class, STREAMING_SERIALIZER);
@@ -324,8 +230,4 @@ public class StreamingManager {
 
         logger.debug("Loaded " + streamingMap.size() + " StreamingConfig(s)");
     }
-
-    private final ObjectMapper mapper = new ObjectMapper();
-    private final JavaType mapType = MapType.construct(HashMap.class, SimpleType.construct(Integer.class), SimpleType.construct(Long.class));
-
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4c08ded6/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
index ac20fc3..1d07f23 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
@@ -36,7 +36,6 @@ package org.apache.kylin.source.kafka;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -52,11 +51,6 @@ import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.databind.JavaType;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.type.MapType;
-import com.fasterxml.jackson.databind.type.SimpleType;
-
 /**
  */
 public class KafkaConfigManager {
@@ -87,7 +81,7 @@ public class KafkaConfigManager {
         return ResourceStore.getStore(this.config);
     }
 
-    public static KafkaConfigManager getInstance(KylinConfig config){
+    public static KafkaConfigManager getInstance(KylinConfig config) {
         KafkaConfigManager r = CACHE.get(config);
         if (r != null) {
             return r;
@@ -98,16 +92,16 @@ public class KafkaConfigManager {
             if (r != null) {
                 return r;
             }
-            try{
-            r = new KafkaConfigManager(config);
-            CACHE.put(config, r);
-            if (CACHE.size() > 1) {
-                logger.warn("More than one KafkaConfigManager singleton exist");
+            try {
+                r = new KafkaConfigManager(config);
+                CACHE.put(config, r);
+                if (CACHE.size() > 1) {
+                    logger.warn("More than one KafkaConfigManager singleton exist");
+                }
+                return r;
+            } catch (IOException e) {
+                throw new IllegalStateException("Failed to init KafkaConfigManager from " + config, e);
             }
-            return r;
-        } catch (IOException e) {
-            throw new IllegalStateException("Failed to init KafkaConfigManager from " + config, e);
-        }
         }
     }
 
@@ -125,7 +119,7 @@ public class KafkaConfigManager {
     public KafkaConfig reloadKafkaConfigLocal(String name) throws IOException {
 
         // Save Source
-        String path = KafkaConfig.getKafkaResourcePath(name);
+        String path = KafkaConfig.concatResourcePath(name);
 
         // Reload the KafkaConfig
         KafkaConfig ndesc = loadKafkaConfigAt(path);
@@ -135,14 +129,6 @@ public class KafkaConfigManager {
         return ndesc;
     }
 
-    private boolean checkExistence(String name) {
-        return true;
-    }
-
-    private String formatStreamingConfigPath(String name) {
-        return ResourceStore.KAFKA_RESOURCE_ROOT + "/" + name + ".json";
-    }
-
     public boolean createKafkaConfig(String name, KafkaConfig config) {
 
         if (config == null || StringUtils.isEmpty(config.getName())) {
@@ -152,7 +138,7 @@ public class KafkaConfigManager {
         if (kafkaMap.containsKey(config.getName()))
             throw new IllegalArgumentException("KafkaConfig '" + config.getName() + "' already exists");
         try {
-            getStore().putResource(formatStreamingConfigPath(name), config, KafkaConfig.SERIALIZER);
+            getStore().putResource(KafkaConfig.concatResourcePath(name), config, KafkaConfig.SERIALIZER);
             kafkaMap.put(config.getName(), config);
             return true;
         } catch (IOException e) {
@@ -185,7 +171,7 @@ public class KafkaConfigManager {
 
     private KafkaConfig loadKafkaConfigAt(String path) throws IOException {
         ResourceStore store = getStore();
-        KafkaConfig kafkaConfig = store.getResource(path, KafkaConfig.class,KAFKA_SERIALIZER );
+        KafkaConfig kafkaConfig = store.getResource(path, KafkaConfig.class, KAFKA_SERIALIZER);
 
         if (StringUtils.isBlank(kafkaConfig.getName())) {
             throw new IllegalStateException("KafkaConfig name must not be blank");
@@ -193,7 +179,6 @@ public class KafkaConfigManager {
         return kafkaConfig;
     }
 
-
     public KafkaConfig getKafkaConfig(String name) {
         return kafkaMap.get(name);
     }
@@ -203,7 +188,7 @@ public class KafkaConfigManager {
             throw new IllegalArgumentException();
         }
 
-        String path = formatStreamingConfigPath(kafkaConfig.getName());
+        String path = KafkaConfig.concatResourcePath(kafkaConfig.getName());
         getStore().putResource(path, kafkaConfig, KafkaConfig.SERIALIZER);
     }
 
@@ -214,7 +199,6 @@ public class KafkaConfigManager {
         kafkaMap.remove(kafkaConfig.getName());
     }
 
-
     private void reloadAllKafkaConfig() throws IOException {
         ResourceStore store = getStore();
         logger.info("Reloading Kafka Metadata from folder " + store.getReadableResourcePath(ResourceStore.KAFKA_RESOURCE_ROOT));
@@ -245,7 +229,4 @@ public class KafkaConfigManager {
         logger.debug("Loaded " + kafkaMap.size() + " KafkaConfig(s)");
     }
 
-    private final ObjectMapper mapper = new ObjectMapper();
-    private final JavaType mapType = MapType.construct(HashMap.class, SimpleType.construct(Integer.class), SimpleType.construct(Long.class));
-
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4c08ded6/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConfig.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConfig.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConfig.java
index 100ca2d..1dce844 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConfig.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConfig.java
@@ -85,10 +85,10 @@ public class KafkaConfig extends RootPersistentEntity {
     private String parserProperties;
     
     public String getResourcePath() {
-        return getKafkaResourcePath(name);
+        return concatResourcePath(name);
     }
 
-    public static String getKafkaResourcePath(String streamingName) {
+    public static String concatResourcePath(String streamingName) {
         return ResourceStore.KAFKA_RESOURCE_ROOT + "/" + streamingName + MetadataConstants.FILE_SURFIX;
     }
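
The rename from getKafkaResourcePath to concatResourcePath matches the naming convention of the other metadata classes (StreamingConfig, CubeDesc, DataModelDesc) that the new extractor calls uniformly. Assuming KAFKA_RESOURCE_ROOT resolves to "/kafka" and FILE_SURFIX to ".json" (their values in this codebase), KafkaConfig.concatResourcePath("my_stream") would yield "/kafka/my_stream.json"; "my_stream" is a placeholder name here.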
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/4c08ded6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMetaExtractor.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMetaExtractor.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMetaExtractor.java
deleted file mode 100644
index 680dff8..0000000
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMetaExtractor.java
+++ /dev/null
@@ -1,284 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.storage.hbase.util;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
-import org.apache.commons.cli.OptionGroup;
-import org.apache.commons.cli.Options;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.persistence.ResourceTool;
-import org.apache.kylin.common.util.AbstractApplication;
-import org.apache.kylin.common.util.OptionsHelper;
-import org.apache.kylin.cube.CubeDescManager;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.model.CubeDesc;
-import org.apache.kylin.invertedindex.IIDescManager;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.job.dao.ExecutableDao;
-import org.apache.kylin.job.dao.ExecutablePO;
-import org.apache.kylin.job.exception.PersistentException;
-import org.apache.kylin.metadata.MetadataManager;
-import org.apache.kylin.metadata.model.DataModelDesc;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.apache.kylin.metadata.project.ProjectInstance;
-import org.apache.kylin.metadata.project.ProjectManager;
-import org.apache.kylin.metadata.project.RealizationEntry;
-import org.apache.kylin.metadata.realization.IRealization;
-import org.apache.kylin.metadata.realization.RealizationRegistry;
-import org.apache.kylin.storage.hybrid.HybridInstance;
-import org.apache.kylin.storage.hybrid.HybridManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-
-/**
- * extract cube related info for debugging/distributing purpose
- * TODO: deal with II case, deal with Streaming case
- */
-public class CubeMetaExtractor extends AbstractApplication {
-
-    private static final Logger logger = LoggerFactory.getLogger(CubeMetaExtractor.class);
-
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify which cube to extract").create("cube");
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_HYBRID = OptionBuilder.withArgName("hybrid").hasArg().isRequired(false).withDescription("Specify which hybrid to extract").create("hybrid");
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");
-
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_SEGMENTS = OptionBuilder.withArgName("includeSegments").hasArg().isRequired(false).withDescription("set this to true if want extract the segments info, related dicts, etc.").create("includeSegments");
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false).withDescription("set this to true if want to extract job info/outputs too").create("includeJobs");
-
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(false).withDescription("specify the dest dir to save the related metadata").create("destDir");
-
-    private Options options = null;
-    private KylinConfig kylinConfig;
-    private MetadataManager metadataManager;
-    private ProjectManager projectManager;
-    private HybridManager hybridManager;
-    private CubeManager cubeManager;
-    private CubeDescManager cubeDescManager;
-    private IIManager iiManager;
-    private IIDescManager iiDescManager;
-    private ExecutableDao executableDao;
-    RealizationRegistry realizationRegistry;
-
-    public CubeMetaExtractor() {
-        options = new Options();
-
-        OptionGroup realizationOrProject = new OptionGroup();
-        realizationOrProject.addOption(OPTION_CUBE);
-        realizationOrProject.addOption(OPTION_PROJECT);
-        realizationOrProject.addOption(OPTION_HYBRID);
-        realizationOrProject.setRequired(true);
-
-        options.addOptionGroup(realizationOrProject);
-        options.addOption(OPTION_INCLUDE_SEGMENTS);
-        options.addOption(OPTION_INCLUDE_JOB);
-        options.addOption(OPTION_DEST);
-
-    }
-
-    @Override
-    protected Options getOptions() {
-        return options;
-    }
-
-    @Override
-    protected void execute(OptionsHelper optionsHelper) throws Exception {
-        boolean includeSegments = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENTS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENTS)) : true;
-        boolean includeJobs = optionsHelper.hasOption(OPTION_INCLUDE_JOB) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : true;
-        String dest = null;
-        if (optionsHelper.hasOption(OPTION_DEST)) {
-            dest = optionsHelper.getOptionValue(OPTION_DEST);
-        }
-
-        if (!includeSegments) {
-            throw new RuntimeException("Does not support skip segments for now");
-        }
-
-        kylinConfig = KylinConfig.getInstanceFromEnv();
-        metadataManager = MetadataManager.getInstance(kylinConfig);
-        projectManager = ProjectManager.getInstance(kylinConfig);
-        hybridManager = HybridManager.getInstance(kylinConfig);
-        cubeManager = CubeManager.getInstance(kylinConfig);
-        cubeDescManager = CubeDescManager.getInstance(kylinConfig);
-        iiManager = IIManager.getInstance(kylinConfig);
-        iiDescManager = IIDescManager.getInstance(kylinConfig);
-        executableDao = ExecutableDao.getInstance(kylinConfig);
-        realizationRegistry = RealizationRegistry.getInstance(kylinConfig);
-
-        List<String> requiredResources = Lists.newArrayList();
-        List<String> optionalResources = Lists.newArrayList();
-
-        if (optionsHelper.hasOption(OPTION_PROJECT)) {
-            ProjectInstance projectInstance = projectManager.getProject(optionsHelper.getOptionValue(OPTION_PROJECT));
-            if (projectInstance == null) {
-                throw new IllegalArgumentException("Project " + optionsHelper.getOptionValue(OPTION_PROJECT) + " does not exist");
-            }
-            addRequired(requiredResources, ProjectInstance.concatResourcePath(projectInstance.getName()));
-            List<RealizationEntry> realizationEntries = projectInstance.getRealizationEntries();
-            for (RealizationEntry realizationEntry : realizationEntries) {
-                retrieveResourcePath(getRealization(realizationEntry), includeSegments, includeJobs, requiredResources, optionalResources);
-            }
-        } else if (optionsHelper.hasOption(OPTION_CUBE)) {
-            String cubeName = optionsHelper.getOptionValue(OPTION_CUBE);
-            IRealization realization;
-
-            if ((realization = cubeManager.getRealization(cubeName)) != null) {
-                retrieveResourcePath(realization, includeSegments, includeJobs, requiredResources, optionalResources);
-            } else {
-                throw new IllegalArgumentException("No cube found with name of " + cubeName);
-            }
-        } else if (optionsHelper.hasOption(OPTION_HYBRID)) {
-            String hybridName = optionsHelper.getOptionValue(OPTION_HYBRID);
-            IRealization realization;
-
-            if ((realization = hybridManager.getRealization(hybridName)) != null) {
-                retrieveResourcePath(realization, includeSegments, includeJobs, requiredResources, optionalResources);
-            } else {
-                throw new IllegalArgumentException("No hybrid found with name of" + hybridName);
-            }
-        }
-
-        executeExtraction(requiredResources, optionalResources, dest);
-    }
-
-    private void executeExtraction(List<String> requiredPaths, List<String> optionalPaths, String dest) {
-        logger.info("The resource paths going to be extracted:");
-        for (String s : requiredPaths) {
-            logger.info(s + "(required)");
-        }
-        for (String s : optionalPaths) {
-            logger.info(s + "(optional)");
-        }
-
-        if (dest == null) {
-            logger.info("Dest is not set, exit directly without extracting");
-        } else {
-            try {
-                ResourceTool.copy(KylinConfig.getInstanceFromEnv(), KylinConfig.createInstanceFromUri(dest));
-            } catch (IOException e) {
-                throw new RuntimeException("IOException", e);
-            }
-        }
-    }
-
-    private IRealization getRealization(RealizationEntry realizationEntry) {
-        return realizationRegistry.getRealization(realizationEntry.getType(), realizationEntry.getRealization());
-    }
-
-    private void retrieveResourcePath(IRealization realization, boolean includeSegments, boolean includeJobs, List<String> requiredResources, List<String> optionalResources) {
-
-        logger.info("Deal with realization {} of type {}", realization.getName(), realization.getType());
-
-        if (realization instanceof CubeInstance) {
-            CubeInstance cube = (CubeInstance) realization;
-            String descName = cube.getDescName();
-            CubeDesc cubeDesc = cubeDescManager.getCubeDesc(descName);
-            String modelName = cubeDesc.getModelName();
-            DataModelDesc modelDesc = metadataManager.getDataModelDesc(modelName);
-
-            for (String tableName : modelDesc.getAllTables()) {
-                addRequired(requiredResources, TableDesc.concatResourcePath(tableName));
-                addOptional(optionalResources, TableDesc.concatExdResourcePath(tableName));
-            }
-
-            addRequired(requiredResources, DataModelDesc.concatResourcePath(modelDesc.getName()));
-            addRequired(requiredResources, CubeDesc.concatResourcePath(cubeDesc.getName()));
-
-            if (includeSegments) {
-                addRequired(requiredResources, CubeInstance.concatResourcePath(cube.getName()));
-                for (CubeSegment segment : cube.getSegments()) {
-                    for (String dictPat : segment.getDictionaryPaths()) {
-                        addRequired(requiredResources, dictPat);
-                    }
-                    for (String snapshotPath : segment.getSnapshotPaths()) {
-                        addRequired(requiredResources, snapshotPath);
-                    }
-                    addRequired(requiredResources, segment.getStatisticsResourcePath());
-
-                    if (includeJobs) {
-                        String lastJobId = segment.getLastBuildJobID();
-                        if (!StringUtils.isEmpty(lastJobId)) {
-                            logger.warn("No job exist for segment {}", segment);
-                        } else {
-                            try {
-                                ExecutablePO executablePO = executableDao.getJob(lastJobId);
-                                addRequired(requiredResources, ExecutableDao.pathOfJob(lastJobId));
-                                addRequired(requiredResources, ExecutableDao.pathOfJobOutput(lastJobId));
-                                for (ExecutablePO task : executablePO.getTasks()) {
-                                    addRequired(requiredResources, ExecutableDao.pathOfJob(task.getUuid()));
-                                    addRequired(requiredResources, ExecutableDao.pathOfJobOutput(task.getUuid()));
-                                }
-                            } catch (PersistentException e) {
-                                throw new RuntimeException("PersistentException", e);
-                            }
-                        }
-                    } else {
-                        logger.info("Job info will not be extracted");
-                    }
-                }
-            } else {
-                if (includeJobs) {
-                    logger.warn("It's useless to set includeJobs to true when includeSegments is set to false");
-                }
-
-                throw new IllegalStateException("Does not support skip segments now");
-            }
-        } else if (realization instanceof HybridInstance) {
-            HybridInstance hybridInstance = (HybridInstance) realization;
-            addRequired(requiredResources, HybridInstance.concatResourcePath(hybridInstance.getName()));
-            for (IRealization iRealization : hybridInstance.getRealizations()) {
-                retrieveResourcePath(iRealization, includeSegments, includeJobs, requiredResources, optionalResources);
-            }
-        } else if (realization instanceof IIInstance) {
-            throw new IllegalStateException("Does not support extract II instance or hybrid that contains II");
-        } else {
-            throw new IllegalStateException("Unknown realization type: " + realization.getType());
-        }
-    }
-
-    private void addRequired(List<String> resourcePaths, String record) {
-        logger.info("adding required resource {}", record);
-        resourcePaths.add(record);
-    }
-
-    private void addOptional(List<String> optionalPaths, String record) {
-        logger.info("adding optional resource {}", record);
-        optionalPaths.add(record);
-    }
-
-    public static void main(String[] args) {
-        CubeMetaExtractor extractor = new CubeMetaExtractor();
-        extractor.execute(args);
-    }
-}


[02/50] [abbrv] kylin git commit: KYLIN-1247 set text-overflow for input[placeholder]

Posted by li...@apache.org.
KYLIN-1247 set text-overflow for input[placeholder]


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/1fbc4221
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/1fbc4221
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/1fbc4221

Branch: refs/heads/master
Commit: 1fbc42212d0233ed3a1bdefb6fb883a9df47e465
Parents: 83bf92c
Author: janzhongi <ji...@ebay.com>
Authored: Thu Feb 25 18:14:15 2016 +0800
Committer: janzhongi <ji...@ebay.com>
Committed: Thu Feb 25 18:14:15 2016 +0800

----------------------------------------------------------------------
 webapp/app/less/app.less | 295 +++++++++++++++++++++++++-----------------
 1 file changed, 179 insertions(+), 116 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/1fbc4221/webapp/app/less/app.less
----------------------------------------------------------------------
diff --git a/webapp/app/less/app.less b/webapp/app/less/app.less
index 128457c..40d3280 100644
--- a/webapp/app/less/app.less
+++ b/webapp/app/less/app.less
@@ -19,6 +19,7 @@
 body {
   background-color: #FFF;
 }
+
 /** Fixed bottom footer **/
 .copyright {
   display: table;
@@ -35,8 +36,8 @@ body {
 
 /** port from bootswatch **/
 .bg-primary {
-  color:#fff;
-  background-color:#df691a
+  color: #fff;
+  background-color: #df691a
 }
 
 /***************** Directive ***************/
@@ -154,7 +155,7 @@ body {
 .table-striped > tbody > tr:nth-child(odd).tr-highlight > td,
 .table-striped > tbody > tr:nth-child(odd).tr-highlight > th,
 .table-striped > tbody > tr:nth-child(even).tr-highlight > td,
-.table-striped > tbody > tr:nth-child(even).tr-highlight > th{
+.table-striped > tbody > tr:nth-child(even).tr-highlight > th {
   background-color: #8adbff;
 }
 
@@ -230,11 +231,11 @@ table .radio {
 .cube-detail {
   background-color: white;
 }
+
 .model-detail {
   background-color: white;
 }
 
-
 /* Query content */
 .query-content {
   padding-bottom: 10px;
@@ -306,14 +307,13 @@ table .radio {
 .home_jobs:hover {
 }
 
-
 body .modal-dialog {
   width: 80%; /* desired relative width */
   height: 90%;
   //  left: 5%; /* (100%-width)/2 */
   /* place center */
-  margin-left:auto;
-  margin-right:auto;
+  margin-left: auto;
+  margin-right: auto;
 }
 
 pre {
@@ -332,7 +332,7 @@ pre {
   }
 }
 
-.table>tbody+tbody {
+.table > tbody + tbody {
   border-top: 0px solid #1c1e22;
 }
 
@@ -363,17 +363,17 @@ pre {
   stroke-width: 1.5px;
 }
 
-.nav-tabs>li>a {
+.nav-tabs > li > a {
   color: #333;
 }
 
 .table_text_center > tbody > tr > th,
-.table_text_center > tbody > tr > td{
+.table_text_center > tbody > tr > td {
   text-align: center;
 }
 
 .gridStyle {
-  border: 1px solid rgb(212,212,212);
+  border: 1px solid rgb(212, 212, 212);
 }
 
 .metadata_tree_column {
@@ -381,11 +381,9 @@ pre {
   background: url("../image/icon_table.png") no-repeat;
 }
 
-
-
-.nav-pills>li {
-cursor: pointer;
-//background-color: #094868;
+.nav-pills > li {
+  cursor: pointer;
+  //background-color: #094868;
 }
 
 .alert-info {
@@ -402,19 +400,22 @@ treecontrol {
 }
 
 treecontrol > ul {
-//  width: 310px;
+  //  width: 310px;
   overflow: auto;
 }
 
 treecontrol > ul > li {
   padding-left: 0;
 }
-treecontrol li .tree-label{
+
+treecontrol li .tree-label {
   cursor: default;
 }
-treecontrol li treeitem .tree-label{
+
+treecontrol li treeitem .tree-label {
   cursor: pointer;
 }
+
 treecontrol.tree-light li.tree-collapsed i {
   padding: 1px 10px;
   background: url("../image/database.png") no-repeat;
@@ -442,17 +443,16 @@ treecontrol.tree-light li.tree-leaf i {
   background: none no-repeat;
 }
 
-.navbar_default{
+.navbar_default {
   background-color: #2a2a00 !important;
 }
 
-.font_color{
-  color:#1E4B79;
+.font_color {
+  color: #1E4B79;
   font-size: 20px;
   padding-top: 16px;
 }
 
-
 .table-center {
   th {
     text-align: center;
@@ -464,75 +464,112 @@ treecontrol.tree-light li.tree-leaf i {
 }
 
 .overview-box {
-  width:40%;
-  margin-left:5%;
-  padding:0px;
+  width: 40%;
+  margin-left: 5%;
+  padding: 0px;
 }
 
-.header-button{
-  text-align:center;
-  border-width:4px;
-  background-color:#87b87f!important;
-  border-color:#87b87f!important;
+.header-button {
+  text-align: center;
+  border-width: 4px;
+  background-color: #87b87f !important;
+  border-color: #87b87f !important;
 }
 
-.font-color-default{
-  color:#808080;
+.font-color-default {
+  color: #808080;
 }
 
-.wizard-steps li.active .step,.wizard-steps li.active:before {
-  border-color:yellowgreen;
+.wizard-steps li.active .step, .wizard-steps li.active:before {
+  border-color: yellowgreen;
 }
 
-.modal-content-default{
-background-color:#BED9F7;
-padding:12px;
+.modal-content-default {
+  background-color: #BED9F7;
+  padding: 12px;
 }
 
-.modal-content-default{
-background-color:#ffffff;
+.modal-content-default {
+  background-color: #ffffff;
 }
 
-.tab-style{
+.tab-style {
   padding-top: 15px;
-  padding-bottom:5px;
+  padding-bottom: 5px;
   font-size: 15px;
-  margin-left:0px;
-  border:0px;
-  background-color:#ffffff;
-  }
-
+  margin-left: 0px;
+  border: 0px;
+  background-color: #ffffff;
+}
 
-.table-bordered, .table-bordered>tbody>tr>td, .table-bordered>tbody>tr>th, .table-bordered>tfoot>tr>td, .table-bordered>tfoot>tr>th, .table-bordered>thead>tr>td, .table-bordered>thead>tr>th {
+.table-bordered, .table-bordered > tbody > tr > td, .table-bordered > tbody > tr > th, .table-bordered > tfoot > tr > td, .table-bordered > tfoot > tr > th, .table-bordered > thead > tr > td, .table-bordered > thead > tr > th {
   border: 1px solid #ddd;
 }
-.table>tbody>tr>td, .table>tbody>tr>th, .table>tfoot>tr>td, .table>tfoot>tr>th, .table>thead>tr>td, .table>thead>tr>th {
+
+.table > tbody > tr > td, .table > tbody > tr > th, .table > tfoot > tr > td, .table > tfoot > tr > th, .table > thead > tr > td, .table > thead > tr > th {
   padding: 8px;
   line-height: 1.42857143;
   vertical-align: top;
   border-top: 1px solid #ddd;
 }
-.table>tbody{
-    word-break:break-all;
-    word-wrap:break-word;
+
+.table > tbody {
+  word-break: break-all;
+  word-wrap: break-word;
 }
 
 //overlay for loading request
-.loadingOverlay{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1998;width:100%;height:100%;_padding:0 20px 0 0;background:#f6f4f5;display:none;}
-.showbox{position:fixed;top:0;left:50%;z-index:9999;opacity:0;filter:alpha(opacity=0);margin-left:-80px;}
-*html,*html body{background-image:url(about:blank);background-attachment:fixed;}
+.loadingOverlay {
+  position: fixed;
+  top: 0;
+  right: 0;
+  bottom: 0;
+  left: 0;
+  z-index: 1998;
+  width: 100%;
+  height: 100%;
+  _padding: 0 20px 0 0;
+  background: #f6f4f5;
+  display: none;
+}
+
+.showbox {
+  position: fixed;
+  top: 0;
+  left: 50%;
+  z-index: 9999;
+  opacity: 0;
+  filter: alpha(opacity=0);
+  margin-left: -80px;
+}
+
+*html, *html body {
+  background-image: url(about:blank);
+  background-attachment: fixed;
+}
+
 //*html .showbox,*html .loadingOverlay{position:absolute;top:expression(eval(document.documentElement.scrollTop));}
-#loadingCntnr{
-//  border:1px solid #8CBEDA;color:#37a;font-size:18px;font-weight:bold;
+#loadingCntnr {
+  //  border:1px solid #8CBEDA;color:#37a;font-size:18px;font-weight:bold;
   color: #797979;
   font-size: 16px;
   font-weight: 300;
   line-height: normal;
 }
 
-#loadingCntnr div.loadingWord{width:200px;height:70px;line-height:70px;border:2px solid #D6E7F2;background:#fff;}
-#loadingCntnr img{margin:17px 15px;float:left;display:inline;}
+#loadingCntnr div.loadingWord {
+  width: 200px;
+  height: 70px;
+  line-height: 70px;
+  border: 2px solid #D6E7F2;
+  background: #fff;
+}
 
+#loadingCntnr img {
+  margin: 17px 15px;
+  float: left;
+  display: inline;
+}
 
 /** Customization for Messenger **/
 /** The top lower **/
@@ -541,22 +578,21 @@ ul.messenger.messenger-fixed.messenger-on-top {
 }
 
 /** Set max height and overflow auto **/
-ul.messenger.messenger-fixed.messenger-on-top .messenger-message .messenger-message-inner{
+ul.messenger.messenger-fixed.messenger-on-top .messenger-message .messenger-message-inner {
   max-height: 500px;
   overflow-y: auto;
 }
 
 /** Enable the text selectable on notification of Messenger **/
-ul.messenger .messenger-message-inner,.ngCellText {
+ul.messenger .messenger-message-inner, .ngCellText {
   -moz-user-select: text;
   -webkit-user-select: text;
   -o-user-select: text;
   user-select: text;
 }
 
-
-.load-hive-metawrapper input[type="radio"]{
-  margin-left:0px !important;
+.load-hive-metawrapper input[type="radio"] {
+  margin-left: 0px !important;
 }
 
 /** cube data model and dimensions **/
@@ -604,11 +640,11 @@ ul.messenger .messenger-message-inner,.ngCellText {
   overflow-y: auto;
 }
 
-.models-tree .list-group-item:last-child{
+.models-tree .list-group-item:last-child {
   border-radius: 0px !important;
 }
 
-.models-tree .list-group-item:first-child{
+.models-tree .list-group-item:first-child {
   border-radius: 0px !important;
 }
 
@@ -617,8 +653,9 @@ ul.messenger .messenger-message-inner,.ngCellText {
   animation: slideOutLeft 0.5s;;
   display: block !important;
 }
-.cube-models-cntnr.ng-hide-add+div {
-//  width: 100%;
+
+.cube-models-cntnr.ng-hide-add + div {
+  //  width: 100%;
   right: 0px;
   position: absolute;
   float: right;
@@ -632,7 +669,8 @@ ul.messenger .messenger-message-inner,.ngCellText {
   -webkit-animation: slideInLeft 0.5s;;
   animation: slideInLeft 0.5s;;
 }
-.cube-models-cntnr.ng-hide-remove+div{
+
+.cube-models-cntnr.ng-hide-remove + div {
   position: absolute;
   right: 0;
   width: 75%;
@@ -643,14 +681,16 @@ ul.messenger .messenger-message-inner,.ngCellText {
 }
 
 .form-group.required .control-label:after {
-  content:"*";
-  color:red;
+  content: "*";
+  color: red;
 }
-.streamingParserCtr .chosen-container{
-  width:200px !important;
+
+.streamingParserCtr .chosen-container {
+  width: 200px !important;
 }
+
 .progress.progress-striped {
-  background-color:#DCDCDC;
+  background-color: #DCDCDC;
 }
 
 .clone-cube-window .modal-dialog {
@@ -658,66 +698,89 @@ ul.messenger .messenger-message-inner,.ngCellText {
 }
 
 .tooltip {
-  z-index:1240000 !important;
+  z-index: 1240000 !important;
 }
 
 .paraTree {
-  padding:3px;
-  -webkit-border-radius:4px;
-  -moz-border-radius:4px;
-  border-radius:4px;
-  -webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.05);
-  -moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.05);
-  box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.05)
+  padding: 3px;
+  -webkit-border-radius: 4px;
+  -moz-border-radius: 4px;
+  border-radius: 4px;
+  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05);
+  -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05);
+  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.05)
 }
+
 .paraTree li {
-  list-style-type:none;
-  margin:0;
-  padding:10px 5px 0 5px;
-  position:relative
+  list-style-type: none;
+  margin: 0;
+  padding: 10px 5px 0 5px;
+  position: relative
 }
+
 .paraTree li::before, .paraTree li::after {
-  content:'';
-  left:-20px;
-  position:absolute;
-  right:auto
+  content: '';
+  left: -20px;
+  position: absolute;
+  right: auto
 }
+
 .paraTree li::before {
-  border-left:1px solid #999;
-  bottom:50px;
-  height:100%;
-  top:0;
-  width:1px
+  border-left: 1px solid #999;
+  bottom: 50px;
+  height: 100%;
+  top: 0;
+  width: 1px
 }
+
 .paraTree li::after {
-  border-top:1px solid #999;
-  height:20px;
-  top:25px;
-  width:25px
+  border-top: 1px solid #999;
+  height: 20px;
+  top: 25px;
+  width: 25px
 }
+
 .paraTree li span {
-  -moz-border-radius:5px;
-  -webkit-border-radius:5px;
-  border:1px solid #999;
-  border-radius:5px;
-  display:inline-block;
-  padding:3px 8px;
-  text-decoration:none
+  -moz-border-radius: 5px;
+  -webkit-border-radius: 5px;
+  border: 1px solid #999;
+  border-radius: 5px;
+  display: inline-block;
+  padding: 3px 8px;
+  text-decoration: none
 }
-.paraTree li.sub_li>span {
-  cursor:pointer
+
+.paraTree li.sub_li > span {
+  cursor: pointer
 }
-.paraTree>ul>li::before, .paraTree>ul>li::after {
-  border:0
+
+.paraTree > ul > li::before, .paraTree > ul > li::after {
+  border: 0
 }
+
 .paraTree li:last-child::before {
-  height:30px
+  height: 30px
 }
-.paraTree li.sub_li>span:hover, .paraTree li.sub_li>span:hover+ul li span {
-  background:#eee;
-  border:1px solid #94a0b4;
-  color:#000
+
+.paraTree li.sub_li > span:hover, .paraTree li.sub_li > span:hover + ul li span {
+  background: #eee;
+  border: 1px solid #94a0b4;
+  color: #000
 }
-.chosen-container .chosen-results li.active-result{
+
+.chosen-container .chosen-results li.active-result {
   word-break: break-all;
 }
+
+input[placeholder] {
+  text-overflow: ellipsis;
+}
+
+::-moz-placeholder {
+  text-overflow: ellipsis;
+}
+
+/* firefox 19+ */
+input:-moz-placeholder {
+  text-overflow: ellipsis;
+}


[34/50] [abbrv] kylin git commit: KYLIN-1474 expose list, remove and cat in metastore.sh

Posted by li...@apache.org.
KYLIN-1474 expose list, remove and cat in metastore.sh

Signed-off-by: Hongbin Ma <ma...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/17c33dc4
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/17c33dc4
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/17c33dc4

Branch: refs/heads/master
Commit: 17c33dc4929121cdb48c3904dc337cd456af845d
Parents: 11dc04b
Author: John Zhao <yu...@ebay.com>
Authored: Mon Mar 7 14:05:59 2016 -0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Mar 8 10:31:12 2016 +0800

----------------------------------------------------------------------
 build/bin/metastore.sh                          | 24 ++++++++++++++++++++
 .../kylin/common/persistence/ResourceTool.java  |  3 +++
 2 files changed, 27 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/17c33dc4/build/bin/metastore.sh
----------------------------------------------------------------------
diff --git a/build/bin/metastore.sh b/build/bin/metastore.sh
index 05041f9..8908351 100755
--- a/build/bin/metastore.sh
+++ b/build/bin/metastore.sh
@@ -47,6 +47,27 @@ then
     echo "Starting restoring $_file"
     ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool upload $_file
 
+elif [ "$1" == "list" ]
+then
+
+    _file=$2
+    echo "Starting list $_file"
+    ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool list $_file
+
+elif [ "$1" == "remove" ]
+then
+
+    _file=$2
+    echo "Starting remove $_file"
+    ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool remove $_file
+
+elif [ "$1" == "cat" ]
+then
+
+    _file=$2
+    echo "Starting cat $_file"
+    ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool cat $_file
+
 elif [ "$1" == "reset" ]
 then
 
@@ -61,6 +82,9 @@ else
     echo "usage: metastore.sh backup"
     echo "       metastore.sh reset"
     echo "       metastore.sh restore PATH_TO_LOCAL_META"
+    echo "       metastore.sh list RESOURCE_PATH"
+    echo "       metastore.sh cat RESOURCE_PATH"
+    echo "       metastore.sh remove RESOURCE_PATH"
     echo "       metastore.sh clean [--delete true]"
     exit 1
 fi

http://git-wip-us.apache.org/repos/asf/kylin/blob/17c33dc4/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
index 489e45a..2e1d527 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
@@ -41,6 +41,9 @@ public class ResourceTool {
             System.out.println("Usage: ResourceTool list  RESOURCE_PATH");
             System.out.println("Usage: ResourceTool download  LOCAL_DIR");
             System.out.println("Usage: ResourceTool upload    LOCAL_DIR");
+            System.out.println("Usage: ResourceTool reset");
+            System.out.println("Usage: ResourceTool remove RESOURCE_PATH");
+            System.out.println("Usage: ResourceTool cat RESOURCE_PATH");
             return;
         }
 


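The three new subcommands are thin wrappers: each one shells out to the same
ResourceTool verb via kylin.sh, so the operations can also be driven
programmatically. A minimal sketch, assuming only the ResourceTool main()
entry point that the script above already invokes; the resource paths are
illustrative, not real metadata entries:

    import org.apache.kylin.common.persistence.ResourceTool;

    public class MetastoreOpsSketch {
        public static void main(String[] args) throws Exception {
            // same as: metastore.sh list /
            ResourceTool.main(new String[] { "list", "/" });
            // same as: metastore.sh cat RESOURCE_PATH (path is made up)
            ResourceTool.main(new String[] { "cat", "/cube/sample_cube.json" });
            // same as: metastore.sh remove RESOURCE_PATH
            ResourceTool.main(new String[] { "remove", "/cube/sample_cube.json" });
        }
    }
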
[35/50] [abbrv] kylin git commit: KYLIN-1038 retry on job failure

Posted by li...@apache.org.
KYLIN-1038 retry on job failure


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3af7d4a7
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3af7d4a7
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3af7d4a7

Branch: refs/heads/master
Commit: 3af7d4a72c8308f00fe95276b08f05709eaa62e5
Parents: 17c33dc
Author: shaofengshi <sh...@apache.org>
Authored: Sun Feb 14 21:17:12 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Mar 8 12:13:16 2016 +0800

----------------------------------------------------------------------
 build/conf/kylin.properties                     |  3 ++
 .../apache/kylin/common/KylinConfigBase.java    |  4 ++
 .../kylin/job/execution/AbstractExecutable.java | 57 +++++++++++++-------
 .../job/execution/DefaultChainedExecutable.java |  5 ++
 4 files changed, 50 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3af7d4a7/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 78a564d..d694e9f 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -23,6 +23,9 @@ kylin.hbase.cluster.fs=
 
 kylin.job.mapreduce.default.reduce.input.mb=500
 
+# max job retry on error, default 0: no retry
+kylin.job.retry=0
+
 # If true, the job engine will not assume that the hadoop CLI resides on the same server as itself;
 # you will have to specify kylin.job.remote.cli.hostname, kylin.job.remote.cli.username and kylin.job.remote.cli.password.
 # It should not be set to "true" unless you're NOT running Kylin.sh on a hadoop client machine.

http://git-wip-us.apache.org/repos/asf/kylin/blob/3af7d4a7/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 826a28c..487f78e 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -538,6 +538,10 @@ public class KylinConfigBase implements Serializable {
         return Boolean.parseBoolean(getOptional("crossdomain.enable", "true"));
     }
 
+    public int getJobRetry() {
+        return Integer.parseInt(this.getOptional("kylin.job.retry", "0"));
+    }
+
     public String toString() {
         return getMetadataUrl();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/3af7d4a7/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
index e1d7106..8d5fea5 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
@@ -18,13 +18,10 @@
 
 package org.apache.kylin.job.execution;
 
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.kylin.common.KylinConfig;
@@ -35,10 +32,12 @@ import org.apache.kylin.job.manager.ExecutableManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Objects;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
 
 /**
  */
@@ -50,6 +49,7 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
     protected static final String END_TIME = "endTime";
 
     protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class);
+    protected int retry = 0;
 
     private String name;
     private String id;
@@ -99,15 +99,30 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
         logger.info("Executing >>>>>>>>>>>>>   " + this.getName() + "   <<<<<<<<<<<<<");
 
         Preconditions.checkArgument(executableContext instanceof DefaultContext);
-        ExecuteResult result;
-        try {
-            onExecuteStart(executableContext);
-            result = doWork(executableContext);
-        } catch (Throwable e) {
-            logger.error("error running Executable", e);
-            onExecuteError(e, executableContext);
-            throw new ExecuteException(e);
+        ExecuteResult result = null;
+
+        onExecuteStart(executableContext);
+        Throwable exception;
+        do {
+            if (retry > 0) {
+                logger.info("Retry " + retry);
+            }
+            exception = null;
+            result = null;
+            try {
+                result = doWork(executableContext);
+            } catch (Throwable e) {
+                logger.error("error running Executable", e);
+                exception = e;
+            }
+            retry++;
+        } while (((result != null && result.succeed() == false) || exception != null) && needRetry() == true);
+        
+        if (exception != null) {
+            onExecuteError(exception, executableContext);
+            throw new ExecuteException(exception);
         }
+        
         onExecuteFinished(result, executableContext);
         return result;
     }
@@ -301,6 +316,10 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
         return status == ExecutableState.DISCARDED;
     }
 
+    protected boolean needRetry() {
+        return this.retry <= KylinConfig.getInstanceFromEnv().getJobRetry();
+    }
+
     @Override
     public String toString() {
         return Objects.toStringHelper(this).add("id", getId()).add("name", getName()).add("state", getStatus()).toString();

http://git-wip-us.apache.org/repos/asf/kylin/blob/3af7d4a7/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
index 2e95711..7403715 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
@@ -106,6 +106,11 @@ public class DefaultChainedExecutable extends AbstractExecutable implements Chai
         return subTasks;
     }
 
+    @Override
+    protected boolean needRetry() {
+        return false;
+    }
+
     public final AbstractExecutable getTaskByName(String name) {
         for (AbstractExecutable task : subTasks) {
             if (task.getName() != null && task.getName().equalsIgnoreCase(name)) {


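The core of this change is the do/while loop in execute(): run doWork(),
count the attempt, and loop while the attempt failed (the result did not
succeed, or a Throwable was caught) and needRetry() still holds, i.e. while
the attempt count is still <= kylin.job.retry. Only after retries are
exhausted is the error surfaced via onExecuteError(). A standalone sketch of
the same control flow, with a made-up Task interface standing in for doWork()
and a plain int standing in for KylinConfig.getJobRetry():

    public class RetryLoopSketch {
        interface Task {
            boolean run() throws Exception; // stands in for doWork(); true == succeeded
        }

        static boolean executeWithRetry(Task task, int maxRetry) throws Exception {
            int retry = 0;
            boolean succeed;
            Throwable exception;
            do {
                if (retry > 0) {
                    System.out.println("Retry " + retry);
                }
                succeed = false;
                exception = null;
                try {
                    succeed = task.run();
                } catch (Throwable e) {
                    exception = e; // remember the failure; the loop condition decides on retry
                }
                retry++;
                // mirrors needRetry(): one initial attempt plus up to maxRetry retries
            } while ((!succeed || exception != null) && retry <= maxRetry);

            if (exception != null) {
                throw new Exception(exception); // attempts exhausted, surface the error
            }
            return succeed;
        }

        public static void main(String[] args) throws Exception {
            final int[] attempts = { 0 };
            // fails twice, succeeds on the third attempt; maxRetry=5 allows that
            boolean ok = executeWithRetry(() -> ++attempts[0] >= 3, 5);
            System.out.println("succeeded=" + ok + " after " + attempts[0] + " attempts");
        }
    }
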
[46/50] [abbrv] kylin git commit: KYLIN-1343 Upgrade to calcite 1.6 (with Edward Zhang)

Posted by li...@apache.org.
KYLIN-1343 Upgrade to calcite 1.6 (with Edward Zhang)


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/f73abf6c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/f73abf6c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/f73abf6c

Branch: refs/heads/master
Commit: f73abf6c2df97c1c89e7e6edf3865d82350f8472
Parents: 9021f17
Author: Yang Li <li...@apache.org>
Authored: Sun Mar 6 00:06:05 2016 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Mar 8 15:54:05 2016 +0800

----------------------------------------------------------------------
 .../adapter/enumerable/EnumerableJoin.java      |  145 -
 .../apache/calcite/runtime/SqlFunctions.java    | 1317 ---
 .../apache/calcite/sql2rel/RelFieldTrimmer.java |   44 +
 .../calcite/sql2rel/SqlToRelConverter.java      | 8150 ++++++++++--------
 .../engine/mr/steps/MergeCuboidJobTest.java     |    1 -
 .../org/apache/kylin/jdbc/KylinConnection.java  |    2 +-
 .../org/apache/kylin/jdbc/KylinJdbcFactory.java |    5 +-
 .../java/org/apache/kylin/jdbc/KylinMeta.java   |   46 +-
 .../kylin/jdbc/KylinPreparedStatement.java      |    2 +
 .../org/apache/kylin/jdbc/KylinResultSet.java   |    5 +-
 kylin-it/src/test/resources/logging.properties  |    2 +-
 .../src/test/resources/query/sql/query92.sql    |   30 +
 .../resources/query/sql/query92.sql.disabled    |   30 -
 .../src/test/resources/query/sql/query93.sql    |   30 +
 .../resources/query/sql/query93.sql.disabled    |   30 -
 .../src/test/resources/query/sql/query94.sql    |   30 +
 .../resources/query/sql/query94.sql.disabled    |   30 -
 .../src/test/resources/query/sql/query95.sql    |   30 +
 .../resources/query/sql/query95.sql.disabled    |   30 -
 pom.xml                                         |    2 +-
 .../kylin/query/optrule/OLAPJoinRule.java       |    2 +-
 .../kylin/query/relnode/OLAPAggregateRel.java   |   10 +-
 .../kylin/query/relnode/OLAPFilterRel.java      |    5 +-
 .../apache/kylin/query/relnode/OLAPJoinRel.java |   22 +-
 .../kylin/query/relnode/OLAPLimitRel.java       |    5 +-
 .../kylin/query/relnode/OLAPProjectRel.java     |    5 +-
 .../apache/kylin/query/relnode/OLAPSortRel.java |    5 +-
 .../kylin/query/relnode/OLAPTableScan.java      |    9 +-
 .../relnode/OLAPToEnumerableConverter.java      |    6 +-
 .../apache/kylin/query/schema/OLAPTable.java    |   17 +-
 30 files changed, 4836 insertions(+), 5211 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableJoin.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableJoin.java b/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableJoin.java
deleted file mode 100644
index a3c04f8..0000000
--- a/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableJoin.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
- * OVERRIDE POINT:
- * - constructor was private instead of protected 
- */
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.calcite.adapter.enumerable;
-
-import java.util.Set;
-
-import org.apache.calcite.linq4j.tree.BlockBuilder;
-import org.apache.calcite.linq4j.tree.Expression;
-import org.apache.calcite.linq4j.tree.Expressions;
-import org.apache.calcite.plan.RelOptCluster;
-import org.apache.calcite.plan.RelOptCost;
-import org.apache.calcite.plan.RelOptPlanner;
-import org.apache.calcite.plan.RelTraitSet;
-import org.apache.calcite.rel.InvalidRelException;
-import org.apache.calcite.rel.RelNode;
-import org.apache.calcite.rel.RelNodes;
-import org.apache.calcite.rel.core.EquiJoin;
-import org.apache.calcite.rel.core.JoinInfo;
-import org.apache.calcite.rel.core.JoinRelType;
-import org.apache.calcite.rel.metadata.RelMetadataQuery;
-import org.apache.calcite.rex.RexNode;
-import org.apache.calcite.util.BuiltInMethod;
-import org.apache.calcite.util.ImmutableIntList;
-import org.apache.calcite.util.Util;
-
-import com.google.common.collect.ImmutableList;
-
-/** Implementation of {@link org.apache.calcite.rel.core.Join} in
- * {@link org.apache.calcite.adapter.enumerable.EnumerableConvention enumerable calling convention}. */
-public class EnumerableJoin extends EquiJoin implements EnumerableRel {
-    /** Creates an EnumerableJoin.
-     *
-     * <p>Use {@link #create} unless you know what you're doing. */
-    public EnumerableJoin(RelOptCluster cluster, RelTraitSet traits, RelNode left, RelNode right, RexNode condition, ImmutableIntList leftKeys, ImmutableIntList rightKeys, JoinRelType joinType, Set<String> variablesStopped) throws InvalidRelException {
-        super(cluster, traits, left, right, condition, leftKeys, rightKeys, joinType, variablesStopped);
-    }
-
-    /** Creates an EnumerableJoin. */
-    public static EnumerableJoin create(RelNode left, RelNode right, RexNode condition, ImmutableIntList leftKeys, ImmutableIntList rightKeys, JoinRelType joinType, Set<String> variablesStopped) throws InvalidRelException {
-        final RelOptCluster cluster = left.getCluster();
-        final RelTraitSet traitSet = cluster.traitSetOf(EnumerableConvention.INSTANCE);
-        return new EnumerableJoin(cluster, traitSet, left, right, condition, leftKeys, rightKeys, joinType, variablesStopped);
-    }
-
-    @Override
-    public EnumerableJoin copy(RelTraitSet traitSet, RexNode condition, RelNode left, RelNode right, JoinRelType joinType, boolean semiJoinDone) {
-        final JoinInfo joinInfo = JoinInfo.of(left, right, condition);
-        assert joinInfo.isEqui();
-        try {
-            return new EnumerableJoin(getCluster(), traitSet, left, right, condition, joinInfo.leftKeys, joinInfo.rightKeys, joinType, variablesStopped);
-        } catch (InvalidRelException e) {
-            // Semantic error not possible. Must be a bug. Convert to
-            // internal error.
-            throw new AssertionError(e);
-        }
-    }
-
-    @Override
-    public RelOptCost computeSelfCost(RelOptPlanner planner) {
-        double rowCount = RelMetadataQuery.getRowCount(this);
-
-        // Joins can be flipped, and for many algorithms, both versions are viable
-        // and have the same cost. To make the results stable between versions of
-        // the planner, make one of the versions slightly more expensive.
-        switch (joinType) {
-        case RIGHT:
-            rowCount = addEpsilon(rowCount);
-            break;
-        default:
-            if (RelNodes.COMPARATOR.compare(left, right) > 0) {
-                rowCount = addEpsilon(rowCount);
-            }
-        }
-
-        // Cheaper if the smaller number of rows is coming from the LHS.
-        // Model this by adding L log L to the cost.
-        final double rightRowCount = right.getRows();
-        final double leftRowCount = left.getRows();
-        if (Double.isInfinite(leftRowCount)) {
-            rowCount = leftRowCount;
-        } else {
-            rowCount += Util.nLogN(leftRowCount);
-        }
-        if (Double.isInfinite(rightRowCount)) {
-            rowCount = rightRowCount;
-        } else {
-            rowCount += rightRowCount;
-        }
-        return planner.getCostFactory().makeCost(rowCount, 0, 0);
-    }
-
-    private double addEpsilon(double d) {
-        assert d >= 0d;
-        final double d0 = d;
-        if (d < 10) {
-            // For small d, adding 1 would change the value significantly.
-            d *= 1.001d;
-            if (d != d0) {
-                return d;
-            }
-        }
-        // For medium d, add 1. Keeps integral values integral.
-        ++d;
-        if (d != d0) {
-            return d;
-        }
-        // For large d, adding 1 might not change the value. Add .1%.
-        // If d is NaN, this still will probably not change the value. That's OK.
-        d *= 1.001d;
-        return d;
-    }
-
-    public Result implement(EnumerableRelImplementor implementor, Prefer pref) {
-        BlockBuilder builder = new BlockBuilder();
-        final Result leftResult = implementor.visitChild(this, 0, (EnumerableRel) left, pref);
-        Expression leftExpression = builder.append("left", leftResult.block);
-        final Result rightResult = implementor.visitChild(this, 1, (EnumerableRel) right, pref);
-        Expression rightExpression = builder.append("right", rightResult.block);
-        final PhysType physType = PhysTypeImpl.of(implementor.getTypeFactory(), getRowType(), pref.preferArray());
-        final PhysType keyPhysType = leftResult.physType.project(leftKeys, JavaRowFormat.LIST);
-        return implementor.result(physType, builder.append(Expressions.call(leftExpression, BuiltInMethod.JOIN.method, Expressions.list(rightExpression, leftResult.physType.generateAccessor(leftKeys), rightResult.physType.generateAccessor(rightKeys), EnumUtils.joinSelector(joinType, physType, ImmutableList.of(leftResult.physType, rightResult.physType))).append(Util.first(keyPhysType.comparer(), Expressions.constant(null))).append(Expressions.constant(joinType.generatesNullsOnLeft())).append(Expressions.constant(joinType.generatesNullsOnRight())))).toBlock());
-    }
-
-}
-
-// End EnumerableJoin.java

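For context on the OVERRIDE POINT note at the top of the deleted file:
upstream Calcite had declared this constructor private, which makes the class
impossible to subclass; the forked copy widened it so that Kylin's own join
RelNode (OLAPJoinRel in the file list above) could call super(...). A
compile-time sketch against the constructor signature shown above; the
subclass name is made up and the body does nothing beyond delegating:

    import java.util.Set;

    import org.apache.calcite.adapter.enumerable.EnumerableJoin;
    import org.apache.calcite.plan.RelOptCluster;
    import org.apache.calcite.plan.RelTraitSet;
    import org.apache.calcite.rel.InvalidRelException;
    import org.apache.calcite.rel.RelNode;
    import org.apache.calcite.rel.core.JoinRelType;
    import org.apache.calcite.rex.RexNode;
    import org.apache.calcite.util.ImmutableIntList;

    // compiles only because the constructor is visible to subclasses
    class IllustrativeJoinRel extends EnumerableJoin {
        IllustrativeJoinRel(RelOptCluster cluster, RelTraitSet traits, RelNode left,
                RelNode right, RexNode condition, ImmutableIntList leftKeys,
                ImmutableIntList rightKeys, JoinRelType joinType,
                Set<String> variablesStopped) throws InvalidRelException {
            super(cluster, traits, left, right, condition, leftKeys, rightKeys,
                    joinType, variablesStopped);
        }
    }
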
http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/atopcalcite/src/main/java/org/apache/calcite/runtime/SqlFunctions.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/runtime/SqlFunctions.java b/atopcalcite/src/main/java/org/apache/calcite/runtime/SqlFunctions.java
deleted file mode 100644
index 236601f..0000000
--- a/atopcalcite/src/main/java/org/apache/calcite/runtime/SqlFunctions.java
+++ /dev/null
@@ -1,1317 +0,0 @@
-/*
- * OVERRIDE POINT:
- * - divide(BigDecimal,BigDecimal), was `b0.divide(b1)`, now `b0.divide(b1, MathContext.DECIMAL64);` 
- */
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.calcite.runtime;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.math.MathContext;
-import java.sql.SQLException;
-import java.sql.Timestamp;
-import java.text.DecimalFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TimeZone;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.regex.Pattern;
-
-import org.apache.calcite.DataContext;
-import org.apache.calcite.avatica.util.ByteString;
-import org.apache.calcite.avatica.util.DateTimeUtils;
-import org.apache.calcite.linq4j.Enumerable;
-import org.apache.calcite.linq4j.Linq4j;
-import org.apache.calcite.linq4j.function.Deterministic;
-import org.apache.calcite.linq4j.function.Function1;
-import org.apache.calcite.linq4j.function.NonDeterministic;
-import org.apache.calcite.linq4j.tree.Primitive;
-
-/**
- * Helper methods to implement SQL functions in generated code.
- *
- * <p>Not present: and, or, not (builtin operators are better, because they
- * use lazy evaluation. Implementations do not check for null values; the
- * calling code must do that.</p>
- *
- * <p>Many of the functions do not check for null values. This is intentional.
- * If null arguments are possible, the code-generation framework checks for
- * nulls before calling the functions.</p>
- */
-@SuppressWarnings("UnnecessaryUnboxing")
-@Deterministic
-public class SqlFunctions {
-    private static final DecimalFormat DOUBLE_FORMAT = new DecimalFormat("0.0E0");
-
-    private static final TimeZone LOCAL_TZ = TimeZone.getDefault();
-
-    private static final Function1<List<Object>, Enumerable<Object>> LIST_AS_ENUMERABLE = new Function1<List<Object>, Enumerable<Object>>() {
-        public Enumerable<Object> apply(List<Object> list) {
-            return Linq4j.asEnumerable(list);
-        }
-    };
-
-    /** Holds, for each thread, a map from sequence name to sequence current
-     * value.
-     *
-     * <p>This is a straw man of an implementation whose main goal is to prove
-     * that sequences can be parsed, validated and planned. A real application
-     * will want persistent values for sequences, shared among threads. */
-    private static final ThreadLocal<Map<String, AtomicLong>> THREAD_SEQUENCES = new ThreadLocal<Map<String, AtomicLong>>() {
-        @Override
-        protected Map<String, AtomicLong> initialValue() {
-            return new HashMap<String, AtomicLong>();
-        }
-    };
-
-    private SqlFunctions() {
-    }
-
-    /** SQL SUBSTRING(string FROM ... FOR ...) function. */
-    public static String substring(String s, int from, int for_) {
-        return s.substring(from - 1, Math.min(from - 1 + for_, s.length()));
-    }
-
-    /** SQL SUBSTRING(string FROM ...) function. */
-    public static String substring(String s, int from) {
-        return s.substring(from - 1);
-    }
-
-    /** SQL UPPER(string) function. */
-    public static String upper(String s) {
-        return s.toUpperCase();
-    }
-
-    /** SQL LOWER(string) function. */
-    public static String lower(String s) {
-        return s.toLowerCase();
-    }
-
-    /** SQL INITCAP(string) function. */
-    public static String initcap(String s) {
-        // Assumes Alpha as [A-Za-z0-9]
-        // white space is treated as everything else.
-        final int len = s.length();
-        boolean start = true;
-        final StringBuilder newS = new StringBuilder();
-
-        for (int i = 0; i < len; i++) {
-            char curCh = s.charAt(i);
-            final int c = (int) curCh;
-            if (start) { // curCh is whitespace or first character of word.
-                if (c > 47 && c < 58) { // 0-9
-                    start = false;
-                } else if (c > 64 && c < 91) { // A-Z
-                    start = false;
-                } else if (c > 96 && c < 123) { // a-z
-                    start = false;
-                    curCh = (char) (c - 32); // Uppercase this character
-                }
-                // else {} whitespace
-            } else { // Inside of a word or white space after end of word.
-                if (c > 47 && c < 58) { // 0-9
-                    // noop
-                } else if (c > 64 && c < 91) { // A-Z
-                    curCh = (char) (c + 32); // Lowercase this character
-                } else if (c > 96 && c < 123) { // a-z
-                    // noop
-                } else { // whitespace
-                    start = true;
-                }
-            }
-            newS.append(curCh);
-        } // for each character in s
-        return newS.toString();
-    }
-
-    /** SQL CHARACTER_LENGTH(string) function. */
-    public static int charLength(String s) {
-        return s.length();
-    }
-
-    /** SQL {@code string || string} operator. */
-    public static String concat(String s0, String s1) {
-        return s0 + s1;
-    }
-
-    /** SQL {@code binary || binary} operator. */
-    public static ByteString concat(ByteString s0, ByteString s1) {
-        return s0.concat(s1);
-    }
-
-    /** SQL {@code RTRIM} function applied to string. */
-    public static String rtrim(String s) {
-        return trim_(s, false, true, ' ');
-    }
-
-    /** SQL {@code LTRIM} function. */
-    public static String ltrim(String s) {
-        return trim_(s, true, false, ' ');
-    }
-
-    /** SQL {@code TRIM(... seek FROM s)} function. */
-    public static String trim(boolean leading, boolean trailing, String seek, String s) {
-        return trim_(s, leading, trailing, seek.charAt(0));
-    }
-
-    /** SQL {@code TRIM} function. */
-    private static String trim_(String s, boolean left, boolean right, char c) {
-        int j = s.length();
-        if (right) {
-            for (;;) {
-                if (j == 0) {
-                    return "";
-                }
-                if (s.charAt(j - 1) != c) {
-                    break;
-                }
-                --j;
-            }
-        }
-        int i = 0;
-        if (left) {
-            for (;;) {
-                if (i == j) {
-                    return "";
-                }
-                if (s.charAt(i) != c) {
-                    break;
-                }
-                ++i;
-            }
-        }
-        return s.substring(i, j);
-    }
-
-    /** SQL {@code TRIM} function applied to binary string. */
-    public static ByteString trim(ByteString s) {
-        return trim_(s, true, true);
-    }
-
-    /** Helper for CAST. */
-    public static ByteString rtrim(ByteString s) {
-        return trim_(s, false, true);
-    }
-
-    /** SQL {@code TRIM} function applied to binary string. */
-    private static ByteString trim_(ByteString s, boolean left, boolean right) {
-        int j = s.length();
-        if (right) {
-            for (;;) {
-                if (j == 0) {
-                    return ByteString.EMPTY;
-                }
-                if (s.byteAt(j - 1) != 0) {
-                    break;
-                }
-                --j;
-            }
-        }
-        int i = 0;
-        if (left) {
-            for (;;) {
-                if (i == j) {
-                    return ByteString.EMPTY;
-                }
-                if (s.byteAt(i) != 0) {
-                    break;
-                }
-                ++i;
-            }
-        }
-        return s.substring(i, j);
-    }
-
-    /** SQL {@code OVERLAY} function. */
-    public static String overlay(String s, String r, int start) {
-        if (s == null || r == null) {
-            return null;
-        }
-        return s.substring(0, start - 1) + r + s.substring(start - 1 + r.length());
-    }
-
-    /** SQL {@code OVERLAY} function. */
-    public static String overlay(String s, String r, int start, int length) {
-        if (s == null || r == null) {
-            return null;
-        }
-        return s.substring(0, start - 1) + r + s.substring(start - 1 + length);
-    }
-
-    /** SQL {@code OVERLAY} function applied to binary strings. */
-    public static ByteString overlay(ByteString s, ByteString r, int start) {
-        if (s == null || r == null) {
-            return null;
-        }
-        return s.substring(0, start - 1).concat(r).concat(s.substring(start - 1 + r.length()));
-    }
-
-    /** SQL {@code OVERLAY} function applied to binary strings. */
-    public static ByteString overlay(ByteString s, ByteString r, int start, int length) {
-        if (s == null || r == null) {
-            return null;
-        }
-        return s.substring(0, start - 1).concat(r).concat(s.substring(start - 1 + length));
-    }
-
-    /** SQL {@code LIKE} function. */
-    public static boolean like(String s, String pattern) {
-        final String regex = Like.sqlToRegexLike(pattern, null);
-        return Pattern.matches(regex, s);
-    }
-
-    /** SQL {@code LIKE} function with escape. */
-    public static boolean like(String s, String pattern, String escape) {
-        final String regex = Like.sqlToRegexLike(pattern, escape);
-        return Pattern.matches(regex, s);
-    }
-
-    /** SQL {@code SIMILAR} function. */
-    public static boolean similar(String s, String pattern) {
-        final String regex = Like.sqlToRegexSimilar(pattern, null);
-        return Pattern.matches(regex, s);
-    }
-
-    /** SQL {@code SIMILAR} function with escape. */
-    public static boolean similar(String s, String pattern, String escape) {
-        final String regex = Like.sqlToRegexSimilar(pattern, escape);
-        return Pattern.matches(regex, s);
-    }
-
-    // =
-
-    /** SQL = operator applied to Object values (including String; neither
-     * side may be null). */
-    public static boolean eq(Object b0, Object b1) {
-        return b0.equals(b1);
-    }
-
-    /** SQL = operator applied to BigDecimal values (neither may be null). */
-    public static boolean eq(BigDecimal b0, BigDecimal b1) {
-        return b0.stripTrailingZeros().equals(b1.stripTrailingZeros());
-    }
-
-    // <>
-
-    /** SQL &lt;&gt; operator applied to Object values (including String;
-     * neither side may be null). */
-    public static boolean ne(Object b0, Object b1) {
-        return !b0.equals(b1);
-    }
-
-    /** SQL &lt;&gt; operator applied to BigDecimal values. */
-    public static boolean ne(BigDecimal b0, BigDecimal b1) {
-        return b0.compareTo(b1) != 0;
-    }
-
-    // <
-
-    /** SQL &lt; operator applied to boolean values. */
-    public static boolean lt(boolean b0, boolean b1) {
-        return compare(b0, b1) < 0;
-    }
-
-    /** SQL &lt; operator applied to String values. */
-    public static boolean lt(String b0, String b1) {
-        return b0.compareTo(b1) < 0;
-    }
-
-    /** SQL &lt; operator applied to ByteString values. */
-    public static boolean lt(ByteString b0, ByteString b1) {
-        return b0.compareTo(b1) < 0;
-    }
-
-    /** SQL &lt; operator applied to BigDecimal values. */
-    public static boolean lt(BigDecimal b0, BigDecimal b1) {
-        return b0.compareTo(b1) < 0;
-    }
-
-    // <=
-
-    /** SQL &le; operator applied to boolean values. */
-    public static boolean le(boolean b0, boolean b1) {
-        return compare(b0, b1) <= 0;
-    }
-
-    /** SQL &le; operator applied to String values. */
-    public static boolean le(String b0, String b1) {
-        return b0.compareTo(b1) <= 0;
-    }
-
-    /** SQL &le; operator applied to ByteString values. */
-    public static boolean le(ByteString b0, ByteString b1) {
-        return b0.compareTo(b1) <= 0;
-    }
-
-    /** SQL &le; operator applied to BigDecimal values. */
-    public static boolean le(BigDecimal b0, BigDecimal b1) {
-        return b0.compareTo(b1) <= 0;
-    }
-
-    // >
-
-    /** SQL &gt; operator applied to boolean values. */
-    public static boolean gt(boolean b0, boolean b1) {
-        return compare(b0, b1) > 0;
-    }
-
-    /** SQL &gt; operator applied to String values. */
-    public static boolean gt(String b0, String b1) {
-        return b0.compareTo(b1) > 0;
-    }
-
-    /** SQL &gt; operator applied to ByteString values. */
-    public static boolean gt(ByteString b0, ByteString b1) {
-        return b0.compareTo(b1) > 0;
-    }
-
-    /** SQL &gt; operator applied to BigDecimal values. */
-    public static boolean gt(BigDecimal b0, BigDecimal b1) {
-        return b0.compareTo(b1) > 0;
-    }
-
-    // >=
-
-    /** SQL &ge; operator applied to boolean values. */
-    public static boolean ge(boolean b0, boolean b1) {
-        return compare(b0, b1) >= 0;
-    }
-
-    /** SQL &ge; operator applied to String values. */
-    public static boolean ge(String b0, String b1) {
-        return b0.compareTo(b1) >= 0;
-    }
-
-    /** SQL &ge; operator applied to ByteString values. */
-    public static boolean ge(ByteString b0, ByteString b1) {
-        return b0.compareTo(b1) >= 0;
-    }
-
-    /** SQL &ge; operator applied to BigDecimal values. */
-    public static boolean ge(BigDecimal b0, BigDecimal b1) {
-        return b0.compareTo(b1) >= 0;
-    }
-
-    // +
-
-    /** SQL <code>+</code> operator applied to int values. */
-    public static int plus(int b0, int b1) {
-        return b0 + b1;
-    }
-
-    /** SQL <code>+</code> operator applied to int values; left side may be
-     * null. */
-    public static Integer plus(Integer b0, int b1) {
-        return b0 == null ? null : (b0 + b1);
-    }
-
-    /** SQL <code>+</code> operator applied to int values; right side may be
-     * null. */
-    public static Integer plus(int b0, Integer b1) {
-        return b1 == null ? null : (b0 + b1);
-    }
-
-    /** SQL <code>+</code> operator applied to nullable int values. */
-    public static Integer plus(Integer b0, Integer b1) {
-        return (b0 == null || b1 == null) ? null : (b0 + b1);
-    }
-
-    /** SQL <code>+</code> operator applied to nullable long and int values. */
-    public static Long plus(Long b0, Integer b1) {
-        return (b0 == null || b1 == null) ? null : (b0.longValue() + b1.longValue());
-    }
-
-    /** SQL <code>+</code> operator applied to nullable int and long values. */
-    public static Long plus(Integer b0, Long b1) {
-        return (b0 == null || b1 == null) ? null : (b0.longValue() + b1.longValue());
-    }
-
-    /** SQL <code>+</code> operator applied to BigDecimal values. */
-    public static BigDecimal plus(BigDecimal b0, BigDecimal b1) {
-        return (b0 == null || b1 == null) ? null : b0.add(b1);
-    }
-
-    // -
-
-    /** SQL <code>-</code> operator applied to int values. */
-    public static int minus(int b0, int b1) {
-        return b0 - b1;
-    }
-
-    /** SQL <code>-</code> operator applied to int values; left side may be
-     * null. */
-    public static Integer minus(Integer b0, int b1) {
-        return b0 == null ? null : (b0 - b1);
-    }
-
-    /** SQL <code>-</code> operator applied to int values; right side may be
-     * null. */
-    public static Integer minus(int b0, Integer b1) {
-        return b1 == null ? null : (b0 - b1);
-    }
-
-    /** SQL <code>-</code> operator applied to nullable int values. */
-    public static Integer minus(Integer b0, Integer b1) {
-        return (b0 == null || b1 == null) ? null : (b0 - b1);
-    }
-
-    /** SQL <code>-</code> operator applied to nullable long and int values. */
-    public static Long minus(Long b0, Integer b1) {
-        return (b0 == null || b1 == null) ? null : (b0.longValue() - b1.longValue());
-    }
-
-    /** SQL <code>-</code> operator applied to nullable int and long values. */
-    public static Long minus(Integer b0, Long b1) {
-        return (b0 == null || b1 == null) ? null : (b0.longValue() - b1.longValue());
-    }
-
-    /** SQL <code>-</code> operator applied to BigDecimal values. */
-    public static BigDecimal minus(BigDecimal b0, BigDecimal b1) {
-        return (b0 == null || b1 == null) ? null : b0.subtract(b1);
-    }
-
-    // /
-
-    /** SQL <code>/</code> operator applied to int values. */
-    public static int divide(int b0, int b1) {
-        return b0 / b1;
-    }
-
-    /** SQL <code>/</code> operator applied to int values; left side may be
-     * null. */
-    public static Integer divide(Integer b0, int b1) {
-        return b0 == null ? null : (b0 / b1);
-    }
-
-    /** SQL <code>/</code> operator applied to int values; right side may be
-     * null. */
-    public static Integer divide(int b0, Integer b1) {
-        return b1 == null ? null : (b0 / b1);
-    }
-
-    /** SQL <code>/</code> operator applied to nullable int values. */
-    public static Integer divide(Integer b0, Integer b1) {
-        return (b0 == null || b1 == null) ? null : (b0 / b1);
-    }
-
-    /** SQL <code>/</code> operator applied to nullable long and int values. */
-    public static Long divide(Long b0, Integer b1) {
-        return (b0 == null || b1 == null) ? null : (b0.longValue() / b1.longValue());
-    }
-
-    /** SQL <code>/</code> operator applied to nullable int and long values. */
-    public static Long divide(Integer b0, Long b1) {
-        return (b0 == null || b1 == null) ? null : (b0.longValue() / b1.longValue());
-    }
-
-    /** SQL <code>/</code> operator applied to BigDecimal values. */
-    public static BigDecimal divide(BigDecimal b0, BigDecimal b1) {
-        return (b0 == null || b1 == null) ? null : b0.divide(b1, MathContext.DECIMAL64);
-    }
-
-    // *
-
-    /** SQL <code>*</code> operator applied to int values. */
-    public static int multiply(int b0, int b1) {
-        return b0 * b1;
-    }
-
-    /** SQL <code>*</code> operator applied to int values; left side may be
-     * null. */
-    public static Integer multiply(Integer b0, int b1) {
-        return b0 == null ? null : (b0 * b1);
-    }
-
-    /** SQL <code>*</code> operator applied to int values; right side may be
-     * null. */
-    public static Integer multiply(int b0, Integer b1) {
-        return b1 == null ? null : (b0 * b1);
-    }
-
-    /** SQL <code>*</code> operator applied to nullable int values. */
-    public static Integer multiply(Integer b0, Integer b1) {
-        return (b0 == null || b1 == null) ? null : (b0 * b1);
-    }
-
-    /** SQL <code>*</code> operator applied to nullable long and int values. */
-    public static Long multiply(Long b0, Integer b1) {
-        return (b0 == null || b1 == null) ? null : (b0.longValue() * b1.longValue());
-    }
-
-    /** SQL <code>*</code> operator applied to nullable int and long values. */
-    public static Long multiply(Integer b0, Long b1) {
-        return (b0 == null || b1 == null) ? null : (b0.longValue() * b1.longValue());
-    }
-
-    /** SQL <code>*</code> operator applied to BigDecimal values. */
-    public static BigDecimal multiply(BigDecimal b0, BigDecimal b1) {
-        return (b0 == null || b1 == null) ? null : b0.multiply(b1);
-    }
-
-    // EXP
-
-    /** SQL <code>EXP</code> operator applied to double values. */
-    public static double exp(double b0) {
-        return Math.exp(b0);
-    }
-
-    public static double exp(long b0) {
-        return Math.exp(b0);
-    }
-
-    // POWER
-
-    /** SQL <code>POWER</code> operator applied to double values. */
-    public static double power(double b0, double b1) {
-        return Math.pow(b0, b1);
-    }
-
-    public static double power(long b0, long b1) {
-        return Math.pow(b0, b1);
-    }
-
-    public static double power(long b0, BigDecimal b1) {
-        return Math.pow(b0, b1.doubleValue());
-    }
-
-    // LN
-
-    /** SQL {@code LN(number)} function applied to double values. */
-    public static double ln(double d) {
-        return Math.log(d);
-    }
-
-    /** SQL {@code LN(number)} function applied to long values. */
-    public static double ln(long b0) {
-        return Math.log(b0);
-    }
-
-    /** SQL {@code LN(number)} function applied to BigDecimal values. */
-    public static double ln(BigDecimal d) {
-        return Math.log(d.doubleValue());
-    }
-
-    // LOG10
-
-    /** SQL <code>LOG10(numeric)</code> operator applied to double values. */
-    public static double log10(double b0) {
-        return Math.log10(b0);
-    }
-
-    /** SQL {@code LOG10(number)} function applied to long values. */
-    public static double log10(long b0) {
-        return Math.log10(b0);
-    }
-
-    /** SQL {@code LOG10(number)} function applied to BigDecimal values. */
-    public static double log10(BigDecimal d) {
-        return Math.log10(d.doubleValue());
-    }
-
-    // MOD
-
-    /** SQL <code>MOD</code> operator applied to byte values. */
-    public static byte mod(byte b0, byte b1) {
-        return (byte) (b0 % b1);
-    }
-
-    /** SQL <code>MOD</code> operator applied to short values. */
-    public static short mod(short b0, short b1) {
-        return (short) (b0 % b1);
-    }
-
-    /** SQL <code>MOD</code> operator applied to int values. */
-    public static int mod(int b0, int b1) {
-        return b0 % b1;
-    }
-
-    /** SQL <code>MOD</code> operator applied to long values. */
-    public static long mod(long b0, long b1) {
-        return b0 % b1;
-    }
-
-    // temporary
-    public static BigDecimal mod(BigDecimal b0, int b1) {
-        return mod(b0, BigDecimal.valueOf(b1));
-    }
-
-    // temporary
-    public static int mod(int b0, BigDecimal b1) {
-        return mod(b0, b1.intValue());
-    }
-
-    public static BigDecimal mod(BigDecimal b0, BigDecimal b1) {
-        final BigDecimal[] bigDecimals = b0.divideAndRemainder(b1);
-        return bigDecimals[1];
-    }
-
-    // FLOOR
-
-    public static double floor(double b0) {
-        return Math.floor(b0);
-    }
-
-    public static float floor(float b0) {
-        return (float) Math.floor(b0);
-    }
-
-    public static BigDecimal floor(BigDecimal b0) {
-        return b0.setScale(0, BigDecimal.ROUND_FLOOR);
-    }
-
-    /** SQL <code>FLOOR</code> operator applied to byte values. */
-    public static byte floor(byte b0, byte b1) {
-        return (byte) floor((int) b0, (int) b1);
-    }
-
-    /** SQL <code>FLOOR</code> operator applied to short values. */
-    public static short floor(short b0, short b1) {
-        return (short) floor((int) b0, (int) b1);
-    }
-
-    /** SQL <code>FLOOR</code> operator applied to int values. */
-    public static int floor(int b0, int b1) {
-        int r = b0 % b1;
-        if (r < 0) {
-            r += b1;
-        }
-        return b0 - r;
-    }
-
-    /** SQL <code>FLOOR</code> operator applied to long values. */
-    public static long floor(long b0, long b1) {
-        long r = b0 % b1;
-        if (r < 0) {
-            r += b1;
-        }
-        return b0 - r;
-    }
-
-    // temporary
-    public static BigDecimal floor(BigDecimal b0, int b1) {
-        return floor(b0, BigDecimal.valueOf(b1));
-    }
-
-    // temporary
-    public static int floor(int b0, BigDecimal b1) {
-        return floor(b0, b1.intValue());
-    }
-
-    public static BigDecimal floor(BigDecimal b0, BigDecimal b1) {
-        final BigDecimal[] bigDecimals = b0.divideAndRemainder(b1);
-        BigDecimal r = bigDecimals[1];
-        if (r.signum() < 0) {
-            r = r.add(b1);
-        }
-        return b0.subtract(r);
-    }
-
-    // CEIL
-
-    public static double ceil(double b0) {
-        return Math.ceil(b0);
-    }
-
-    public static float ceil(float b0) {
-        return (float) Math.ceil(b0);
-    }
-
-    public static BigDecimal ceil(BigDecimal b0) {
-        return b0.setScale(0, BigDecimal.ROUND_CEILING);
-    }
-
-    /** SQL <code>CEIL</code> operator applied to byte values. */
-    public static byte ceil(byte b0, byte b1) {
-        return floor((byte) (b0 + b1 - 1), b1);
-    }
-
-    /** SQL <code>CEIL</code> operator applied to short values. */
-    public static short ceil(short b0, short b1) {
-        return floor((short) (b0 + b1 - 1), b1);
-    }
-
-    /** SQL <code>CEIL</code> operator applied to int values. */
-    public static int ceil(int b0, int b1) {
-        int r = b0 % b1;
-        if (r > 0) {
-            r -= b1;
-        }
-        return b0 - r;
-    }
-
-    /** SQL <code>CEIL</code> operator applied to long values. */
-    public static long ceil(long b0, long b1) {
-        return floor(b0 + b1 - 1, b1);
-    }
-
-    // temporary
-    public static BigDecimal ceil(BigDecimal b0, int b1) {
-        return ceil(b0, BigDecimal.valueOf(b1));
-    }
-
-    // temporary
-    public static int ceil(int b0, BigDecimal b1) {
-        return ceil(b0, b1.intValue());
-    }
-
-    public static BigDecimal ceil(BigDecimal b0, BigDecimal b1) {
-        final BigDecimal[] bigDecimals = b0.divideAndRemainder(b1);
-        BigDecimal r = bigDecimals[1];
-        if (r.signum() > 0) {
-            r = r.subtract(b1);
-        }
-        return b0.subtract(r);
-    }
-
-    // ABS
-
-    /** SQL <code>ABS</code> operator applied to byte values. */
-    public static byte abs(byte b0) {
-        return (byte) Math.abs(b0);
-    }
-
-    /** SQL <code>ABS</code> operator applied to short values. */
-    public static short abs(short b0) {
-        return (short) Math.abs(b0);
-    }
-
-    /** SQL <code>ABS</code> operator applied to int values. */
-    public static int abs(int b0) {
-        return Math.abs(b0);
-    }
-
-    /** SQL <code>ABS</code> operator applied to long values. */
-    public static long abs(long b0) {
-        return Math.abs(b0);
-    }
-
-    /** SQL <code>ABS</code> operator applied to float values. */
-    public static float abs(float b0) {
-        return Math.abs(b0);
-    }
-
-    /** SQL <code>ABS</code> operator applied to double values. */
-    public static double abs(double b0) {
-        return Math.abs(b0);
-    }
-
-    /** SQL <code>ABS</code> operator applied to BigDecimal values. */
-    public static BigDecimal abs(BigDecimal b0) {
-        return b0.abs();
-    }
-
-    // Helpers
-
-    /** Helper for implementing MIN. Somewhat similar to LEAST operator. */
-    public static <T extends Comparable<T>> T lesser(T b0, T b1) {
-        return b0 == null || b0.compareTo(b1) > 0 ? b1 : b0;
-    }
-
-    /** LEAST operator. */
-    public static <T extends Comparable<T>> T least(T b0, T b1) {
-        return b0 == null || b1 != null && b0.compareTo(b1) > 0 ? b1 : b0;
-    }
-
-    public static boolean greater(boolean b0, boolean b1) {
-        return b0 || b1;
-    }
-
-    public static boolean lesser(boolean b0, boolean b1) {
-        return b0 && b1;
-    }
-
-    public static byte greater(byte b0, byte b1) {
-        return b0 > b1 ? b0 : b1;
-    }
-
-    public static byte lesser(byte b0, byte b1) {
-        return b0 > b1 ? b1 : b0;
-    }
-
-    public static char greater(char b0, char b1) {
-        return b0 > b1 ? b0 : b1;
-    }
-
-    public static char lesser(char b0, char b1) {
-        return b0 > b1 ? b1 : b0;
-    }
-
-    public static short greater(short b0, short b1) {
-        return b0 > b1 ? b0 : b1;
-    }
-
-    public static short lesser(short b0, short b1) {
-        return b0 > b1 ? b1 : b0;
-    }
-
-    public static int greater(int b0, int b1) {
-        return b0 > b1 ? b0 : b1;
-    }
-
-    public static int lesser(int b0, int b1) {
-        return b0 > b1 ? b1 : b0;
-    }
-
-    public static long greater(long b0, long b1) {
-        return b0 > b1 ? b0 : b1;
-    }
-
-    public static long lesser(long b0, long b1) {
-        return b0 > b1 ? b1 : b0;
-    }
-
-    public static float greater(float b0, float b1) {
-        return b0 > b1 ? b0 : b1;
-    }
-
-    public static float lesser(float b0, float b1) {
-        return b0 > b1 ? b1 : b0;
-    }
-
-    public static double greater(double b0, double b1) {
-        return b0 > b1 ? b0 : b1;
-    }
-
-    public static double lesser(double b0, double b1) {
-        return b0 > b1 ? b1 : b0;
-    }
-
-    /** Helper for implementing MAX. Somewhat similar to GREATEST operator. */
-    public static <T extends Comparable<T>> T greater(T b0, T b1) {
-        return b0 == null || b0.compareTo(b1) < 0 ? b1 : b0;
-    }
-
-    /** GREATEST operator. */
-    public static <T extends Comparable<T>> T greatest(T b0, T b1) {
-        return b0 == null || b1 != null && b0.compareTo(b1) < 0 ? b1 : b0;
-    }
-
-    /** Boolean comparison. */
-    public static int compare(boolean x, boolean y) {
-        return x == y ? 0 : x ? 1 : -1;
-    }
-
-    /** CAST(FLOAT AS VARCHAR). */
-    public static String toString(float x) {
-        if (x == 0) {
-            return "0E0";
-        }
-        BigDecimal bigDecimal = new BigDecimal(x, MathContext.DECIMAL32).stripTrailingZeros();
-        final String s = bigDecimal.toString();
-        return s.replaceAll("0*E", "E").replace("E+", "E");
-    }
-
-    /** CAST(DOUBLE AS VARCHAR). */
-    public static String toString(double x) {
-        if (x == 0) {
-            return "0E0";
-        }
-        BigDecimal bigDecimal = new BigDecimal(x, MathContext.DECIMAL64).stripTrailingZeros();
-        final String s = bigDecimal.toString();
-        return s.replaceAll("0*E", "E").replace("E+", "E");
-    }
-
-    /** CAST(DECIMAL AS VARCHAR). */
-    public static String toString(BigDecimal x) {
-        final String s = x.toString();
-        if (s.startsWith("0")) {
-            // we want ".1" not "0.1"
-            return s.substring(1);
-        } else if (s.startsWith("-0")) {
-            // we want "-.1" not "-0.1"
-            return "-" + s.substring(2);
-        } else {
-            return s;
-        }
-    }
-
-    /** CAST(BOOLEAN AS VARCHAR). */
-    public static String toString(boolean x) {
-        // Boolean.toString returns lower case -- no good.
-        return x ? "TRUE" : "FALSE";
-    }
-
-    @NonDeterministic
-    private static Object cannotConvert(Object o, Class toType) {
-        throw new RuntimeException("Cannot convert " + o + " to " + toType);
-    }
-
-    /** CAST(VARCHAR AS BOOLEAN). */
-    public static boolean toBoolean(String s) {
-        s = trim_(s, true, true, ' ');
-        if (s.equalsIgnoreCase("TRUE")) {
-            return true;
-        } else if (s.equalsIgnoreCase("FALSE")) {
-            return false;
-        } else {
-            throw new RuntimeException("Invalid character for cast");
-        }
-    }
-
-    public static boolean toBoolean(Number number) {
-        return !number.equals(0);
-    }
-
-    public static boolean toBoolean(Object o) {
-        return o instanceof Boolean ? (Boolean) o : o instanceof Number ? toBoolean((Number) o) : o instanceof String ? toBoolean((String) o) : (Boolean) cannotConvert(o, boolean.class);
-    }
-
-    // Don't need parseByte etc. - Byte.parseByte is sufficient.
-
-    public static byte toByte(Object o) {
-        return o instanceof Byte ? (Byte) o : o instanceof Number ? toByte((Number) o) : Byte.parseByte(o.toString());
-    }
-
-    public static byte toByte(Number number) {
-        return number.byteValue();
-    }
-
-    public static char toChar(String s) {
-        return s.charAt(0);
-    }
-
-    public static Character toCharBoxed(String s) {
-        return s.charAt(0);
-    }
-
-    public static short toShort(String s) {
-        return Short.parseShort(s.trim());
-    }
-
-    public static short toShort(Number number) {
-        return number.shortValue();
-    }
-
-    public static short toShort(Object o) {
-        return o instanceof Short ? (Short) o : o instanceof Number ? toShort((Number) o) : o instanceof String ? toShort((String) o) : (Short) cannotConvert(o, short.class);
-    }
-
-    public static int toInt(java.util.Date v) {
-        return toInt(v, LOCAL_TZ);
-    }
-
-    public static int toInt(java.util.Date v, TimeZone timeZone) {
-        return (int) (toLong(v, timeZone) / DateTimeUtils.MILLIS_PER_DAY);
-    }
-
-    public static Integer toIntOptional(java.util.Date v) {
-        return v == null ? null : toInt(v);
-    }
-
-    public static Integer toIntOptional(java.util.Date v, TimeZone timeZone) {
-        return v == null ? null : toInt(v, timeZone);
-    }
-
-    public static long toLong(Date v) {
-        return toLong(v, LOCAL_TZ);
-    }
-
-    public static int toInt(java.sql.Time v) {
-        return (int) (toLong(v) % DateTimeUtils.MILLIS_PER_DAY);
-    }
-
-    public static Integer toIntOptional(java.sql.Time v) {
-        return v == null ? null : toInt(v);
-    }
-
-    public static int toInt(String s) {
-        return Integer.parseInt(s.trim());
-    }
-
-    public static int toInt(Number number) {
-        return number.intValue();
-    }
-
-    public static int toInt(Object o) {
-        return o instanceof Integer ? (Integer) o : o instanceof Number ? toInt((Number) o) : o instanceof String ? toInt((String) o) : (Integer) cannotConvert(o, int.class);
-    }
-
-    public static long toLong(Timestamp v) {
-        return toLong(v, LOCAL_TZ);
-    }
-
-    // mainly intended for java.sql.Timestamp but works for other dates also
-    public static long toLong(java.util.Date v, TimeZone timeZone) {
-        final long time = v.getTime();
-        return time + timeZone.getOffset(time);
-    }
-
-    // mainly intended for java.sql.Timestamp but works for other dates also
-    public static Long toLongOptional(java.util.Date v) {
-        return v == null ? null : toLong(v, LOCAL_TZ);
-    }
-
-    public static Long toLongOptional(Timestamp v, TimeZone timeZone) {
-        if (v == null) {
-            return null;
-        }
-        return toLong(v, LOCAL_TZ);
-    }
-
-    public static long toLong(String s) {
-        if (s.startsWith("199") && s.contains(":")) {
-            return Timestamp.valueOf(s).getTime();
-        }
-        return Long.parseLong(s.trim());
-    }
-
-    public static long toLong(Number number) {
-        return number.longValue();
-    }
-
-    public static long toLong(Object o) {
-        return o instanceof Long ? (Long) o : o instanceof Number ? toLong((Number) o) : o instanceof String ? toLong((String) o) : (Long) cannotConvert(o, long.class);
-    }
-
-    public static float toFloat(String s) {
-        return Float.parseFloat(s.trim());
-    }
-
-    public static float toFloat(Number number) {
-        return number.floatValue();
-    }
-
-    public static float toFloat(Object o) {
-        return o instanceof Float ? (Float) o : o instanceof Number ? toFloat((Number) o) : o instanceof String ? toFloat((String) o) : (Float) cannotConvert(o, float.class);
-    }
-
-    public static double toDouble(String s) {
-        return Double.parseDouble(s.trim());
-    }
-
-    public static double toDouble(Number number) {
-        return number.doubleValue();
-    }
-
-    public static double toDouble(Object o) {
-        return o instanceof Double ? (Double) o : o instanceof Number ? toDouble((Number) o) : o instanceof String ? toDouble((String) o) : (Double) cannotConvert(o, double.class);
-    }
-
-    public static BigDecimal toBigDecimal(String s) {
-        return new BigDecimal(s.trim());
-    }
-
-    public static BigDecimal toBigDecimal(Number number) {
-        // There are some values of "long" that cannot be represented as "double".
-        // Not so "int". If it isn't a long, go straight to double.
-        return number instanceof BigDecimal ? (BigDecimal) number : number instanceof BigInteger ? new BigDecimal((BigInteger) number) : number instanceof Long ? new BigDecimal(number.longValue()) : new BigDecimal(number.doubleValue());
-    }
-
-    public static BigDecimal toBigDecimal(Object o) {
-        return o instanceof Number ? toBigDecimal((Number) o) : toBigDecimal(o.toString());
-    }
-
-    // Don't need shortValueOf etc. - Short.valueOf is sufficient.
-
-    /** Helper for CAST(... AS VARCHAR(maxLength)). */
-    public static String truncate(String s, int maxLength) {
-        return s == null ? null : s.length() > maxLength ? s.substring(0, maxLength) : s;
-    }
-
-    /** Helper for CAST(... AS VARBINARY(maxLength)). */
-    public static ByteString truncate(ByteString s, int maxLength) {
-        return s == null ? null : s.length() > maxLength ? s.substring(0, maxLength) : s;
-    }
-
-    /** SQL {@code POSITION(seek IN string)} function. */
-    public static int position(String seek, String s) {
-        return s.indexOf(seek) + 1;
-    }
-
-    /** SQL {@code POSITION(seek IN string)} function. */
-    public static int position(ByteString seek, ByteString s) {
-        return s.indexOf(seek) + 1;
-    }
-
-    /** Helper for rounding. Truncate(12345, 1000) returns 12000. */
-    public static long round(long v, long x) {
-        return truncate(v + x / 2, x);
-    }
-
-    /** Helper for rounding. Truncate(12345, 1000) returns 12000. */
-    public static long truncate(long v, long x) {
-        long remainder = v % x;
-        if (remainder < 0) {
-            remainder += x;
-        }
-        return v - remainder;
-    }
-
-    /** Helper for rounding. Truncate(12345, 1000) returns 12000. */
-    public static int round(int v, int x) {
-        return truncate(v + x / 2, x);
-    }
-
-    /** Helper for rounding. Truncate(12345, 1000) returns 12000. */
-    public static int truncate(int v, int x) {
-        int remainder = v % x;
-        if (remainder < 0) {
-            remainder += x;
-        }
-        return v - remainder;
-    }
-
-    /** SQL {@code CURRENT_TIMESTAMP} function. */
-    @NonDeterministic
-    public static long currentTimestamp(DataContext root) {
-        // Cast required for JDK 1.6.
-        return (Long) DataContext.Variable.CURRENT_TIMESTAMP.get(root);
-    }
-
-    /** SQL {@code CURRENT_TIME} function. */
-    @NonDeterministic
-    public static int currentTime(DataContext root) {
-        int time = (int) (currentTimestamp(root) % DateTimeUtils.MILLIS_PER_DAY);
-        if (time < 0) {
-            time += DateTimeUtils.MILLIS_PER_DAY;
-        }
-        return time;
-    }
-
-    /** SQL {@code CURRENT_DATE} function. */
-    @NonDeterministic
-    public static int currentDate(DataContext root) {
-        final long timestamp = currentTimestamp(root);
-        int date = (int) (timestamp / DateTimeUtils.MILLIS_PER_DAY);
-        final int time = (int) (timestamp % DateTimeUtils.MILLIS_PER_DAY);
-        if (time < 0) {
-            --date;
-        }
-        return date;
-    }
-
-    /** SQL {@code LOCAL_TIMESTAMP} function. */
-    @NonDeterministic
-    public static long localTimestamp(DataContext root) {
-        // Cast required for JDK 1.6.
-        return (Long) DataContext.Variable.LOCAL_TIMESTAMP.get(root);
-    }
-
-    /** SQL {@code LOCAL_TIME} function. */
-    @NonDeterministic
-    public static int localTime(DataContext root) {
-        return (int) (localTimestamp(root) % DateTimeUtils.MILLIS_PER_DAY);
-    }
-
-    /** Helper for "array element reference". Caller has already ensured that
-     * array and index are not null. Index is 1-based, per SQL. */
-    public static Object arrayItem(List list, int item) {
-        if (item < 1 || item > list.size()) {
-            return null;
-        }
-        return list.get(item - 1);
-    }
-
-    /** Helper for "map element reference". Caller has already ensured that
-     * array and index are not null. Index is 1-based, per SQL. */
-    public static Object mapItem(Map map, Object item) {
-        return map.get(item);
-    }
-
-    /** Implements the {@code [ ... ]} operator on an object whose type is not
-     * known until runtime.
-     */
-    public static Object item(Object object, Object index) {
-        if (object instanceof Map) {
-            return ((Map) object).get(index);
-        }
-        if (object instanceof List && index instanceof Number) {
-            List list = (List) object;
-            return list.get(((Number) index).intValue());
-        }
-        return null;
-    }
-
-    /** NULL &rarr; FALSE, FALSE &rarr; FALSE, TRUE &rarr; TRUE. */
-    public static boolean isTrue(Boolean b) {
-        return b != null && b;
-    }
-
-    /** NULL &rarr; TRUE, FALSE &rarr; FALSE, TRUE &rarr; TRUE. */
-    public static boolean isNotFalse(Boolean b) {
-        return b == null || b;
-    }
-
-    /** NULL &rarr; NULL, FALSE &rarr; TRUE, TRUE &rarr; FALSE. */
-    public static Boolean not(Boolean b) {
-        return (b == null) ? null : !b;
-    }
-
-    /** Converts a JDBC array to a list. */
-    public static List arrayToList(final java.sql.Array a) {
-        if (a == null) {
-            return null;
-        }
-        try {
-            return Primitive.asList(a.getArray());
-        } catch (SQLException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /** Support the {@code CURRENT VALUE OF sequence} operator. */
-    @NonDeterministic
-    public static long sequenceCurrentValue(String key) {
-        return getAtomicLong(key).get();
-    }
-
-    /** Support the {@code NEXT VALUE OF sequence} operator. */
-    @NonDeterministic
-    public static long sequenceNextValue(String key) {
-        return getAtomicLong(key).incrementAndGet();
-    }
-
-    private static AtomicLong getAtomicLong(String key) {
-        final Map<String, AtomicLong> map = THREAD_SEQUENCES.get();
-        AtomicLong atomic = map.get(key);
-        if (atomic == null) {
-            atomic = new AtomicLong();
-            map.put(key, atomic);
-        }
-        return atomic;
-    }
-
-    /** Support the SLICE function. */
-    public static List slice(List list) {
-        return list;
-    }
-
-    /** Support the ELEMENT function. */
-    public static Object element(List list) {
-        switch (list.size()) {
-        case 0:
-            return null;
-        case 1:
-            return list.get(0);
-        default:
-            throw new RuntimeException("more than one value");
-        }
-    }
-
-    /** Returns a lambda that converts a list to an enumerable. */
-    public static <E> Function1<List<E>, Enumerable<E>> listToEnumerable() {
-        //noinspection unchecked
-        return (Function1<List<E>, Enumerable<E>>) (Function1) LIST_AS_ENUMERABLE;
-    }
-
-}
-
-// End SqlFunctions.java
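
The deletion above removes Kylin's forked copy of Calcite's runtime SqlFunctions (CAST, ABS, MIN/MAX, rounding, date/time, sequence and collection helpers); presumably the stock Calcite 1.6 class now suffices, since this deletion ships as part of the KYLIN-1343 upgrade commit. One detail from the deleted helpers worth keeping in mind is the flooring behavior of the truncate/round pair. A minimal standalone sketch, with method bodies mirroring the deleted code and an illustrative main method:

    public class RoundingSketch {
        // Floors v to the nearest lower multiple of x; the remainder fix-up
        // keeps the result a true floor for negative v as well.
        static long truncate(long v, long x) {
            long remainder = v % x;
            if (remainder < 0) {
                remainder += x;
            }
            return v - remainder;
        }

        // Rounds v to the nearest multiple of x by flooring v + x/2.
        static long round(long v, long x) {
            return truncate(v + x / 2, x);
        }

        public static void main(String[] args) {
            System.out.println(truncate(12345, 1000)); // 12000
            System.out.println(round(12345, 1000));    // 12000
            System.out.println(round(12500, 1000));    // 13000
            System.out.println(truncate(-1, 1000));    // -1000, not 0
        }
    }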

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/atopcalcite/src/main/java/org/apache/calcite/sql2rel/RelFieldTrimmer.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/RelFieldTrimmer.java b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/RelFieldTrimmer.java
new file mode 100644
index 0000000..f88157c
--- /dev/null
+++ b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/RelFieldTrimmer.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.calcite.sql2rel;
+
+import org.apache.calcite.plan.RelOptCluster;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.core.RelFactories;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.tools.RelBuilder;
+
+/*
+ * OVERRIDE POINT:
+ * - disable the whole RelFieldTrimmer
+ */
+
+public class RelFieldTrimmer {
+
+    public RelFieldTrimmer(SqlValidator validator, RelBuilder relBuilder) {
+    }
+
+    public RelFieldTrimmer(SqlValidator validator, RelOptCluster cluster, RelFactories.ProjectFactory projectFactory, RelFactories.FilterFactory filterFactory, RelFactories.JoinFactory joinFactory, RelFactories.SemiJoinFactory semiJoinFactory, RelFactories.SortFactory sortFactory, RelFactories.AggregateFactory aggregateFactory, RelFactories.SetOpFactory setOpFactory) {
+    }
+
+    public RelNode trim(RelNode rootRel) {
+        return rootRel;
+    }
+
+}
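
The class above shadows org.apache.calcite.sql2rel.RelFieldTrimmer on the classpath: both constructors are no-ops and trim() is the identity transform, so any Calcite code path that asks for field trimming gets its input tree back untouched. It pairs with the isTrimUnusedFields() override (forced to false) in the SqlToRelConverter changes below. A minimal caller sketch, assuming Calcite is on the classpath; the class and method names here are illustrative, not part of the commit:

    import org.apache.calcite.rel.RelNode;
    import org.apache.calcite.sql.validate.SqlValidator;
    import org.apache.calcite.sql2rel.RelFieldTrimmer;
    import org.apache.calcite.tools.RelBuilder;

    class TrimSketch {
        // With Kylin's override on the classpath this returns rootRel itself,
        // so no narrowing Project is inserted over "unused" fields.
        static RelNode trimUnused(SqlValidator validator, RelBuilder relBuilder, RelNode rootRel) {
            return new RelFieldTrimmer(validator, relBuilder).trim(rootRel);
        }
    }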


[50/50] [abbrv] kylin git commit: Merge branch 'master' into 2.x-staging

Posted by li...@apache.org.
Merge branch 'master' into 2.x-staging


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4e48f2ae
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4e48f2ae
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4e48f2ae

Branch: refs/heads/master
Commit: 4e48f2ae63b66ebb04147f966fb72008bb939dff
Parents: c4be546 13e6a43
Author: Li Yang <li...@apache.org>
Authored: Thu Mar 10 15:42:41 2016 +0800
Committer: Li Yang <li...@apache.org>
Committed: Thu Mar 10 15:42:41 2016 +0800

----------------------------------------------------------------------

----------------------------------------------------------------------



[45/50] [abbrv] kylin git commit: KYLIN-1343 Upgrade to calcite 1.6 (with Edward Zhang)

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
index c184f0b..aed7c27 100644
--- a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
+++ b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
@@ -18,6 +18,7 @@
 
 package org.apache.calcite.sql2rel;
 
+import org.apache.calcite.avatica.util.Spaces;
 import org.apache.calcite.linq4j.Ord;
 import org.apache.calcite.plan.Convention;
 import org.apache.calcite.plan.RelOptCluster;
@@ -25,26 +26,32 @@ import org.apache.calcite.plan.RelOptPlanner;
 import org.apache.calcite.plan.RelOptSamplingParameters;
 import org.apache.calcite.plan.RelOptTable;
 import org.apache.calcite.plan.RelOptUtil;
+import org.apache.calcite.plan.RelTraitSet;
 import org.apache.calcite.prepare.Prepare;
 import org.apache.calcite.prepare.RelOptTableImpl;
 import org.apache.calcite.rel.RelCollation;
+import org.apache.calcite.rel.RelCollationTraitDef;
 import org.apache.calcite.rel.RelCollations;
 import org.apache.calcite.rel.RelFieldCollation;
 import org.apache.calcite.rel.RelNode;
-import org.apache.calcite.rel.RelShuttle;
+import org.apache.calcite.rel.RelRoot;
+import org.apache.calcite.rel.SingleRel;
 import org.apache.calcite.rel.core.Aggregate;
 import org.apache.calcite.rel.core.AggregateCall;
 import org.apache.calcite.rel.core.Collect;
 import org.apache.calcite.rel.core.CorrelationId;
+import org.apache.calcite.rel.core.Filter;
 import org.apache.calcite.rel.core.Join;
 import org.apache.calcite.rel.core.JoinInfo;
 import org.apache.calcite.rel.core.JoinRelType;
 import org.apache.calcite.rel.core.Project;
 import org.apache.calcite.rel.core.RelFactories;
 import org.apache.calcite.rel.core.Sample;
+import org.apache.calcite.rel.core.Sort;
 import org.apache.calcite.rel.core.Uncollect;
 import org.apache.calcite.rel.logical.LogicalAggregate;
 import org.apache.calcite.rel.logical.LogicalCorrelate;
+import org.apache.calcite.rel.logical.LogicalFilter;
 import org.apache.calcite.rel.logical.LogicalIntersect;
 import org.apache.calcite.rel.logical.LogicalJoin;
 import org.apache.calcite.rel.logical.LogicalMinus;
@@ -56,6 +63,7 @@ import org.apache.calcite.rel.logical.LogicalTableScan;
 import org.apache.calcite.rel.logical.LogicalUnion;
 import org.apache.calcite.rel.logical.LogicalValues;
 import org.apache.calcite.rel.metadata.RelColumnMapping;
+import org.apache.calcite.rel.stream.Delta;
 import org.apache.calcite.rel.stream.LogicalDelta;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
@@ -72,8 +80,8 @@ import org.apache.calcite.rex.RexLiteral;
 import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexRangeRef;
 import org.apache.calcite.rex.RexShuttle;
+import org.apache.calcite.rex.RexSubQuery;
 import org.apache.calcite.rex.RexUtil;
-import org.apache.calcite.rex.RexVisitorImpl;
 import org.apache.calcite.rex.RexWindowBound;
 import org.apache.calcite.schema.ModifiableTable;
 import org.apache.calcite.schema.ModifiableView;
@@ -85,6 +93,7 @@ import org.apache.calcite.sql.SemiJoinType;
 import org.apache.calcite.sql.SqlAggFunction;
 import org.apache.calcite.sql.SqlBasicCall;
 import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
 import org.apache.calcite.sql.SqlDataTypeSpec;
 import org.apache.calcite.sql.SqlDelete;
 import org.apache.calcite.sql.SqlDynamicParam;
@@ -102,10 +111,12 @@ import org.apache.calcite.sql.SqlNodeList;
 import org.apache.calcite.sql.SqlNumericLiteral;
 import org.apache.calcite.sql.SqlOperator;
 import org.apache.calcite.sql.SqlOperatorTable;
+import org.apache.calcite.sql.SqlOrderBy;
 import org.apache.calcite.sql.SqlSampleSpec;
 import org.apache.calcite.sql.SqlSelect;
 import org.apache.calcite.sql.SqlSelectKeyword;
 import org.apache.calcite.sql.SqlSetOperator;
+import org.apache.calcite.sql.SqlUnnestOperator;
 import org.apache.calcite.sql.SqlUpdate;
 import org.apache.calcite.sql.SqlUtil;
 import org.apache.calcite.sql.SqlValuesOperator;
@@ -138,8 +149,10 @@ import org.apache.calcite.sql.validate.SqlValidatorImpl;
 import org.apache.calcite.sql.validate.SqlValidatorNamespace;
 import org.apache.calcite.sql.validate.SqlValidatorScope;
 import org.apache.calcite.sql.validate.SqlValidatorUtil;
+import org.apache.calcite.tools.RelBuilder;
 import org.apache.calcite.util.ImmutableBitSet;
 import org.apache.calcite.util.ImmutableIntList;
+import org.apache.calcite.util.Litmus;
 import org.apache.calcite.util.NlsString;
 import org.apache.calcite.util.NumberUtil;
 import org.apache.calcite.util.Pair;
@@ -158,15 +171,16 @@ import com.google.common.collect.Sets;
 import java.lang.reflect.Type;
 import java.math.BigDecimal;
 import java.util.AbstractList;
+import java.util.ArrayDeque;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Deque;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Stack;
 import java.util.TreeSet;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -178,7 +192,7 @@ import static org.apache.calcite.util.Static.RESOURCE;
  * OVERRIDE POINT:
  * - getInSubqueryThreshold(), was `20`, now `Integer.MAX_VALUE`
  * - isTrimUnusedFields(), override to false
- * - AggConverter.visit(SqlCall), skip column reading for COUNT(COL), for https://jirap.corp.ebay.com/browse/KYLIN-104
+ * - AggConverter.translateAgg(...), skip column reading for COUNT(COL), for https://jirap.corp.ebay.com/browse/KYLIN-104
  */
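
Of the three override points, the getInSubqueryThreshold() change matters most for query shape. Stock Calcite converts an IN list whose size reaches the threshold (IN_SUBQUERY_THRESHOLD, 20, declared below) into a join against a static values table; raising the threshold to Integer.MAX_VALUE means every IN list is instead expanded into OR-ed equality predicates, which Kylin can evaluate as an ordinary filter. A minimal sketch of just that decision; the names are illustrative, not Calcite API:

    class InThresholdSketch {
        // Below the threshold the IN list becomes OR-ed equalities;
        // at or above it, Calcite builds a join to a static values table.
        static boolean useValuesJoin(int inListSize, int threshold) {
            return inListSize >= threshold;
        }

        public static void main(String[] args) {
            System.out.println(useValuesJoin(25, 20));                // true: stock Calcite joins
            System.out.println(useValuesJoin(25, Integer.MAX_VALUE)); // false: Kylin keeps the ORs
        }
    }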
 
 /**
@@ -190,3851 +204,4825 @@ import static org.apache.calcite.util.Static.RESOURCE;
  * {@link #convertExpression(SqlNode)}.
  */
 public class SqlToRelConverter {
-    //~ Static fields/initializers ---------------------------------------------
-
-    protected static final Logger SQL2REL_LOGGER = CalciteTrace.getSqlToRelTracer();
-
-    private static final BigDecimal TWO = BigDecimal.valueOf(2L);
-
-    //~ Instance fields --------------------------------------------------------
-
-    protected final SqlValidator validator;
-    protected final RexBuilder rexBuilder;
-    protected final Prepare.CatalogReader catalogReader;
-    protected final RelOptCluster cluster;
-    private DefaultValueFactory defaultValueFactory;
-    private SubqueryConverter subqueryConverter;
-    protected final List<RelNode> leaves = new ArrayList<>();
-    private final List<SqlDynamicParam> dynamicParamSqlNodes = new ArrayList<>();
-    private final SqlOperatorTable opTab;
-    private boolean shouldConvertTableAccess;
-    protected final RelDataTypeFactory typeFactory;
-    private final SqlNodeToRexConverter exprConverter;
-    private boolean decorrelationEnabled;
-    private boolean trimUnusedFields;
-    private boolean shouldCreateValuesRel;
-    private boolean isExplain;
-    private int nDynamicParamsInExplain;
-
-    /**
-     * Fields used in name resolution for correlated subqueries.
-     */
-    private final Map<String, DeferredLookup> mapCorrelToDeferred = new HashMap<>();
-    private int nextCorrel = 0;
-
-    private static final String CORREL_PREFIX = "$cor";
-
-    /**
-     * Stack of names of datasets requested by the <code>
-     * TABLE(SAMPLE(&lt;datasetName&gt;, &lt;query&gt;))</code> construct.
-     */
-    private final Stack<String> datasetStack = new Stack<>();
-
-    /**
-     * Mapping of non-correlated subqueries that have been converted to their
-     * equivalent constants. Used to avoid re-evaluating the subquery if it's
-     * already been evaluated.
-     */
-    private final Map<SqlNode, RexNode> mapConvertedNonCorrSubqs = new HashMap<>();
-
-    public final RelOptTable.ViewExpander viewExpander;
-
-    //~ Constructors -----------------------------------------------------------
-    /**
-     * Creates a converter.
-     *
-     * @param viewExpander    Preparing statement
-     * @param validator       Validator
-     * @param catalogReader   Schema
-     * @param planner         Planner
-     * @param rexBuilder      Rex builder
-     * @param convertletTable Expression converter
-     */
-    @Deprecated
-    // will be removed before 2.0
-    public SqlToRelConverter(RelOptTable.ViewExpander viewExpander, SqlValidator validator, Prepare.CatalogReader catalogReader, RelOptPlanner planner, RexBuilder rexBuilder, SqlRexConvertletTable convertletTable) {
-        this(viewExpander, validator, catalogReader, RelOptCluster.create(planner, rexBuilder), convertletTable);
-    }
-
-    /* Creates a converter. */
-    public SqlToRelConverter(RelOptTable.ViewExpander viewExpander, SqlValidator validator, Prepare.CatalogReader catalogReader, RelOptCluster cluster, SqlRexConvertletTable convertletTable) {
-        this.viewExpander = viewExpander;
-        this.opTab = (validator == null) ? SqlStdOperatorTable.instance() : validator.getOperatorTable();
-        this.validator = validator;
-        this.catalogReader = catalogReader;
-        this.defaultValueFactory = new NullDefaultValueFactory();
-        this.subqueryConverter = new NoOpSubqueryConverter();
-        this.rexBuilder = cluster.getRexBuilder();
-        this.typeFactory = rexBuilder.getTypeFactory();
-        this.cluster = Preconditions.checkNotNull(cluster);
-        this.shouldConvertTableAccess = true;
-        this.exprConverter = new SqlNodeToRexConverterImpl(convertletTable);
-        decorrelationEnabled = true;
-        trimUnusedFields = false;
-        shouldCreateValuesRel = true;
-        isExplain = false;
-        nDynamicParamsInExplain = 0;
-    }
-
-    //~ Methods ----------------------------------------------------------------
+  //~ Static fields/initializers ---------------------------------------------
+
+  protected static final Logger SQL2REL_LOGGER =
+      CalciteTrace.getSqlToRelTracer();
+
+  private static final BigDecimal TWO = BigDecimal.valueOf(2L);
+
+  /** Size of the smallest IN list that will be converted to a semijoin to a
+   * static table. */
+  public static final int IN_SUBQUERY_THRESHOLD = 20;
+
+  //~ Instance fields --------------------------------------------------------
+
+  protected final SqlValidator validator;
+  protected final RexBuilder rexBuilder;
+  protected final Prepare.CatalogReader catalogReader;
+  protected final RelOptCluster cluster;
+  private DefaultValueFactory defaultValueFactory;
+  private SubqueryConverter subqueryConverter;
+  protected final List<RelNode> leaves = new ArrayList<>();
+  private final List<SqlDynamicParam> dynamicParamSqlNodes = new ArrayList<>();
+  private final SqlOperatorTable opTab;
+  private boolean shouldConvertTableAccess;
+  protected final RelDataTypeFactory typeFactory;
+  private final SqlNodeToRexConverter exprConverter;
+  private boolean decorrelationEnabled;
+  private boolean trimUnusedFields;
+  private boolean shouldCreateValuesRel;
+  private boolean isExplain;
+  private int nDynamicParamsInExplain;
+
+  /**
+   * Fields used in name resolution for correlated subqueries.
+   */
+  private final Map<CorrelationId, DeferredLookup> mapCorrelToDeferred =
+      new HashMap<>();
+
+  /**
+   * Stack of names of datasets requested by the <code>
+   * TABLE(SAMPLE(&lt;datasetName&gt;, &lt;query&gt;))</code> construct.
+   */
+  private final Deque<String> datasetStack = new ArrayDeque<>();
+
+  /**
+   * Mapping of non-correlated subqueries that have been converted to their
+   * equivalent constants. Used to avoid re-evaluating the subquery if it's
+   * already been evaluated.
+   */
+  private final Map<SqlNode, RexNode> mapConvertedNonCorrSubqs =
+      new HashMap<>();
+
+  public final RelOptTable.ViewExpander viewExpander;
+
+  /** Whether to expand sub-queries. If false, each sub-query becomes a
+   * {@link org.apache.calcite.rex.RexSubQuery}. */
+  private boolean expand = true;
+
+  //~ Constructors -----------------------------------------------------------
+  /**
+   * Creates a converter.
+   *
+   * @param viewExpander    Preparing statement
+   * @param validator       Validator
+   * @param catalogReader   Schema
+   * @param planner         Planner
+   * @param rexBuilder      Rex builder
+   * @param convertletTable Expression converter
+   */
+  @Deprecated // will be removed before 2.0
+  public SqlToRelConverter(
+      RelOptTable.ViewExpander viewExpander,
+      SqlValidator validator,
+      Prepare.CatalogReader catalogReader,
+      RelOptPlanner planner,
+      RexBuilder rexBuilder,
+      SqlRexConvertletTable convertletTable) {
+    this(viewExpander, validator, catalogReader,
+        RelOptCluster.create(planner, rexBuilder), convertletTable);
+  }
+
+  /* Creates a converter. */
+  public SqlToRelConverter(
+      RelOptTable.ViewExpander viewExpander,
+      SqlValidator validator,
+      Prepare.CatalogReader catalogReader,
+      RelOptCluster cluster,
+      SqlRexConvertletTable convertletTable) {
+    this.viewExpander = viewExpander;
+    this.opTab =
+        (validator
+            == null) ? SqlStdOperatorTable.instance()
+            : validator.getOperatorTable();
+    this.validator = validator;
+    this.catalogReader = catalogReader;
+    this.defaultValueFactory = new NullDefaultValueFactory();
+    this.subqueryConverter = new NoOpSubqueryConverter();
+    this.rexBuilder = cluster.getRexBuilder();
+    this.typeFactory = rexBuilder.getTypeFactory();
+    this.cluster = Preconditions.checkNotNull(cluster);
+    this.shouldConvertTableAccess = true;
+    this.exprConverter =
+        new SqlNodeToRexConverterImpl(convertletTable);
+    decorrelationEnabled = true;
+    trimUnusedFields = false;
+    shouldCreateValuesRel = true;
+    isExplain = false;
+    nDynamicParamsInExplain = 0;
+  }
+
+  //~ Methods ----------------------------------------------------------------
+
+  /**
+   * @return the RelOptCluster in use.
+   */
+  public RelOptCluster getCluster() {
+    return cluster;
+  }
+
+  /**
+   * Returns the row-expression builder.
+   */
+  public RexBuilder getRexBuilder() {
+    return rexBuilder;
+  }
+
+  /**
+   * Returns the number of dynamic parameters encountered during translation;
+   * this must only be called after {@link #convertQuery}.
+   *
+   * @return number of dynamic parameters
+   */
+  public int getDynamicParamCount() {
+    return dynamicParamSqlNodes.size();
+  }
+
+  /**
+   * Returns the type inferred for a dynamic parameter.
+   *
+   * @param index 0-based index of dynamic parameter
+   * @return inferred type, never null
+   */
+  public RelDataType getDynamicParamType(int index) {
+    SqlNode sqlNode = dynamicParamSqlNodes.get(index);
+    if (sqlNode == null) {
+      throw Util.needToImplement("dynamic param type inference");
+    }
+    return validator.getValidatedNodeType(sqlNode);
+  }
+
+  /**
+   * Returns the current count of the number of dynamic parameters in an
+   * EXPLAIN PLAN statement.
+   *
+   * @param increment if true, increment the count
+   * @return the current count before the optional increment
+   */
+  public int getDynamicParamCountInExplain(boolean increment) {
+    int retVal = nDynamicParamsInExplain;
+    if (increment) {
+      ++nDynamicParamsInExplain;
+    }
+    return retVal;
+  }
+
+  /**
+   * @return mapping of non-correlated subqueries that have been converted to
+   * the constants that they evaluate to
+   */
+  public Map<SqlNode, RexNode> getMapConvertedNonCorrSubqs() {
+    return mapConvertedNonCorrSubqs;
+  }
+
+  /**
+   * Adds to the current map of non-correlated converted subqueries the
+   * elements from another map that contains non-correlated subqueries that
+   * have been converted by another SqlToRelConverter.
+   *
+   * @param alreadyConvertedNonCorrSubqs the other map
+   */
+  public void addConvertedNonCorrSubqs(
+      Map<SqlNode, RexNode> alreadyConvertedNonCorrSubqs) {
+    mapConvertedNonCorrSubqs.putAll(alreadyConvertedNonCorrSubqs);
+  }
+
+  /**
+   * Set a new DefaultValueFactory. To have any effect, this must be called
+   * before any convert method.
+   *
+   * @param factory new DefaultValueFactory
+   */
+  public void setDefaultValueFactory(DefaultValueFactory factory) {
+    defaultValueFactory = factory;
+  }
+
+  /**
+   * Sets a new SubqueryConverter. To have any effect, this must be called
+   * before any convert method.
+   *
+   * @param converter new SubqueryConverter
+   */
+  public void setSubqueryConverter(SubqueryConverter converter) {
+    subqueryConverter = converter;
+  }
+
+  /**
+   * Indicates that the current statement is part of an EXPLAIN PLAN statement
+   *
+   * @param nDynamicParams number of dynamic parameters in the statement
+   */
+  public void setIsExplain(int nDynamicParams) {
+    isExplain = true;
+    nDynamicParamsInExplain = nDynamicParams;
+  }
+
+  /**
+   * Controls whether table access references are converted to physical rels
+   * immediately. The optimizer doesn't like leaf rels to have
+   * {@link Convention#NONE}. However, if we are doing further conversion
+   * passes (e.g. {@link RelStructuredTypeFlattener}), then we may need to
+   * defer conversion. To have any effect, this must be called before any
+   * convert method.
+   *
+   * @param enabled true for immediate conversion (the default); false to
+   *                generate logical LogicalTableScan instances
+   */
+  public void enableTableAccessConversion(boolean enabled) {
+    shouldConvertTableAccess = enabled;
+  }
+
+  /**
+   * Controls whether instances of
+   * {@link org.apache.calcite.rel.logical.LogicalValues} are generated. These
+   * may not be supported by all physical implementations. To have any effect,
+   * this must be called before any convert method.
+   *
+   * @param enabled true to allow LogicalValues to be generated (the default);
+   *                false to force substitution of Project+OneRow instead
+   */
+  public void enableValuesRelCreation(boolean enabled) {
+    shouldCreateValuesRel = enabled;
+  }
+
+  private void checkConvertedType(SqlNode query, RelNode result) {
+    if (query.isA(SqlKind.DML)) {
+      return;
+    }
+    // Verify that conversion from SQL to relational algebra did
+    // not perturb any type information.  (We can't do this if the
+    // SQL statement is something like an INSERT which has no
+    // validator type information associated with its result,
+    // hence the namespace check above.)
+    final List<RelDataTypeField> validatedFields =
+        validator.getValidatedNodeType(query).getFieldList();
+    final RelDataType validatedRowType =
+        validator.getTypeFactory().createStructType(
+            Pair.right(validatedFields),
+            SqlValidatorUtil.uniquify(Pair.left(validatedFields)));
+
+    final List<RelDataTypeField> convertedFields =
+        result.getRowType().getFieldList().subList(0, validatedFields.size());
+    final RelDataType convertedRowType =
+        validator.getTypeFactory().createStructType(convertedFields);
+
+    if (!RelOptUtil.equal("validated row type", validatedRowType,
+        "converted row type", convertedRowType, Litmus.IGNORE)) {
+      throw new AssertionError("Conversion to relational algebra failed to "
+          + "preserve datatypes:\n"
+          + "validated type:\n"
+          + validatedRowType.getFullTypeString()
+          + "\nconverted type:\n"
+          + convertedRowType.getFullTypeString()
+          + "\nrel:\n"
+          + RelOptUtil.toString(result));
+    }
+  }
+
+  public RelNode flattenTypes(
+      RelNode rootRel,
+      boolean restructure) {
+    RelStructuredTypeFlattener typeFlattener =
+        new RelStructuredTypeFlattener(rexBuilder, createToRelContext());
+    return typeFlattener.rewrite(rootRel, restructure);
+  }
+
+  /**
+   * If subquery is correlated and decorrelation is enabled, performs
+   * decorrelation.
+   *
+   * @param query   Query
+   * @param rootRel Root relational expression
+   * @return New root relational expression after decorrelation
+   */
+  public RelNode decorrelate(SqlNode query, RelNode rootRel) {
+    if (!enableDecorrelation()) {
+      return rootRel;
+    }
+    final RelNode result = decorrelateQuery(rootRel);
+    if (result != rootRel) {
+      checkConvertedType(query, result);
+    }
+    return result;
+  }
+
+  /**
+   * Walks over a tree of relational expressions, replacing each
+   * {@link RelNode} with a 'slimmed down' relational expression that projects
+   * only the fields required by its consumer.
+   *
+   * <p>This may make things easier for the optimizer, by removing crud that
+   * would expand the search space, but is difficult for the optimizer itself
+   * to do it, because optimizer rules must preserve the number and type of
+   * fields. Hence, this transform that operates on the entire tree, similar
+   * to the {@link RelStructuredTypeFlattener type-flattening transform}.
+   *
+   * <p>Currently this functionality is disabled in farrago/luciddb; the
+   * default implementation of this method does nothing.
+   *
+   * @param ordered Whether the relational expression must produce results in
+   * a particular order (typically because it has an ORDER BY at top level)
+   * @param rootRel Relational expression that is at the root of the tree
+   * @return Trimmed relational expression
+   */
+  public RelNode trimUnusedFields(boolean ordered, RelNode rootRel) {
+    // Trim fields that are not used by their consumer.
+    if (isTrimUnusedFields()) {
+      final RelFieldTrimmer trimmer = newFieldTrimmer();
+      final List<RelCollation> collations =
+          rootRel.getTraitSet().getTraits(RelCollationTraitDef.INSTANCE);
+      rootRel = trimmer.trim(rootRel);
+      if (!ordered
+          && collations != null
+          && !collations.isEmpty()
+          && !collations.equals(ImmutableList.of(RelCollations.EMPTY))) {
+        final RelTraitSet traitSet = rootRel.getTraitSet()
+            .replace(RelCollationTraitDef.INSTANCE, collations);
+        rootRel = rootRel.copy(traitSet, rootRel.getInputs());
+      }
+      boolean dumpPlan = SQL2REL_LOGGER.isLoggable(Level.FINE);
+      if (dumpPlan) {
+        SQL2REL_LOGGER.fine(
+            RelOptUtil.dumpPlan(
+                "Plan after trimming unused fields",
+                rootRel,
+                false,
+                SqlExplainLevel.EXPPLAN_ATTRIBUTES));
+      }
+    }
+    return rootRel;
+  }
+
+  /**
+   * Creates a RelFieldTrimmer.
+   *
+   * @return Field trimmer
+   */
+  protected RelFieldTrimmer newFieldTrimmer() {
+    final RelBuilder relBuilder =
+        RelFactories.LOGICAL_BUILDER.create(cluster, null);
+    return new RelFieldTrimmer(validator, relBuilder);
+  }
+
+  /**
+   * Converts an unvalidated query's parse tree into a relational expression.
+   *
+   * @param query           Query to convert
+   * @param needsValidation Whether to validate the query before converting;
+   *                        <code>false</code> if the query has already been
+   *                        validated.
+   * @param top             Whether the query is top-level, say if its result
+   *                        will become a JDBC result set; <code>false</code> if
+   *                        the query will be part of a view.
+   */
+  public RelRoot convertQuery(
+      SqlNode query,
+      final boolean needsValidation,
+      final boolean top) {
+    if (needsValidation) {
+      query = validator.validate(query);
+    }
 
-    /**
-     * @return the RelOptCluster in use.
-     */
-    public RelOptCluster getCluster() {
-        return cluster;
+    RelNode result = convertQueryRecursive(query, top, null).rel;
+    if (top) {
+      if (isStream(query)) {
+        result = new LogicalDelta(cluster, result.getTraitSet(), result);
+      }
+    }
+    RelCollation collation = RelCollations.EMPTY;
+    if (!query.isA(SqlKind.DML)) {
+      if (isOrdered(query)) {
+        collation = requiredCollation(result);
+      }
+    }
+    checkConvertedType(query, result);
+
+    boolean dumpPlan = SQL2REL_LOGGER.isLoggable(Level.FINE);
+    if (dumpPlan) {
+      SQL2REL_LOGGER.fine(
+          RelOptUtil.dumpPlan(
+              "Plan after converting SqlNode to RelNode",
+              result,
+              false,
+              SqlExplainLevel.EXPPLAN_ATTRIBUTES));
     }
 
-    /**
-     * Returns the row-expression builder.
-     */
-    public RexBuilder getRexBuilder() {
-        return rexBuilder;
+    final RelDataType validatedRowType = validator.getValidatedNodeType(query);
+    return RelRoot.of(result, validatedRowType, query.getKind())
+        .withCollation(collation);
+  }
+
+  private static boolean isStream(SqlNode query) {
+    return query instanceof SqlSelect
+        && ((SqlSelect) query).isKeywordPresent(SqlSelectKeyword.STREAM);
+  }
+
+  public static boolean isOrdered(SqlNode query) {
+    switch (query.getKind()) {
+    case SELECT:
+      return ((SqlSelect) query).getOrderList() != null
+          && ((SqlSelect) query).getOrderList().size() > 0;
+    case WITH:
+      return isOrdered(((SqlWith) query).body);
+    case ORDER_BY:
+      return ((SqlOrderBy) query).orderList.size() > 0;
+    default:
+      return false;
     }
+  }
 
-    /**
-     * Returns the number of dynamic parameters encountered during translation;
-     * this must only be called after {@link #convertQuery}.
-     *
-     * @return number of dynamic parameters
-     */
-    public int getDynamicParamCount() {
-        return dynamicParamSqlNodes.size();
+  private RelCollation requiredCollation(RelNode r) {
+    if (r instanceof Sort) {
+      return ((Sort) r).collation;
+    }
+    if (r instanceof Project) {
+      return requiredCollation(((Project) r).getInput());
+    }
+    if (r instanceof Delta) {
+      return requiredCollation(((Delta) r).getInput());
+    }
+    throw new AssertionError();
+  }
+
+  /**
+   * Converts a SELECT statement's parse tree into a relational expression.
+   */
+  public RelNode convertSelect(SqlSelect select, boolean top) {
+    final SqlValidatorScope selectScope = validator.getWhereScope(select);
+    final Blackboard bb = createBlackboard(selectScope, null, top);
+    convertSelectImpl(bb, select);
+    return bb.root;
+  }
+
+  /**
+   * Factory method for creating translation workspace.
+   */
+  protected Blackboard createBlackboard(SqlValidatorScope scope,
+      Map<String, RexNode> nameToNodeMap, boolean top) {
+    return new Blackboard(scope, nameToNodeMap, top);
+  }
+
+  /**
+   * Implementation of {@link #convertSelect(SqlSelect, boolean)};
+   * derived class may override.
+   */
+  protected void convertSelectImpl(
+      final Blackboard bb,
+      SqlSelect select) {
+    convertFrom(
+        bb,
+        select.getFrom());
+    convertWhere(
+        bb,
+        select.getWhere());
+
+    final List<SqlNode> orderExprList = new ArrayList<>();
+    final List<RelFieldCollation> collationList = new ArrayList<>();
+    gatherOrderExprs(
+        bb,
+        select,
+        select.getOrderList(),
+        orderExprList,
+        collationList);
+    final RelCollation collation =
+        cluster.traitSet().canonize(RelCollations.of(collationList));
+
+    if (validator.isAggregate(select)) {
+      convertAgg(
+          bb,
+          select,
+          orderExprList);
+    } else {
+      convertSelectList(
+          bb,
+          select,
+          orderExprList);
     }
 
-    /**
-     * Returns the type inferred for a dynamic parameter.
-     *
-     * @param index 0-based index of dynamic parameter
-     * @return inferred type, never null
-     */
-    public RelDataType getDynamicParamType(int index) {
-        SqlNode sqlNode = dynamicParamSqlNodes.get(index);
-        if (sqlNode == null) {
-            throw Util.needToImplement("dynamic param type inference");
-        }
-        return validator.getValidatedNodeType(sqlNode);
+    if (select.isDistinct()) {
+      distinctify(bb, true);
+    }
+    convertOrder(
+        select, bb, collation, orderExprList, select.getOffset(),
+        select.getFetch());
+    bb.setRoot(bb.root, true);
+  }
+
+  /**
+   * Having translated 'SELECT ... FROM ... [GROUP BY ...] [HAVING ...]', adds
+   * a relational expression to make the results unique.
+   *
+   * <p>If the SELECT clause contains duplicate expressions, adds
+   * {@link org.apache.calcite.rel.logical.LogicalProject}s so that we are
+   * grouping on the minimal set of keys. The performance gain isn't huge, but
+   * it is difficult to detect these duplicate expressions later.
+   *
+   * @param bb               Blackboard
+   * @param checkForDupExprs Check for duplicate expressions
+   */
+  private void distinctify(
+      Blackboard bb,
+      boolean checkForDupExprs) {
+    // Look for duplicate expressions in the project.
+    // Say we have 'select x, y, x, z'.
+    // Then dups will be {[2, 0]}
+    // and oldToNew will be {[0, 0], [1, 1], [2, 0], [3, 2]}
+    RelNode rel = bb.root;
+    if (checkForDupExprs && (rel instanceof LogicalProject)) {
+      LogicalProject project = (LogicalProject) rel;
+      final List<RexNode> projectExprs = project.getProjects();
+      final List<Integer> origins = new ArrayList<>();
+      int dupCount = 0;
+      for (int i = 0; i < projectExprs.size(); i++) {
+        int x = findExpr(projectExprs.get(i), projectExprs, i);
+        if (x >= 0) {
+          origins.add(x);
+          ++dupCount;
+        } else {
+          origins.add(i);
+        }
+      }
+      if (dupCount == 0) {
+        distinctify(bb, false);
+        return;
+      }
+
+      final Map<Integer, Integer> squished = Maps.newHashMap();
+      final List<RelDataTypeField> fields = rel.getRowType().getFieldList();
+      final List<Pair<RexNode, String>> newProjects = Lists.newArrayList();
+      for (int i = 0; i < fields.size(); i++) {
+        if (origins.get(i) == i) {
+          squished.put(i, newProjects.size());
+          newProjects.add(RexInputRef.of2(i, fields));
+        }
+      }
+      rel =
+          LogicalProject.create(rel, Pair.left(newProjects),
+              Pair.right(newProjects));
+      bb.root = rel;
+      distinctify(bb, false);
+      rel = bb.root;
+
+      // Create the expressions to reverse the mapping.
+      // Project($0, $1, $0, $2).
+      final List<Pair<RexNode, String>> undoProjects = Lists.newArrayList();
+      for (int i = 0; i < fields.size(); i++) {
+        final int origin = origins.get(i);
+        RelDataTypeField field = fields.get(i);
+        undoProjects.add(
+            Pair.of(
+                (RexNode) new RexInputRef(
+                    squished.get(origin), field.getType()),
+                field.getName()));
+      }
+
+      rel =
+          LogicalProject.create(rel, Pair.left(undoProjects),
+              Pair.right(undoProjects));
+      bb.setRoot(
+          rel,
+          false);
+
+      return;
     }
 
-    /**
-     * Returns the current count of the number of dynamic parameters in an
-     * EXPLAIN PLAN statement.
-     *
-     * @param increment if true, increment the count
-     * @return the current count before the optional increment
-     */
-    public int getDynamicParamCountInExplain(boolean increment) {
-        int retVal = nDynamicParamsInExplain;
-        if (increment) {
-            ++nDynamicParamsInExplain;
-        }
-        return retVal;
+    // Usual case: all of the expressions in the SELECT clause are
+    // different.
+    final ImmutableBitSet groupSet =
+        ImmutableBitSet.range(rel.getRowType().getFieldCount());
+    rel =
+        createAggregate(bb, false, groupSet, ImmutableList.of(groupSet),
+            ImmutableList.<AggregateCall>of());
+
+    bb.setRoot(
+        rel,
+        false);
+  }
+
+  private int findExpr(RexNode seek, List<RexNode> exprs, int count) {
+    for (int i = 0; i < count; i++) {
+      RexNode expr = exprs.get(i);
+      if (expr.toString().equals(seek.toString())) {
+        return i;
+      }
+    }
+    return -1;
+  }
+
+  /**
+   * Converts a query's ORDER BY clause, if any.
+   *
+   * @param select        Query
+   * @param bb            Blackboard
+   * @param collation     Collation list
+   * @param orderExprList Method populates this list with orderBy expressions
+   *                      not present in selectList
+   * @param offset        Expression for number of rows to discard before
+   *                      returning first row
+   * @param fetch         Expression for number of rows to fetch
+   */
+  protected void convertOrder(
+      SqlSelect select,
+      Blackboard bb,
+      RelCollation collation,
+      List<SqlNode> orderExprList,
+      SqlNode offset,
+      SqlNode fetch) {
+    if (select.getOrderList() == null
+        || select.getOrderList().getList().isEmpty()) {
+      assert collation.getFieldCollations().isEmpty();
+      if ((offset == null
+            || ((SqlLiteral) offset).bigDecimalValue().equals(BigDecimal.ZERO))
+          && fetch == null) {
+        return;
+      }
     }
 
-    /**
-     * @return mapping of non-correlated subqueries that have been converted to
-     * the constants that they evaluate to
-     */
-    public Map<SqlNode, RexNode> getMapConvertedNonCorrSubqs() {
-        return mapConvertedNonCorrSubqs;
+    // Create a sorter using the previously constructed collations.
+    bb.setRoot(
+        LogicalSort.create(bb.root, collation,
+            offset == null ? null : convertExpression(offset),
+            fetch == null ? null : convertExpression(fetch)),
+        false);
+
+    // If extra expressions were added to the project list for sorting,
+    // add another project to remove them. But make the collation empty, because
+    // we can't represent the real collation.
+    //
+    // If it is the top node, use the real collation, but don't trim fields.
+    if (orderExprList.size() > 0 && !bb.top) {
+      final List<RexNode> exprs = new ArrayList<>();
+      final RelDataType rowType = bb.root.getRowType();
+      final int fieldCount =
+          rowType.getFieldCount() - orderExprList.size();
+      for (int i = 0; i < fieldCount; i++) {
+        exprs.add(rexBuilder.makeInputRef(bb.root, i));
+      }
+      bb.setRoot(
+          LogicalProject.create(bb.root, exprs,
+              rowType.getFieldNames().subList(0, fieldCount)),
+          false);
+    }
+  }
+
+  /**
+   * Returns whether a given node contains a {@link SqlInOperator}.
+   *
+   * @param node a RexNode tree
+   */
+  private static boolean containsInOperator(
+      SqlNode node) {
+    try {
+      SqlVisitor<Void> visitor =
+          new SqlBasicVisitor<Void>() {
+            public Void visit(SqlCall call) {
+              if (call.getOperator() instanceof SqlInOperator) {
+                throw new Util.FoundOne(call);
+              }
+              return super.visit(call);
+            }
+          };
+      node.accept(visitor);
+      return false;
+    } catch (Util.FoundOne e) {
+      Util.swallow(e, null);
+      return true;
     }
+  }
+
+  /**
+   * Push down all the NOT logical operators into any IN/NOT IN operators.
+   *
+   * @param sqlNode the root node from which to look for NOT operators
+   * @return the transformed SqlNode representation with NOT pushed down.
+   */
+  private static SqlNode pushDownNotForIn(SqlNode sqlNode) {
+    if ((sqlNode instanceof SqlCall) && containsInOperator(sqlNode)) {
+      SqlCall sqlCall = (SqlCall) sqlNode;
+      if ((sqlCall.getOperator() == SqlStdOperatorTable.AND)
+          || (sqlCall.getOperator() == SqlStdOperatorTable.OR)) {
+        SqlNode[] sqlOperands = ((SqlBasicCall) sqlCall).operands;
+        for (int i = 0; i < sqlOperands.length; i++) {
+          sqlOperands[i] = pushDownNotForIn(sqlOperands[i]);
+        }
+        return sqlNode;
+      } else if (sqlCall.getOperator() == SqlStdOperatorTable.NOT) {
+        SqlNode childNode = sqlCall.operand(0);
+        assert childNode instanceof SqlCall;
+        SqlBasicCall childSqlCall = (SqlBasicCall) childNode;
+        if (childSqlCall.getOperator() == SqlStdOperatorTable.AND) {
+          SqlNode[] andOperands = childSqlCall.getOperands();
+          SqlNode[] orOperands = new SqlNode[andOperands.length];
+          for (int i = 0; i < orOperands.length; i++) {
+            orOperands[i] =
+                SqlStdOperatorTable.NOT.createCall(
+                    SqlParserPos.ZERO,
+                    andOperands[i]);
+          }
+          for (int i = 0; i < orOperands.length; i++) {
+            orOperands[i] = pushDownNotForIn(orOperands[i]);
+          }
+          return SqlStdOperatorTable.OR.createCall(SqlParserPos.ZERO,
+              orOperands[0], orOperands[1]);
+        } else if (childSqlCall.getOperator() == SqlStdOperatorTable.OR) {
+          SqlNode[] orOperands = childSqlCall.getOperands();
+          SqlNode[] andOperands = new SqlNode[orOperands.length];
+          for (int i = 0; i < andOperands.length; i++) {
+            andOperands[i] =
+                SqlStdOperatorTable.NOT.createCall(
+                    SqlParserPos.ZERO,
+                    orOperands[i]);
+          }
+          for (int i = 0; i < andOperands.length; i++) {
+            andOperands[i] = pushDownNotForIn(andOperands[i]);
+          }
+          return SqlStdOperatorTable.AND.createCall(SqlParserPos.ZERO,
+              andOperands[0], andOperands[1]);
+        } else if (childSqlCall.getOperator() == SqlStdOperatorTable.NOT) {
+          SqlNode[] notOperands = childSqlCall.getOperands();
+          assert notOperands.length == 1;
+          return pushDownNotForIn(notOperands[0]);
+        } else if (childSqlCall.getOperator() instanceof SqlInOperator) {
+          SqlNode[] inOperands = childSqlCall.getOperands();
+          SqlInOperator inOp =
+              (SqlInOperator) childSqlCall.getOperator();
+          if (inOp.isNotIn()) {
+            return SqlStdOperatorTable.IN.createCall(
+                SqlParserPos.ZERO,
+                inOperands[0],
+                inOperands[1]);
+          } else {
+            return SqlStdOperatorTable.NOT_IN.createCall(
+                SqlParserPos.ZERO,
+                inOperands[0],
+                inOperands[1]);
+          }
+        } else {
+          // childSqlCall is "leaf" node in a logical expression tree
+          // (only considering AND, OR, NOT)
+          return sqlNode;
+        }
+      } else {
+        // sqlNode is "leaf" node in a logical expression tree
+        // (only considering AND, OR, NOT)
+        return sqlNode;
+      }
+    } else {
+      // tree rooted at sqlNode does not contain inOperator
+      return sqlNode;
+    }
+  }
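
The rewrite above is De Morgan's laws with IN-awareness: NOT is pushed through binary AND/OR, a double NOT cancels, and a NOT landing directly on IN or NOT IN flips the operator; trees containing no IN operator are returned unchanged. A worked example of the normalization:

    NOT (A IN (1, 2) OR B IN (3, 4))
      =>  NOT (A IN (1, 2)) AND NOT (B IN (3, 4))    -- De Morgan
      =>  A NOT IN (1, 2) AND B NOT IN (3, 4)        -- flip IN under NOT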
+
+  /**
+   * Converts a WHERE clause.
+   *
+   * @param bb    Blackboard
+   * @param where WHERE clause, may be null
+   */
+  private void convertWhere(
+      final Blackboard bb,
+      final SqlNode where) {
+    if (where == null) {
+      return;
+    }
+    SqlNode newWhere = pushDownNotForIn(where);
+    replaceSubqueries(bb, newWhere, RelOptUtil.Logic.UNKNOWN_AS_FALSE);
+    final RexNode convertedWhere = bb.convertExpression(newWhere);
 
-    /**
-     * Adds to the current map of non-correlated converted subqueries the
-     * elements from another map that contains non-correlated subqueries that
-     * have been converted by another SqlToRelConverter.
-     *
-     * @param alreadyConvertedNonCorrSubqs the other map
-     */
-    public void addConvertedNonCorrSubqs(Map<SqlNode, RexNode> alreadyConvertedNonCorrSubqs) {
-        mapConvertedNonCorrSubqs.putAll(alreadyConvertedNonCorrSubqs);
+    // only allocate filter if the condition is not TRUE
+    if (convertedWhere.isAlwaysTrue()) {
+      return;
     }
 
-    /**
-     * Set a new DefaultValueFactory. To have any effect, this must be called
-     * before any convert method.
-     *
-     * @param factory new DefaultValueFactory
-     */
-    public void setDefaultValueFactory(DefaultValueFactory factory) {
-        defaultValueFactory = factory;
+    final RelNode filter = RelOptUtil.createFilter(bb.root, convertedWhere);
+    final RelNode r;
+    final CorrelationUse p = getCorrelationUse(bb, filter);
+    if (p != null) {
+      assert p.r instanceof Filter;
+      Filter f = (Filter) p.r;
+      r = LogicalFilter.create(f.getInput(), f.getCondition(),
+          ImmutableSet.of(p.id));
+    } else {
+      r = filter;
     }
 
-    /**
-     * Sets a new SubqueryConverter. To have any effect, this must be called
-     * before any convert method.
-     *
-     * @param converter new SubqueryConverter
-     */
-    public void setSubqueryConverter(SubqueryConverter converter) {
-        subqueryConverter = converter;
+    bb.setRoot(r, false);
+  }
+
+  private void replaceSubqueries(
+      final Blackboard bb,
+      final SqlNode expr,
+      RelOptUtil.Logic logic) {
+    findSubqueries(bb, expr, logic, false);
+    for (SubQuery node : bb.subqueryList) {
+      substituteSubquery(bb, node);
     }
+  }
 
-    /**
-     * Indicates that the current statement is part of an EXPLAIN PLAN statement
-     *
-     * @param nDynamicParams number of dynamic parameters in the statement
-     */
-    public void setIsExplain(int nDynamicParams) {
-        isExplain = true;
-        nDynamicParamsInExplain = nDynamicParams;
+  private void substituteSubquery(Blackboard bb, SubQuery subQuery) {
+    final RexNode expr = subQuery.expr;
+    if (expr != null) {
+      // Already done.
+      return;
     }
 
-    /**
-     * Controls whether table access references are converted to physical rels
-     * immediately. The optimizer doesn't like leaf rels to have
-     * {@link Convention#NONE}. However, if we are doing further conversion
-     * passes (e.g. {@link RelStructuredTypeFlattener}), then we may need to
-     * defer conversion. To have any effect, this must be called before any
-     * convert method.
-     *
-     * @param enabled true for immediate conversion (the default); false to
-     *                generate logical LogicalTableScan instances
-     */
-    public void enableTableAccessConversion(boolean enabled) {
-        shouldConvertTableAccess = enabled;
+    final SqlBasicCall call;
+    final RelNode rel;
+    final SqlNode query;
+    final Pair<RelNode, Boolean> converted;
+    switch (subQuery.node.getKind()) {
+    case CURSOR:
+      convertCursor(bb, subQuery);
+      return;
+
+    case MULTISET_QUERY_CONSTRUCTOR:
+    case MULTISET_VALUE_CONSTRUCTOR:
+    case ARRAY_QUERY_CONSTRUCTOR:
+      rel = convertMultisets(ImmutableList.of(subQuery.node), bb);
+      subQuery.expr = bb.register(rel, JoinRelType.INNER);
+      return;
+
+    case IN:
+      call = (SqlBasicCall) subQuery.node;
+      query = call.operand(1);
+      if (!expand && !(query instanceof SqlNodeList)) {
+        return;
+      }
+      final SqlNode leftKeyNode = call.operand(0);
+
+      final List<RexNode> leftKeys;
+      switch (leftKeyNode.getKind()) {
+      case ROW:
+        leftKeys = Lists.newArrayList();
+        for (SqlNode sqlExpr : ((SqlBasicCall) leftKeyNode).getOperandList()) {
+          leftKeys.add(bb.convertExpression(sqlExpr));
+        }
+        break;
+      default:
+        leftKeys = ImmutableList.of(bb.convertExpression(leftKeyNode));
+      }
+
+      final boolean isNotIn = ((SqlInOperator) call.getOperator()).isNotIn();
+      if (query instanceof SqlNodeList) {
+        SqlNodeList valueList = (SqlNodeList) query;
+        if (!containsNullLiteral(valueList)
+            && valueList.size() < getInSubqueryThreshold()) {
+          // We're under the threshold, so convert to OR.
+          subQuery.expr =
+              convertInToOr(
+                  bb,
+                  leftKeys,
+                  valueList,
+                  isNotIn);
+          return;
+        }
+
+        // Otherwise, let convertExists translate
+        // values list into an inline table for the
+        // reference to Q below.
+      }
+
+      // Project out the search columns from the left side
+
+      //  Q1:
+      // "select from emp where emp.deptno in (select col1 from T)"
+      //
+      // is converted to
+      //
+      // "select from
+      //   emp inner join (select distinct col1 from T)) q
+      //   on emp.deptno = q.col1
+      //
+      // Q2:
+      // "select from emp where emp.deptno not in (Q)"
+      //
+      // is converted to
+      //
+      // "select from
+      //   emp left outer join (select distinct col1, TRUE from T) q
+      //   on emp.deptno = q.col1
+      //   where emp.deptno <> null
+      //         and q.indicator <> TRUE"
+      //
+      final boolean outerJoin = bb.subqueryNeedsOuterJoin
+          || isNotIn
+          || subQuery.logic == RelOptUtil.Logic.TRUE_FALSE_UNKNOWN;
+      final RelDataType targetRowType =
+          SqlTypeUtil.promoteToRowType(typeFactory,
+              validator.getValidatedNodeType(leftKeyNode), null);
+      converted =
+          convertExists(query, RelOptUtil.SubqueryType.IN, subQuery.logic,
+              outerJoin, targetRowType);
+      if (converted.right) {
+        // Generate
+        //    emp CROSS JOIN (SELECT COUNT(*) AS c,
+        //                       COUNT(deptno) AS ck FROM dept)
+        final RelDataType longType =
+            typeFactory.createSqlType(SqlTypeName.BIGINT);
+        final RelNode seek = converted.left.getInput(0); // fragile
+        final int keyCount = leftKeys.size();
+        final List<Integer> args = ImmutableIntList.range(0, keyCount);
+        LogicalAggregate aggregate =
+            LogicalAggregate.create(seek, false, ImmutableBitSet.of(), null,
+                ImmutableList.of(
+                    AggregateCall.create(SqlStdOperatorTable.COUNT, false,
+                        ImmutableList.<Integer>of(), -1, longType, null),
+                    AggregateCall.create(SqlStdOperatorTable.COUNT, false,
+                        args, -1, longType, null)));
+        LogicalJoin join =
+            LogicalJoin.create(bb.root, aggregate, rexBuilder.makeLiteral(true),
+                ImmutableSet.<CorrelationId>of(), JoinRelType.INNER);
+        bb.setRoot(join, false);
+      }
+      RexNode rex =
+          bb.register(converted.left,
+              outerJoin ? JoinRelType.LEFT : JoinRelType.INNER, leftKeys);
+
+      subQuery.expr = translateIn(subQuery, bb.root, rex);
+      if (isNotIn) {
+        subQuery.expr =
+            rexBuilder.makeCall(SqlStdOperatorTable.NOT, subQuery.expr);
+      }
+      return;
+
+    case EXISTS:
+      // "select from emp where exists (select a from T)"
+      //
+      // is converted to the following if the subquery is correlated:
+      //
+      // "select from emp left outer join (select AGG_TRUE() as indicator
+      // from T group by corr_var) q where q.indicator is true"
+      //
+      // If there is no correlation, the expression is replaced with a
+      // boolean indicating whether the subquery returned 0 or >= 1 row.
+      call = (SqlBasicCall) subQuery.node;
+      query = call.operand(0);
+      if (!expand) {
+        return;
+      }
+      converted = convertExists(query, RelOptUtil.SubqueryType.EXISTS,
+          subQuery.logic, true, null);
+      assert !converted.right;
+      if (convertNonCorrelatedSubQuery(subQuery, bb, converted.left, true)) {
+        return;
+      }
+      subQuery.expr = bb.register(converted.left, JoinRelType.LEFT);
+      return;
+
+    case SCALAR_QUERY:
+      // Convert the subquery.  If it's non-correlated, convert it
+      // to a constant expression.
+      if (!expand) {
+        return;
+      }
+      call = (SqlBasicCall) subQuery.node;
+      query = call.operand(0);
+      converted = convertExists(query, RelOptUtil.SubqueryType.SCALAR,
+          subQuery.logic, true, null);
+      assert !converted.right;
+      if (convertNonCorrelatedSubQuery(subQuery, bb, converted.left, false)) {
+        return;
+      }
+      rel = convertToSingleValueSubq(query, converted.left);
+      subQuery.expr = bb.register(rel, JoinRelType.LEFT);
+      return;
+
+    case SELECT:
+      // This is used when converting multiset queries:
+      //
+      // select * from unnest(select multiset[deptno] from emps);
+      //
+      converted = convertExists(subQuery.node, RelOptUtil.SubqueryType.SCALAR,
+          subQuery.logic, true, null);
+      assert !converted.right;
+      subQuery.expr = bb.register(converted.left, JoinRelType.LEFT);
+      return;
+
+    default:
+      throw Util.newInternal("unexpected kind of subquery: " + subQuery.node);
+    }
+  }
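
In the IN branch above, the extra cross join computes COUNT(*) AS c and
COUNT(key) AS ck over the subquery. COUNT(col) skips nulls, so the two counts
diverge exactly when the subquery column contains a null, which is the
situation where NOT IN must yield UNKNOWN rather than TRUE. A one-line sketch
of the test that the CASE built later in translateIn relies on:

    // c = COUNT(*), ck = COUNT(key column); COUNT(col) ignores nulls,
    // so ck < c iff the subquery column contains at least one null.
    static boolean subqueryColumnHasNull(long c, long ck) {
      return ck < c;
    }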
+
+  private RexNode translateIn(SubQuery subQuery, RelNode root,
+      final RexNode rex) {
+    switch (subQuery.logic) {
+    case TRUE:
+      return rexBuilder.makeLiteral(true);
+
+    case UNKNOWN_AS_FALSE:
+      assert rex instanceof RexRangeRef;
+      final int fieldCount = rex.getType().getFieldCount();
+      RexNode rexNode = rexBuilder.makeFieldAccess(rex, fieldCount - 1);
+      rexNode = rexBuilder.makeCall(SqlStdOperatorTable.IS_TRUE, rexNode);
+
+      // Then append the IS NOT NULL(leftKeysForIn).
+      //
+      // RexRangeRef contains the following fields:
+      //   leftKeysForIn,
+      //   rightKeysForIn (the original subquery select list),
+      //   nullIndicator
+      //
+      // The first two lists contain the same number of fields.
+      final int k = (fieldCount - 1) / 2;
+      for (int i = 0; i < k; i++) {
+        rexNode =
+            rexBuilder.makeCall(
+                SqlStdOperatorTable.AND,
+                rexNode,
+                rexBuilder.makeCall(
+                    SqlStdOperatorTable.IS_NOT_NULL,
+                    rexBuilder.makeFieldAccess(rex, i)));
+      }
+      return rexNode;
+
+    case TRUE_FALSE_UNKNOWN:
+    case UNKNOWN_AS_TRUE:
+      // select e.deptno,
+      //   case
+      //   when ct.c = 0 then false
+      //   when dt.i is not null then true
+      //   when e.deptno is null then null
+      //   when ct.ck < ct.c then null
+      //   else false
+      //   end
+      // from e
+      // cross join (select count(*) as c, count(deptno) as ck from v) as ct
+      // left join (select distinct deptno, true as i from v) as dt
+      //   on e.deptno = dt.deptno
+      final Join join = (Join) root;
+      final Project left = (Project) join.getLeft();
+      final RelNode leftLeft = ((Join) left.getInput()).getLeft();
+      final int leftLeftCount = leftLeft.getRowType().getFieldCount();
+      final RelDataType nullableBooleanType =
+          typeFactory.createTypeWithNullability(
+              typeFactory.createSqlType(SqlTypeName.BOOLEAN), true);
+      final RelDataType longType =
+          typeFactory.createSqlType(SqlTypeName.BIGINT);
+      final RexNode cRef = rexBuilder.makeInputRef(root, leftLeftCount);
+      final RexNode ckRef = rexBuilder.makeInputRef(root, leftLeftCount + 1);
+      final RexNode iRef =
+          rexBuilder.makeInputRef(root, root.getRowType().getFieldCount() - 1);
+
+      final RexLiteral zero =
+          rexBuilder.makeExactLiteral(BigDecimal.ZERO, longType);
+      final RexLiteral trueLiteral = rexBuilder.makeLiteral(true);
+      final RexLiteral falseLiteral = rexBuilder.makeLiteral(false);
+      final RexNode unknownLiteral =
+          rexBuilder.makeNullLiteral(SqlTypeName.BOOLEAN);
+
+      final ImmutableList.Builder<RexNode> args = ImmutableList.builder();
+      args.add(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, cRef, zero),
+          falseLiteral,
+          rexBuilder.makeCall(SqlStdOperatorTable.IS_NOT_NULL, iRef),
+          trueLiteral);
+      final JoinInfo joinInfo = join.analyzeCondition();
+      for (int leftKey : joinInfo.leftKeys) {
+        final RexNode kRef = rexBuilder.makeInputRef(root, leftKey);
+        args.add(rexBuilder.makeCall(SqlStdOperatorTable.IS_NULL, kRef),
+            unknownLiteral);
+      }
+      args.add(rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN, ckRef, cRef),
+          unknownLiteral,
+          falseLiteral);
+
+      return rexBuilder.makeCall(
+          nullableBooleanType,
+          SqlStdOperatorTable.CASE,
+          args.build());
+
+    default:
+      throw new AssertionError(subQuery.logic);
     }
+  }
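
The TRUE_FALSE_UNKNOWN branch encodes SQL's 3-valued IN semantics as a CASE
expression. A hedged restatement in plain Java, with Boolean null standing in
for SQL UNKNOWN (the flattened signature is a simplification; the real CASE
checks every left join key for nulls):

    // Mirrors the CASE arms in order: no subquery rows => FALSE; a join
    // match exists => TRUE; null left key => UNKNOWN; subquery contains
    // nulls (ck < c) => UNKNOWN; otherwise FALSE.
    static Boolean inResult(long c, long ck, boolean matched, boolean leftKeyIsNull) {
      if (c == 0) return false;
      if (matched) return true;
      if (leftKeyIsNull) return null;
      if (ck < c) return null;
      return false;
    }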
 
-    /**
-     * Controls whether instances of
-     * {@link org.apache.calcite.rel.logical.LogicalValues} are generated. These
-     * may not be supported by all physical implementations. To have any effect,
-     * this must be called before any convert method.
-     *
-     * @param enabled true to allow LogicalValues to be generated (the default);
-     *                false to force substitution of Project+OneRow instead
-     */
-    public void enableValuesRelCreation(boolean enabled) {
-        shouldCreateValuesRel = enabled;
-    }
-
-    private void checkConvertedType(SqlNode query, RelNode result) {
-        if (!query.isA(SqlKind.DML)) {
-            // Verify that conversion from SQL to relational algebra did
-            // not perturb any type information.  (We can't do this if the
-            // SQL statement is something like an INSERT which has no
-            // validator type information associated with its result,
-            // hence the namespace check above.)
-            RelDataType convertedRowType = result.getRowType();
-            if (!checkConvertedRowType(query, convertedRowType)) {
-                RelDataType validatedRowType = validator.getValidatedNodeType(query);
-                validatedRowType = uniquifyFields(validatedRowType);
-                throw Util.newInternal("Conversion to relational algebra failed to " + "preserve datatypes:\n" + "validated type:\n" + validatedRowType.getFullTypeString() + "\nconverted type:\n" + convertedRowType.getFullTypeString() + "\nrel:\n" + RelOptUtil.toString(result));
-            }
+  private static boolean containsNullLiteral(SqlNodeList valueList) {
+    for (SqlNode node : valueList.getList()) {
+      if (node instanceof SqlLiteral) {
+        SqlLiteral lit = (SqlLiteral) node;
+        if (lit.getValue() == null) {
+          return true;
         }
+      }
     }
-
-    public RelNode flattenTypes(RelNode rootRel, boolean restructure) {
-        RelStructuredTypeFlattener typeFlattener = new RelStructuredTypeFlattener(rexBuilder, createToRelContext());
-        return typeFlattener.rewrite(rootRel, restructure);
+    return false;
+  }
+
+  /**
+   * Determines if a subquery is non-correlated and if so, converts it to a
+   * constant.
+   *
+   * @param subQuery  the call that references the subquery
+   * @param bb        blackboard used to convert the subquery
+   * @param converted RelNode tree corresponding to the subquery
+   * @param isExists  true if the subquery is part of an EXISTS expression
+   * @return if the subquery can be converted to a constant
+   */
+  private boolean convertNonCorrelatedSubQuery(
+      SubQuery subQuery,
+      Blackboard bb,
+      RelNode converted,
+      boolean isExists) {
+    SqlCall call = (SqlBasicCall) subQuery.node;
+    if (subqueryConverter.canConvertSubquery()
+        && isSubQueryNonCorrelated(converted, bb)) {
+      // First check if the subquery has already been converted
+      // because it's a nested subquery.  If so, don't re-evaluate
+      // it again.
+      RexNode constExpr = mapConvertedNonCorrSubqs.get(call);
+      if (constExpr == null) {
+        constExpr =
+            subqueryConverter.convertSubquery(
+                call,
+                this,
+                isExists,
+                isExplain);
+      }
+      if (constExpr != null) {
+        subQuery.expr = constExpr;
+        mapConvertedNonCorrSubqs.put(call, constExpr);
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Converts the RelNode tree for a select statement to a select that
+   * produces a single value.
+   *
+   * @param query the query
+   * @param plan   the original RelNode tree corresponding to the statement
+   * @return the converted RelNode tree
+   */
+  public RelNode convertToSingleValueSubq(
+      SqlNode query,
+      RelNode plan) {
+    // Check whether query is guaranteed to produce a single value.
+    if (query instanceof SqlSelect) {
+      SqlSelect select = (SqlSelect) query;
+      SqlNodeList selectList = select.getSelectList();
+      SqlNodeList groupList = select.getGroup();
+
+      if ((selectList.size() == 1)
+          && ((groupList == null) || (groupList.size() == 0))) {
+        SqlNode selectExpr = selectList.get(0);
+        if (selectExpr instanceof SqlCall) {
+          SqlCall selectExprCall = (SqlCall) selectExpr;
+          if (Util.isSingleValue(selectExprCall)) {
+            return plan;
+          }
+        }
+
+        // If there is a limit of 0 or 1,
+        // the query is guaranteed to produce a single value
+        if (select.getFetch() != null
+            && select.getFetch() instanceof SqlNumericLiteral) {
+          SqlNumericLiteral limitNum = (SqlNumericLiteral) select.getFetch();
+          if (((BigDecimal) limitNum.getValue()).intValue() < 2) {
+            return plan;
+          }
+        }
+      }
+    } else if (query instanceof SqlCall) {
+      // If the query is (values ...),
+      // it is necessary to look into the operands to determine
+      // whether SingleValueAgg is necessary
+      SqlCall exprCall = (SqlCall) query;
+      if (exprCall.getOperator()
+          instanceof SqlValuesOperator
+              && Util.isSingleValue(exprCall)) {
+        return plan;
+      }
     }
 
-    /**
-     * If subquery is correlated and decorrelation is enabled, performs
-     * decorrelation.
-     *
-     * @param query   Query
-     * @param rootRel Root relational expression
-     * @return New root relational expression after decorrelation
-     */
-    public RelNode decorrelate(SqlNode query, RelNode rootRel) {
-        if (!enableDecorrelation()) {
-            return rootRel;
-        }
-        final RelNode result = decorrelateQuery(rootRel);
-        if (result != rootRel) {
-            checkConvertedType(query, result);
-        }
-        return result;
+    // If not, project SingleValueAgg
+    return RelOptUtil.createSingleValueAggRel(
+        cluster,
+        plan);
+  }
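
The guard above, restated: a scalar subquery can skip the SingleValueAgg
wrapper when it is statically guaranteed to return at most one row. A
simplified sketch of that decision for the SqlSelect branch (toy parameters;
the VALUES branch is omitted):

    // True when the wrapper is unnecessary: exactly one select item, no
    // GROUP BY, and either a single-value expression (e.g. MAX(x)) or a
    // fetch/LIMIT of 0 or 1.
    static boolean guaranteedSingleValue(int selectItems, boolean hasGroupBy,
        boolean singleValueExpr, Integer fetch) {
      if (selectItems != 1 || hasGroupBy) {
        return false;
      }
      return singleValueExpr || (fetch != null && fetch < 2);
    }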
+
+  /**
+   * Converts "x IN (1, 2, ...)" to "x=1 OR x=2 OR ...".
+   *
+   * @param leftKeys   LHS
+   * @param valuesList RHS
+   * @param isNotIn    is this a NOT IN operator
+   * @return converted expression
+   */
+  private RexNode convertInToOr(
+      final Blackboard bb,
+      final List<RexNode> leftKeys,
+      SqlNodeList valuesList,
+      boolean isNotIn) {
+    final List<RexNode> comparisons = new ArrayList<>();
+    for (SqlNode rightVals : valuesList) {
+      RexNode rexComparison;
+      if (leftKeys.size() == 1) {
+        rexComparison =
+            rexBuilder.makeCall(
+                SqlStdOperatorTable.EQUALS,
+                leftKeys.get(0),
+                rexBuilder.ensureType(leftKeys.get(0).getType(),
+                    bb.convertExpression(rightVals), true));
+      } else {
+        assert rightVals instanceof SqlCall;
+        final SqlBasicCall call = (SqlBasicCall) rightVals;
+        assert (call.getOperator() instanceof SqlRowOperator)
+            && call.operandCount() == leftKeys.size();
+        rexComparison =
+            RexUtil.composeConjunction(
+                rexBuilder,
+                Iterables.transform(
+                    Pair.zip(leftKeys, call.getOperandList()),
+                    new Function<Pair<RexNode, SqlNode>, RexNode>() {
+                      public RexNode apply(Pair<RexNode, SqlNode> pair) {
+                        return rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
+                            pair.left,
+                            rexBuilder.ensureType(pair.left.getType(),
+                                bb.convertExpression(pair.right), true));
+                      }
+                    }),
+                false);
+      }
+      comparisons.add(rexComparison);
     }
 
-    /**
-     * Walks over a tree of relational expressions, replacing each
-     * {@link RelNode} with a 'slimmed down' relational expression that projects
-     * only the fields required by its consumer.
-     *
-     * <p>This may make things easier for the optimizer, by removing crud that
-     * would expand the search space, but is difficult for the optimizer itself
-     * to do it, because optimizer rules must preserve the number and type of
-     * fields. Hence, this transform that operates on the entire tree, similar
-     * to the {@link RelStructuredTypeFlattener type-flattening transform}.
-     *
-     * <p>Currently this functionality is disabled in farrago/luciddb; the
-     * default implementation of this method does nothing.
-     *
-     * @param rootRel Relational expression that is at the root of the tree
-     * @return Trimmed relational expression
-     */
-    public RelNode trimUnusedFields(RelNode rootRel) {
-        // Trim fields that are not used by their consumer.
-        if (isTrimUnusedFields()) {
-            final RelFieldTrimmer trimmer = newFieldTrimmer();
-            rootRel = trimmer.trim(rootRel);
-            boolean dumpPlan = SQL2REL_LOGGER.isLoggable(Level.FINE);
-            if (dumpPlan) {
-                SQL2REL_LOGGER.fine(RelOptUtil.dumpPlan("Plan after trimming unused fields", rootRel, false, SqlExplainLevel.EXPPLAN_ATTRIBUTES));
-            }
-        }
-        return rootRel;
+    RexNode result =
+        RexUtil.composeDisjunction(rexBuilder, comparisons, true);
+    assert result != null;
+
+    if (isNotIn) {
+      result =
+          rexBuilder.makeCall(
+              SqlStdOperatorTable.NOT,
+              result);
     }
 
-    /**
-     * Creates a RelFieldTrimmer.
-     *
-     * @return Field trimmer
-     */
-    protected RelFieldTrimmer newFieldTrimmer() {
-        return new RelFieldTrimmer(validator);
+    return result;
+  }
+
+  /**
+   * Gets the list size threshold under which {@link #convertInToOr} is used.
+   * Lists of this size or greater will instead be converted to use a join
+   * against an inline table
+   * ({@link org.apache.calcite.rel.logical.LogicalValues}) rather than a
+   * predicate. A threshold of 0 forces usage of an inline table in all cases; a
+   * threshold of Integer.MAX_VALUE forces usage of OR in all cases.
+   *
+   * @return threshold, default {@link #IN_SUBQUERY_THRESHOLD}
+   */
+  protected int getInSubqueryThreshold() {
+    /* OVERRIDE POINT */
+    return Integer.MAX_VALUE;
+  }
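
Kylin pins the threshold to Integer.MAX_VALUE, so a literal IN list is always
expanded by convertInToOr into an OR chain instead of being joined against an
inline LogicalValues table. The shape of the expansion, sketched over plain
strings purely for illustration:

    // Builds the "x = v1 OR x = v2 OR ..." predicate that convertInToOr
    // produces; wrapped in NOT for the NOT IN case.
    static String inToOr(String col, java.util.List<String> values, boolean notIn) {
      StringBuilder sb = new StringBuilder();
      for (String v : values) {
        if (sb.length() > 0) {
          sb.append(" OR ");
        }
        sb.append(col).append(" = ").append(v);
      }
      return notIn ? "NOT (" + sb + ")" : sb.toString();
    }

For example, inToOr("deptno", java.util.Arrays.asList("10", "20"), false)
yields "deptno = 10 OR deptno = 20".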
+
+  /**
+   * Converts an EXISTS or IN predicate into a join. For EXISTS, the subquery
+   * produces an indicator variable, and the result is a relational expression
+   * which outer joins that indicator to the original query. After performing
+   * the outer join, the condition will be TRUE if the EXISTS condition holds,
+   * NULL otherwise.
+   *
+   * @param seek           A query, for example 'select * from emp' or
+   *                       'values (1,2,3)' or '('Foo', 34)'.
+   * @param subqueryType   Whether sub-query is IN, EXISTS or scalar
+   * @param logic Whether the answer needs to be in full 3-valued logic (TRUE,
+   *     FALSE, UNKNOWN), or whether we can accept an approximation (say
+   *     representing UNKNOWN as FALSE)
+   * @param needsOuterJoin Whether an outer join is needed
+   * @return join expression
+   * @pre extraExpr == null || extraName != null
+   */
+  private Pair<RelNode, Boolean> convertExists(
+      SqlNode seek,
+      RelOptUtil.SubqueryType subqueryType,
+      RelOptUtil.Logic logic,
+      boolean needsOuterJoin,
+      RelDataType targetDataType) {
+    final SqlValidatorScope seekScope =
+        (seek instanceof SqlSelect)
+            ? validator.getSelectScope((SqlSelect) seek)
+            : null;
+    final Blackboard seekBb = createBlackboard(seekScope, null, false);
+    RelNode seekRel = convertQueryOrInList(seekBb, seek, targetDataType);
+
+    return RelOptUtil.createExistsPlan(seekRel, subqueryType, logic,
+        needsOuterJoin);
+  }
+
+  private RelNode convertQueryOrInList(
+      Blackboard bb,
+      SqlNode seek,
+      RelDataType targetRowType) {
+    // NOTE: Once we start accepting single-row queries as row constructors,
+    // there will be an ambiguity here for a case like X IN ((SELECT Y FROM
+    // Z)).  The SQL standard resolves the ambiguity by saying that a lone
+    // select should be interpreted as a table expression, not a row
+    // expression.  The semantic difference is that a table expression can
+    // return multiple rows.
+    if (seek instanceof SqlNodeList) {
+      return convertRowValues(
+          bb,
+          seek,
+          ((SqlNodeList) seek).getList(),
+          false,
+          targetRowType);
+    } else {
+      return convertQueryRecursive(seek, false, null).project();
+    }
+  }
+
+  private RelNode convertRowValues(
+      Blackboard bb,
+      SqlNode rowList,
+      Collection<SqlNode> rows,
+      boolean allowLiteralsOnly,
+      RelDataType targetRowType) {
+    // NOTE jvs 30-Apr-2006: We combine all rows consisting entirely of
+    // literals into a single LogicalValues; this gives the optimizer a smaller
+    // input tree.  For everything else (computed expressions, row
+    // subqueries), we union each row in as a projection on top of a
+    // LogicalOneRow.
+
+    final ImmutableList.Builder<ImmutableList<RexLiteral>> tupleList =
+        ImmutableList.builder();
+    final RelDataType rowType;
+    if (targetRowType != null) {
+      rowType = targetRowType;
+    } else {
+      rowType =
+          SqlTypeUtil.promoteToRowType(
+              typeFactory,
+              validator.getValidatedNodeType(rowList),
+              null);
     }
 
-    /**
-     * Converts an unvalidated query's parse tree into a relational expression.
-     *
-     * @param query           Query to convert
-     * @param needsValidation Whether to validate the query before converting;
-     *                        <code>false</code> if the query has already been
-     *                        validated.
-     * @param top             Whether the query is top-level, say if its result
-     *                        will become a JDBC result set; <code>false</code> if
-     *                        the query will be part of a view.
-     */
-    public RelNode convertQuery(SqlNode query, final boolean needsValidation, final boolean top) {
-        if (needsValidation) {
-            query = validator.validate(query);
+    final List<RelNode> unionInputs = new ArrayList<>();
+    for (SqlNode node : rows) {
+      SqlBasicCall call;
+      if (isRowConstructor(node)) {
+        call = (SqlBasicCall) node;
+        ImmutableList.Builder<RexLiteral> tuple = ImmutableList.builder();
+        for (Ord<SqlNode> operand : Ord.zip(call.operands)) {
+          RexLiteral rexLiteral =
+              convertLiteralInValuesList(
+                  operand.e,
+                  bb,
+                  rowType,
+                  operand.i);
+          if ((rexLiteral == null) && allowLiteralsOnly) {
+            return null;
+          }
+          if ((rexLiteral == null) || !shouldCreateValuesRel) {
+            // fallback to convertRowConstructor
+            tuple = null;
+            break;
+          }
+          tuple.add(rexLiteral);
+        }
+        if (tuple != null) {
+          tupleList.add(tuple.build());
+          continue;
+        }
+      } else {
+        RexLiteral rexLiteral =
+            convertLiteralInValuesList(
+                node,
+                bb,
+                rowType,
+                0);
+        if ((rexLiteral != null) && shouldCreateValuesRel) {
+          tupleList.add(ImmutableList.of(rexLiteral));
+          continue;
+        } else {
+          if ((rexLiteral == null) && allowLiteralsOnly) {
+            return null;
+          }
         }
 
-        RelNode result = convertQueryRecursive(query, top, null);
-        if (top && isStream(query)) {
-            result = new LogicalDelta(cluster, result.getTraitSet(), result);
-        }
-        checkConvertedType(query, result);
+        // convert "1" to "row(1)"
+        call =
+            (SqlBasicCall) SqlStdOperatorTable.ROW.createCall(
+                SqlParserPos.ZERO,
+                node);
+      }
+      unionInputs.add(convertRowConstructor(bb, call));
+    }
+    LogicalValues values =
+        LogicalValues.create(cluster, rowType, tupleList.build());
+    RelNode resultRel;
+    if (unionInputs.isEmpty()) {
+      resultRel = values;
+    } else {
+      if (!values.getTuples().isEmpty()) {
+        unionInputs.add(values);
+      }
+      resultRel = LogicalUnion.create(unionInputs, true);
+    }
+    leaves.add(resultRel);
+    return resultRel;
+  }
+
+  private RexLiteral convertLiteralInValuesList(
+      SqlNode sqlNode,
+      Blackboard bb,
+      RelDataType rowType,
+      int iField) {
+    if (!(sqlNode instanceof SqlLiteral)) {
+      return null;
+    }
+    RelDataTypeField field = rowType.getFieldList().get(iField);
+    RelDataType type = field.getType();
+    if (type.isStruct()) {
+      // null literals for weird stuff like UDT's need
+      // special handling during type flattening, so
+      // don't use LogicalValues for those
+      return null;
+    }
 
-        boolean dumpPlan = SQL2REL_LOGGER.isLoggable(Level.FINE);
-        if (dumpPlan) {
-            SQL2REL_LOGGER.fine(RelOptUtil.dumpPlan("Plan after converting SqlNode to RelNode", result, false, SqlExplainLevel.EXPPLAN_ATTRIBUTES));
-        }
+    RexNode literalExpr =
+        exprConverter.convertLiteral(
+            bb,
+            (SqlLiteral) sqlNode);
+
+    if (!(literalExpr instanceof RexLiteral)) {
+      assert literalExpr.isA(SqlKind.CAST);
+      RexNode child = ((RexCall) literalExpr).getOperands().get(0);
+      assert RexLiteral.isNullLiteral(child);
 
-        return result;
+      // NOTE jvs 22-Nov-2006:  we preserve type info
+      // in LogicalValues digest, so it's OK to lose it here
+      return (RexLiteral) child;
     }
 
-    private static boolean isStream(SqlNode query) {
-        return query instanceof SqlSelect && ((SqlSelect) query).isKeywordPresent(SqlSelectKeyword.STREAM);
+    RexLiteral literal = (RexLiteral) literalExpr;
+
+    Comparable value = literal.getValue();
+
+    if (SqlTypeUtil.isExactNumeric(type) && SqlTypeUtil.hasScale(type)) {
+      BigDecimal roundedValue =
+          NumberUtil.rescaleBigDecimal(
+              (BigDecimal) value,
+              type.getScale());
+      return rexBuilder.makeExactLiteral(
+          roundedValue,
+          type);
     }
 
-    protected boolean checkConvertedRowType(SqlNode query, RelDataType convertedRowType) {
-        RelDataType validatedRowType = validator.getValidatedNodeType(query);
-        validatedRowType = uniquifyFields(validatedRowType);
+    if ((value instanceof NlsString)
+        && (type.getSqlTypeName() == SqlTypeName.CHAR)) {
+      // pad fixed character type
+      NlsString unpadded = (NlsString) value;
+      return rexBuilder.makeCharLiteral(
+          new NlsString(
+              Spaces.padRight(unpadded.getValue(), type.getPrecision()),
+              unpadded.getCharsetName(),
+              unpadded.getCollation()));
+    }
+    return literal;
+  }
 
-        return RelOptUtil.equal("validated row type", validatedRowType, "converted row type", convertedRowType, false);
+  private boolean isRowConstructor(SqlNode node) {
+    if (!(node.getKind() == SqlKind.ROW)) {
+      return false;
+    }
+    SqlCall call = (SqlCall) node;
+    return call.getOperator().getName().equalsIgnoreCase("row");
+  }
+
+  /**
+   * Builds a list of all <code>IN</code> or <code>EXISTS</code> operators
+   * inside SQL parse tree. Does not traverse inside queries.
+   *
+   * @param bb                           blackboard
+   * @param node                         the SQL parse tree
+   * @param logic Whether the answer needs to be in full 3-valued logic (TRUE,
+   *              FALSE, UNKNOWN), or whether we can accept an approximation
+   *              (say representing UNKNOWN as FALSE)
+   * @param registerOnlyScalarSubqueries if set to true and the parse tree
+   *                                     corresponds to a variation of a select
+   *                                     node, only register it if it's a scalar
+   *                                     subquery
+   */
+  private void findSubqueries(
+      Blackboard bb,
+      SqlNode node,
+      RelOptUtil.Logic logic,
+      boolean registerOnlyScalarSubqueries) {
+    final SqlKind kind = node.getKind();
+    switch (kind) {
+    case EXISTS:
+    case SELECT:
+    case MULTISET_QUERY_CONSTRUCTOR:
+    case MULTISET_VALUE_CONSTRUCTOR:
+    case ARRAY_QUERY_CONSTRUCTOR:
+    case CURSOR:
+    case SCALAR_QUERY:
+      if (!registerOnlyScalarSubqueries
+          || (kind == SqlKind.SCALAR_QUERY)) {
+        bb.registerSubquery(node, RelOptUtil.Logic.TRUE_FALSE);
+      }
+      return;
+    case IN:
+      if (((SqlCall) node).getOperator() == SqlStdOperatorTable.NOT_IN) {
+        logic = logic.negate();
+      }
+      break;
+    case NOT:
+      logic = logic.negate();
+      break;
+    }
+    if (node instanceof SqlCall) {
+      if (kind == SqlKind.OR
+          || kind == SqlKind.NOT) {
+        // It's always correct to outer join subquery with
+        // containing query; however, when predicates involve Or
+        // or NOT, outer join might be necessary.
+        bb.subqueryNeedsOuterJoin = true;
+      }
+      for (SqlNode operand : ((SqlCall) node).getOperandList()) {
+        if (operand != null) {
+          // In the case of an IN expression, locate scalar
+          // subqueries so we can convert them to constants
+          findSubqueries(
+              bb,
+              operand,
+              logic,
+              kind == SqlKind.IN || registerOnlyScalarSubqueries);
+        }
+      }
+    } else if (node instanceof SqlNodeList) {
+      for (SqlNode child : (SqlNodeList) node) {
+        findSubqueries(
+            bb,
+            child,
+            logic,
+            kind == SqlKind.IN || registerOnlyScalarSubqueries);
+      }
     }
 
-    protected RelDataType uniquifyFields(RelDataType rowType) {
-        return validator.getTypeFactory().createStructType(RelOptUtil.getFieldTypeList(rowType), SqlValidatorUtil.uniquify(rowType.getFieldNames()));
+    // Now that we've located any scalar subqueries inside the IN
+    // expression, register the IN expression itself.  We need to
+    // register the scalar subqueries first so they can be converted
+    // before the IN expression is converted.
+    if (kind == SqlKind.IN) {
+      if (logic == RelOptUtil.Logic.TRUE_FALSE_UNKNOWN
+          && !validator.getValidatedNodeType(node).isNullable()) {
+        logic = RelOptUtil.Logic.UNKNOWN_AS_FALSE;
+      }
+      // TODO: This conversion is only valid in the WHERE clause
+      if (logic == RelOptUtil.Logic.UNKNOWN_AS_FALSE
+          && !bb.subqueryNeedsOuterJoin) {
+        logic = RelOptUtil.Logic.TRUE;
+      }
+      bb.registerSubquery(node, logic);
+    }
+  }
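
Note how the Logic value is flipped while walking beneath NOT and NOT IN: an
approximation that treats UNKNOWN as FALSE on one side of a NOT must treat it
as TRUE on the other. A minimal sketch of the idea; the exact behavior of
Calcite's RelOptUtil.Logic.negate() may cover more cases and is an assumption
here:

    enum Logic {
      TRUE_FALSE, TRUE_FALSE_UNKNOWN, UNKNOWN_AS_TRUE, UNKNOWN_AS_FALSE, TRUE;

      // Swap the two approximations under negation; the exact logics
      // are their own duals in this sketch.
      Logic negate() {
        switch (this) {
        case UNKNOWN_AS_FALSE: return UNKNOWN_AS_TRUE;
        case UNKNOWN_AS_TRUE:  return UNKNOWN_AS_FALSE;
        default:               return this;
        }
      }
    }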
+
+  /**
+   * Converts an expression from {@link SqlNode} to {@link RexNode} format.
+   *
+   * @param node Expression to translate
+   * @return Converted expression
+   */
+  public RexNode convertExpression(
+      SqlNode node) {
+    Map<String, RelDataType> nameToTypeMap = Collections.emptyMap();
+    final ParameterScope scope =
+        new ParameterScope((SqlValidatorImpl) validator, nameToTypeMap);
+    final Blackboard bb = createBlackboard(scope, null, false);
+    return bb.convertExpression(node);
+  }
+
+  /**
+   * Converts an expression from {@link SqlNode} to {@link RexNode} format,
+   * mapping identifier references to predefined expressions.
+   *
+   * @param node          Expression to translate
+   * @param nameToNodeMap map from String to {@link RexNode}; when an
+   *                      {@link SqlIdentifier} is encountered, it is used as a
+   *                      key and translated to the corresponding value from
+   *                      this map
+   * @return Converted expression
+   */
+  public RexNode convertExpression(
+      SqlNode node,
+      Map<String, RexNode> nameToNodeMap) {
+    final Map<String, RelDataType> nameToTypeMap = new HashMap<>();
+    for (Map.Entry<String, RexNode> entry : nameToNodeMap.entrySet()) {
+      nameToTypeMap.put(entry.getKey(), entry.getValue().getType());
+    }
+    final ParameterScope scope =
+        new ParameterScope((SqlValidatorImpl) validator, nameToTypeMap);
+    final Blackboard bb = createBlackboard(scope, nameToNodeMap, false);
+    return bb.convertExpression(node);
+  }
+
+  /**
+   * Converts a non-standard expression.
+   *
+   * <p>This method is an extension-point that derived classes can override. If
+   * this method returns a null result, the normal expression translation
+   * process will proceed. The default implementation always returns null.
+   *
+   * @param node Expression
+   * @param bb   Blackboard
+   * @return null to proceed with the usual expression translation process
+   */
+  protected RexNode convertExtendedExpression(
+      SqlNode node,
+      Blackboard bb) {
+    return null;
+  }
+
+  private RexNode convertOver(Blackboard bb, SqlNode node) {
+    SqlCall call = (SqlCall) node;
+    SqlCall aggCall = call.operand(0);
+    SqlNode windowOrRef = call.operand(1);
+    final SqlWindow window =
+        validator.resolveWindow(windowOrRef, bb.scope, true);
+    // ROW_NUMBER() expects specific kind of framing.
+    if (aggCall.getOperator() == SqlStdOperatorTable.ROW_NUMBER) {
+      window.setLowerBound(SqlWindow.createUnboundedPreceding(SqlParserPos.ZERO));
+      window.setUpperBound(SqlWindow.createCurrentRow(SqlParserPos.ZERO));
+      window.setRows(SqlLiteral.createBoolean(true, SqlParserPos.ZERO));
+    }
+    final SqlNodeList partitionList = window.getPartitionList();
+    final ImmutableList.Builder<RexNode> partitionKeys =
+        ImmutableList.builder();
+    for (SqlNode partition : partitionList) {
+      partitionKeys.add(bb.convertExpression(partition));
+    }
+    RexNode lowerBound = bb.convertExpression(window.getLowerBound());
+    RexNode upperBound = bb.convertExpression(window.getUpperBound());
+    SqlNodeList orderList = window.getOrderList();
+    if ((orderList.size() == 0) && !window.isRows()) {
+      // A logical range requires an ORDER BY clause. Use the implicit
+      // ordering of this relation. There must be one, otherwise it would
+      // have failed validation.
+      orderList = bb.scope.getOrderList();
+      if (orderList == null) {
+        throw new AssertionError(
+            "Relation should have sort key for implicit ORDER BY");
+      }
+    }
+    final ImmutableList.Builder<RexFieldCollation> orderKeys =
+        ImmutableList.builder();
+    final Set<SqlKind> flags = EnumSet.noneOf(SqlKind.class);
+    for (SqlNode order : orderList) {
+      flags.clear();
+      RexNode e = bb.convertSortExpression(order, flags);
+      orderKeys.add(new RexFieldCollation(e, flags));
+    }
+    try {
+      Util.permAssert(bb.window == null, "already in window agg mode");
+      bb.window = window;
+      RexNode rexAgg = exprConverter.convertCall(bb, aggCall);
+      rexAgg =
+          rexBuilder.ensureType(
+              validator.getValidatedNodeType(call), rexAgg, false);
+
+      // Walk over the tree and apply 'over' to all agg functions. This is
+      // necessary because the returned expression is not necessarily a call
+      // to an agg function. For example, AVG(x) becomes SUM(x) / COUNT(x).
+      final RexShuttle visitor =
+          new HistogramShuttle(
+              partitionKeys.build(), orderKeys.build(),
+              RexWindowBound.create(window.getLowerBound(), lowerBound),
+              RexWindowBound.create(window.getUpperBound(), upperBound),
+              window);
+      return rexAgg.accept(visitor);
+    } finally {
+      bb.window = null;
+    }
+  }
+
+  /**
+   * Converts a FROM clause into a relational expression.
+   *
+   * @param bb   Scope within which to resolve identifiers
+   * @param from FROM clause of a query. Examples include:
+   *
+   *             <ul>
+   *             <li>a single table ("SALES.EMP"),
+   *             <li>an aliased table ("EMP AS E"),
+   *             <li>a list of tables ("EMP, DEPT"),
+   *             <li>an ANSI Join expression ("EMP JOIN DEPT ON EMP.DEPTNO =
+   *             DEPT.DEPTNO"),
+   *             <li>a VALUES clause ("VALUES ('Fred', 20)"),
+   *             <li>a query ("(SELECT * FROM EMP WHERE GENDER = 'F')"),
+   *             <li>or any combination of the above.
+   *             </ul>
+   */
+  protected void convertFrom(
+      Blackboard bb,
+      SqlNode from) {
+    final SqlCall call;
+    final SqlNode[] operands;
+    switch (from.getKind()) {
+    case AS:
+      convertFrom(bb, ((SqlCall) from).operand(0));
+      return;
+
+    case WITH_ITEM:
+      convertFrom(bb, ((SqlWithItem) from).query);
+      return;
+
+    case WITH:
+      convertFrom(bb, ((SqlWith) from).body);
+      return;
+
+    case TABLESAMPLE:
+      operands = ((SqlBasicCall) from).getOperands();
+      SqlSampleSpec sampleSpec = SqlLiteral.sampleValue(operands[1]);
+      if (sampleSpec instanceof SqlSampleSpec.SqlSubstitutionSampleSpec) {
+        String sampleName =
+            ((SqlSampleSpec.SqlSubstitutionSampleSpec) sampleSpec)
+                .getName();
+        datasetStack.push(sampleName);
+        convertFrom(bb, operands[0]);
+        datasetStack.pop();
+      } else if (sampleSpec instanceof SqlSampleSpec.SqlTableSampleSpec) {
+        SqlSampleSpec.SqlTableSampleSpec tableSampleSpec =
+            (SqlSampleSpec.SqlTableSampleSpec) sampleSpec;
+        convertFrom(bb, operands[0]);
+        RelOptSamplingParameters params =
+            new RelOptSamplingParameters(
+                tableSampleSpec.isBernoulli(),
+                tableSampleSpec.getSamplePercentage(),
+                tableSampleSpec.isRepeatable(),
+                tableSampleSpec.getRepeatableSeed());
+        bb.setRoot(new Sample(cluster, bb.root, params), false);
+      } else {
+        throw Util.newInternal(
+            "unknown TABLESAMPLE type: " + sampleSpec);
+      }
+      return;
+
+    case IDENTIFIER:
+      final SqlValidatorNamespace fromNamespace =
+          validator.getNamespace(from).resolve();
+      if (fromNamespace.getNode() != null) {
+        convertFrom(bb, fromNamespace.getNode());
+        return;
+      }
+      final String datasetName =
+          datasetStack.isEmpty() ? null : datasetStack.peek();
+      boolean[] usedDataset = {false};
+      RelOptTable table =
+          SqlValidatorUtil.getRelOptTable(
+              fromNamespace,
+              catalogReader,
+              datasetName,
+              usedDataset);
+      final RelNode tableRel;
+      if (shouldConvertTableAccess) {
+        tableRel = toRel(table);
+      } else {
+        tableRel = LogicalTableScan.create(cluster, table);
+      }
+      bb.setRoot(tableRel, true);
+      if (usedDataset[0]) {
+        bb.setDataset(datasetName);
+      }
+      return;
+
+    case JOIN:
+      final SqlJoin join = (SqlJoin) from;
+      final SqlValidatorScope scope = validator.getJoinScope(from);
+      final Blackboard fromBlackboard = createBlackboard(scope, null, false);
+      SqlNode left = join.getLeft();
+      SqlNode right = join.getRight();
+      final boolean isNatural = join.isNatural();
+      final JoinType joinType = join.getJoinType();
+      final SqlValidatorScope leftScope =
+          Util.first(validator.getJoinScope(left),
+              ((DelegatingScope) bb.scope).getParent());
+      final Blackboard leftBlackboard =
+          createBlackboard(leftScope, null, false);
+      final SqlValidatorScope rightScope =
+          Util.first(validator.getJoinScope(right),
+              ((DelegatingScope) bb.scope).getParent());
+      final Blackboard rightBlackboard =
+          createBlackboard(rightScope, null, false);
+      convertFrom(leftBlackboard, left);
+      RelNode leftRel = leftBlackboard.root;
+      convertFrom(rightBlackboard, right);
+      RelNode rightRel = rightBlackboard.root;
+      JoinRelType convertedJoinType = convertJoinType(joinType);
+      RexNode conditionExp;
+      final SqlValidatorNamespace leftNamespace = validator.getNamespace(left);
+      final SqlValidatorNamespace rightNamespace = validator.getNamespace(right);
+      if (isNatural) {
+        final RelDataType leftRowType = leftNamespace.getRowType();
+        final RelDataType rightRowType = rightNamespace.getRowType();
+        final List<String> columnList =
+            SqlValidatorUtil.deriveNaturalJoinColumnList(leftRowType,
+                rightRowType);
+        conditionExp = convertUsing(leftNamespace, rightNamespace,
+            columnList);
+      } else {
+        conditionExp =
+            convertJoinCondition(
+                fromBlackboard,
+                leftNamespace,
+                rightNamespace,
+                join.getCondition(),
+                join.getConditionType(),
+                leftRel,
+                rightRel);
+      }
+
+      final RelNode joinRel =
+          createJoin(
+              fromBlackboard,
+              leftRel,
+              rightRel,
+              conditionExp,
+              convertedJoinType);
+      bb.setRoot(joinRel, false);
+      return;
+
+    case SELECT:
+    case INTERSECT:
+    case EXCEPT:
+    case UNION:
+      final RelNode rel = convertQueryRecursive(from, false, null).project();
+      bb.setRoot(rel, true);
+      return;
+
+    case VALUES:
+      convertValuesImpl(bb, (SqlCall) from, null);
+      return;
+
+    case UNNEST:
+      call = (SqlCall) from;
+      final SqlNode node = call.operand(0);
+      final SqlUnnestOperator operator = (SqlUnnestOperator) call.getOperator();
+      replaceSubqueries(bb, node, RelOptUtil.Logic.TRUE_FALSE_UNKNOWN);
+      final RelNode childRel =
+          RelOptUtil.createProject(
+              (null != bb.root) ? bb.root : LogicalValues.createOneRow(cluster),
+              Collections.singletonList(bb.convertExpression(node)),
+              Collections.singletonList(validator.deriveAlias(node, 0)),
+              true);
+
+      Uncollect uncollect =
+          new Uncollect(cluster, cluster.traitSetOf(Convention.NONE),
+              childRel, operator.withOrdinality);
+      bb.setRoot(uncollect, true);
+      return;
+
+    case COLLECTION_TABLE:
+      call = (SqlCall) from;
+
+      // Dig out real call; TABLE() wrapper is just syntactic.
+      assert call.getOperandList().size() == 1;
+      final SqlCall call2 = call.operand(0);
+      convertCollectionTable(bb, call2);
+      return;
+
+    default:
+      throw Util.newInternal("not a join operator " + from);
+    }
+  }
+
+  protected void convertCollectionTable(
+      Blackboard bb,
+      SqlCall call) {
+    final SqlOperator operator = call.getOperator();
+    if (operator == SqlStdOperatorTable.TABLESAMPLE) {
+      final String sampleName =
+          SqlLiteral.stringValue(call.operand(0));
+      datasetStack.push(sampleName);
+      SqlCall cursorCall = call.operand(1);
+      SqlNode query = cursorCall.operand(0);
+      RelNode converted = convertQuery(query, false, false).rel;
+      bb.setRoot(converted, false);
+      datasetStack.pop();
+      return;
+    }
+    replaceSubqueries(bb, call, RelOptUtil.Logic.TRUE_FALSE_UNKNOWN);
+
+    // Expand table macro if possible. It's more efficient than
+    // LogicalTableFunctionScan.
+    final SqlCallBinding callBinding =
+        new SqlCallBinding(bb.scope.getValidator(), bb.scope, call);
+    if (operator instanceof SqlUserDefinedTableMacro) {
+      final SqlUserDefinedTableMacro udf =
+          (SqlUserDefinedTableMacro) operator;
+      final TranslatableTable table =
+          udf.getTable(typeFactory, callBinding.operands());
+      final RelDataType rowType = table.getRowType(typeFactory);
+      RelOptTable relOptTable = RelOptTableImpl.create(null, rowType, table);
+      RelNode converted = toRel(relOptTable);
+      bb.setRoot(converted, true);
+      return;
     }
 
-    /**
-     * Converts a SELECT statement's parse tree into a relational expression.
-     */
-    public RelNode convertSelect(SqlSelect select) {
-        final SqlValidatorScope selectScope = validator.getWhereScope(select);
-        final Blackboard bb = createBlackboard(selectScope, null);
-        convertSelectImpl(bb, select);
-        return bb.root;
+    Type elementType;
+    if 

<TRUNCATED>

[11/50] [abbrv] kylin git commit: KYLIN-1323 Improve performance of converting data to hfile

Posted by li...@apache.org.
KYLIN-1323 Improve performance of converting data to hfile


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ab4d8909
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ab4d8909
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ab4d8909

Branch: refs/heads/master
Commit: ab4d8909ac85af87d8b8b443044c49a79f9e3ee4
Parents: 66294d3
Author: sunyerui <su...@gmail.com>
Authored: Sun Feb 28 21:02:25 2016 +0800
Committer: sunyerui <su...@gmail.com>
Committed: Sun Feb 28 21:12:28 2016 +0800

----------------------------------------------------------------------
 build/conf/kylin.properties                     |   4 +
 .../apache/kylin/common/KylinConfigBase.java    |   8 ++
 .../engine/mr/common/AbstractHadoopJob.java     |   2 +-
 .../kylin/engine/mr/common/BatchConstants.java  |   1 +
 .../mr/steps/RangeKeyDistributionJob.java       | 115 ----------------
 .../mr/steps/RangeKeyDistributionMapper.java    |  71 ----------
 .../mr/steps/RangeKeyDistributionReducer.java   | 100 --------------
 kylin-it/pom.xml                                |   3 +
 .../kylin/provision/BuildCubeWithEngine.java    |  41 ++++++
 .../kylin/storage/hbase/steps/CubeHFileJob.java |  37 ++++-
 .../kylin/storage/hbase/steps/HBaseMRSteps.java |   4 +-
 .../hbase/steps/RangeKeyDistributionJob.java    | 127 +++++++++++++++++
 .../hbase/steps/RangeKeyDistributionMapper.java |  76 +++++++++++
 .../steps/RangeKeyDistributionReducer.java      | 136 +++++++++++++++++++
 .../hbase/util/HBaseRegionSizeCalculator.java   |   8 ++
 .../steps/RangeKeyDistributionJobTest.java      |   1 -
 .../steps/RangeKeyDistributionMapperTest.java   |   1 -
 .../steps/RangeKeyDistributionReducerTest.java  |   1 -
 18 files changed, 443 insertions(+), 293 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 44a282e..b220b2d 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -65,6 +65,10 @@ kylin.hbase.region.cut.small=5
 kylin.hbase.region.cut.medium=10
 kylin.hbase.region.cut.large=50
 
+# The HFile size in GB. A smaller HFile size gives the HFile-converting MR job more reducers, so it runs faster.
+# Set to 0 or comment out this config to disable this optimization.
+kylin.hbase.hfile.size.gb=5
+
 # Enable/disable ACL check for cube query
 kylin.query.security.enabled=true
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 7707684..3430e0b 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -407,6 +407,14 @@ public class KylinConfigBase implements Serializable {
         return Integer.parseInt(getOptional("kylin.hbase.region.count.max", "500"));
     }
 
+    public void setHBaseHFileSizeGB(int size) {
+        setProperty("kylin.hbase.hfile.size.gb", String.valueOf(size));
+    }
+
+    public int getHBaseHFileSizeGB() {
+        return Integer.parseInt(getOptional("kylin.hbase.hfile.size.gb", "0"));
+    }
+
     public int getScanThreshold() {
         return Integer.parseInt(getOptional("kylin.query.scan.threshold", "10000000"));
     }
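
The commit summary says a smaller HFile size means more reducers in the
HFile-converting job. A hedged sketch of that relationship; the rounding and
the exact use inside RangeKeyDistributionReducer are assumptions for
illustration:

    // Roughly one reducer per HFile; kylin.hbase.hfile.size.gb=0 falls back
    // to one HFile (and reducer) per region, disabling the optimization.
    static int estimateHFileReducers(long cubeSizeGB, int hfileSizeGB, int regionCount) {
      if (hfileSizeGB <= 0) {
        return regionCount;
      }
      long perRegionGB = Math.max(1, cubeSizeGB / regionCount);
      long hfilesPerRegion = Math.max(1, perRegionGB / hfileSizeGB);
      return (int) (regionCount * hfilesPerRegion);
    }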

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
index 7615269..e4eee96 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
@@ -83,7 +83,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
     protected static final Option OPTION_INPUT_FORMAT = OptionBuilder.withArgName("inputformat").hasArg().isRequired(false).withDescription("Input format").create("inputformat");
     protected static final Option OPTION_OUTPUT_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("Output path").create("output");
     protected static final Option OPTION_NCUBOID_LEVEL = OptionBuilder.withArgName("level").hasArg().isRequired(true).withDescription("N-Cuboid build level, e.g. 1, 2, 3...").create("level");
-    protected static final Option OPTION_PARTITION_FILE_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("Partition file path.").create("input");
+    protected static final Option OPTION_PARTITION_FILE_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("Partition file path.").create("partitions");
     protected static final Option OPTION_HTABLE_NAME = OptionBuilder.withArgName("htable name").hasArg().isRequired(true).withDescription("HTable name").create("htablename");
 
     protected static final Option OPTION_STATISTICS_ENABLED = OptionBuilder.withArgName("statisticsenabled").hasArg().isRequired(false).withDescription("Statistics enabled").create("statisticsenabled");

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
index 400a3aa..6943f18 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
@@ -42,6 +42,7 @@ public interface BatchConstants {
     String REGION_NUMBER_MIN = "region.number.min";
     String REGION_NUMBER_MAX = "region.number.max";
     String REGION_SPLIT_SIZE = "region.split.size";
+    String HFILE_SIZE_GB = "hfile.size.gb";
     
     String CFG_KYLIN_LOCAL_TEMP_DIR = "/tmp/kylin/";
     String CFG_KYLIN_HDFS_TEMP_DIR = "/tmp/kylin/";

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionJob.java
deleted file mode 100644
index 5632fc1..0000000
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionJob.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.engine.mr.steps;
-
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.metadata.model.DataModelDesc;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author xjiang, ysong1
- * 
- */
-
-public class RangeKeyDistributionJob extends AbstractHadoopJob {
-    protected static final Logger logger = LoggerFactory.getLogger(RangeKeyDistributionJob.class);
-
-    /*
-     * (non-Javadoc)
-     * 
-     * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
-     */
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        try {
-            options.addOption(OPTION_INPUT_PATH);
-            options.addOption(OPTION_OUTPUT_PATH);
-            options.addOption(OPTION_JOB_NAME);
-            options.addOption(OPTION_CUBE_NAME);
-
-            parseOptions(options, args);
-
-            // start job
-            String jobName = getOptionValue(OPTION_JOB_NAME);
-            job = Job.getInstance(getConf(), jobName);
-
-            setJobClasspath(job);
-
-            addInputDirs(getOptionValue(OPTION_INPUT_PATH), job);
-
-            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
-            FileOutputFormat.setOutputPath(job, output);
-            // job.getConfiguration().set("dfs.block.size", "67108864");
-
-            // Mapper
-            job.setInputFormatClass(SequenceFileInputFormat.class);
-            job.setMapperClass(RangeKeyDistributionMapper.class);
-            job.setMapOutputKeyClass(Text.class);
-            job.setMapOutputValueClass(LongWritable.class);
-
-            // Reducer - only one
-            job.setReducerClass(RangeKeyDistributionReducer.class);
-            job.setOutputFormatClass(SequenceFileOutputFormat.class);
-            job.setOutputKeyClass(Text.class);
-            job.setOutputValueClass(LongWritable.class);
-            job.setNumReduceTasks(1);
-
-            this.deletePath(job.getConfiguration(), output);
-
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
-            CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
-            CubeInstance cube = cubeMgr.getCube(cubeName);
-            DataModelDesc.RealizationCapacity cubeCapacity = cube.getDescriptor().getModel().getCapacity();
-            int regionSplitSize = KylinConfig.getInstanceFromEnv().getHBaseRegionCut(cubeCapacity.toString());
-            int maxRegionCount = KylinConfig.getInstanceFromEnv().getHBaseRegionCountMax();
-            int minRegionCount = KylinConfig.getInstanceFromEnv().getHBaseRegionCountMin();
-            
-            job.getConfiguration().set(BatchConstants.REGION_SPLIT_SIZE, String.valueOf(regionSplitSize));
-            job.getConfiguration().set(BatchConstants.REGION_NUMBER_MAX, String.valueOf(maxRegionCount));
-            job.getConfiguration().set(BatchConstants.REGION_NUMBER_MIN, String.valueOf(minRegionCount));
-            
-            return waitForCompletion(job);
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
-    }
-
-    public static void main(String[] args) throws Exception {
-        int exitCode = ToolRunner.run(new RangeKeyDistributionJob(), args);
-        System.exit(exitCode);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionMapper.java
deleted file mode 100644
index 47cbc95..0000000
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionMapper.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.engine.mr.steps;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.kylin.engine.mr.KylinMapper;
-
-/**
- * @author ysong1
- * 
- */
-public class RangeKeyDistributionMapper extends KylinMapper<Text, Text, Text, LongWritable> {
-
-    private static final long ONE_MEGA_BYTES = 1L * 1024L * 1024L;
-
-    private LongWritable outputValue = new LongWritable(0);
-
-    private long bytesRead = 0;
-
-    private Text lastKey;
-
-    @Override
-    protected void setup(Context context) throws IOException {
-        super.bindCurrentConfiguration(context.getConfiguration());
-    }
-
-    @Override
-    public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
-        lastKey = key;
-
-        int bytesLength = key.getLength() + value.getLength();
-        bytesRead += bytesLength;
-
-        if (bytesRead >= ONE_MEGA_BYTES) {
-            outputValue.set(bytesRead);
-            context.write(key, outputValue);
-
-            // reset bytesRead
-            bytesRead = 0;
-        }
-
-    }
-
-    @Override
-    protected void cleanup(Context context) throws IOException, InterruptedException {
-        if (lastKey != null) {
-            outputValue.set(bytesRead);
-            context.write(lastKey, outputValue);
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionReducer.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionReducer.java
deleted file mode 100644
index 68be74e..0000000
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionReducer.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.engine.mr.steps;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.kylin.engine.mr.KylinReducer;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author ysong1
- * 
- */
-public class RangeKeyDistributionReducer extends KylinReducer<Text, LongWritable, Text, LongWritable> {
-
-    public static final long ONE_GIGA_BYTES = 1024L * 1024L * 1024L;
-    private static final Logger logger = LoggerFactory.getLogger(RangeKeyDistributionReducer.class);
-
-    private LongWritable outputValue = new LongWritable(0);
-
-    private int minRegionCount = 1;
-    private int maxRegionCount = 500;
-    private int cut = 10;
-    private long bytesRead = 0;
-    private List<Text> gbPoints = new ArrayList<Text>();
-
-    @Override
-    protected void setup(Context context) throws IOException {
-        super.bindCurrentConfiguration(context.getConfiguration());
-
-        if (context.getConfiguration().get(BatchConstants.REGION_SPLIT_SIZE) != null) {
-            cut = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_SPLIT_SIZE));
-        }
-
-        if (context.getConfiguration().get(BatchConstants.REGION_NUMBER_MIN) != null) {
-            minRegionCount = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_NUMBER_MIN));
-        }
-
-        if (context.getConfiguration().get(BatchConstants.REGION_NUMBER_MAX) != null) {
-            maxRegionCount = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_NUMBER_MAX));
-        }
-
-        logger.info("Chosen cut for htable is " + cut + ", max region count=" + maxRegionCount + ", min region count =" + minRegionCount);
-    }
-
-    @Override
-    public void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
-        for (LongWritable v : values) {
-            bytesRead += v.get();
-        }
-
-        if (bytesRead >= ONE_GIGA_BYTES) {
-            gbPoints.add(new Text(key));
-            bytesRead = 0; // reset bytesRead
-        }
-    }
-
-    @Override
-    protected void cleanup(Context context) throws IOException, InterruptedException {
-        int nRegion = Math.round((float) gbPoints.size() / (float) cut);
-        nRegion = Math.max(minRegionCount, nRegion);
-        nRegion = Math.min(maxRegionCount, nRegion);
-
-        int gbPerRegion = gbPoints.size() / nRegion;
-        gbPerRegion = Math.max(1, gbPerRegion);
-
-        System.out.println(nRegion + " regions");
-        System.out.println(gbPerRegion + " GB per region");
-
-        for (int i = gbPerRegion; i < gbPoints.size(); i += gbPerRegion) {
-            Text key = gbPoints.get(i);
-            outputValue.set(i);
-            System.out.println(StringUtils.byteToHexString(key.getBytes()) + "\t" + outputValue.get());
-            context.write(key, outputValue);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/kylin-it/pom.xml
----------------------------------------------------------------------
diff --git a/kylin-it/pom.xml b/kylin-it/pom.xml
index 6cb44a5..99b650c 100644
--- a/kylin-it/pom.xml
+++ b/kylin-it/pom.xml
@@ -301,6 +301,7 @@
                                     <classpathScope>test</classpathScope>
                                     <executable>java</executable>
                                     <arguments>
+                                        <argument>-DuseSandbox=true</argument>
                                         <argument>-Dhdp.version=${hdp.version}</argument>
                                         <argument>-DfastBuildMode=${fastBuildMode}</argument>
                                         <argument>-classpath</argument>
@@ -321,6 +322,7 @@
                                     <classpathScope>test</classpathScope>
                                     <executable>java</executable>
                                     <arguments>
+                                        <argument>-DuseSandbox=true</argument>
                                         <argument>-Dhdp.version=${hdp.version}</argument>
                                         <argument>-DfastBuildMode=${fastBuildMode}</argument>
                                         <argument>-classpath</argument>
@@ -341,6 +343,7 @@
                                     <classpathScope>test</classpathScope>
                                     <executable>java</executable>
                                     <arguments>
+                                        <argument>-DuseSandbox=true</argument>
                                         <argument>-Dhdp.version=${hdp.version}</argument>
                                         <argument>-DfastBuildMode=${fastBuildMode}</argument>
                                         <argument>-classpath</argument>
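
The -DuseSandbox=true argument added to all three integration-test launchers is consumed further down this commit: RangeKeyDistributionJob copies the system property into the MapReduce job configuration, and RangeKeyDistributionMapper then scales its byte counter by 1024, so the small sandbox dataset still yields enough sample points to exercise the region/hfile split logic. The hand-off, condensed from the patch:

    // driver side: JVM system property -> job configuration
    if ("true".equals(System.getProperty("useSandbox")))
        job.getConfiguration().setBoolean("useSandbox", true);
    // mapper side: job configuration -> scale factor
    if (context.getConfiguration().getBoolean("useSandbox", false))
        scaleFactorForSandbox = 1024L;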

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 28808df..cfefef3 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.lang.reflect.Method;
 import java.text.SimpleDateFormat;
 import java.util.List;
+import java.util.Map;
 import java.util.TimeZone;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
@@ -36,11 +37,14 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractKylinTestCase;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
@@ -55,6 +59,8 @@ import org.apache.kylin.job.execution.DefaultChainedExecutable;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
 import org.apache.kylin.job.manager.ExecutableManager;
+import org.apache.kylin.metadata.model.IEngineAware;
+import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
 import org.apache.kylin.storage.hbase.util.StorageCleanupJob;
 import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
 
@@ -143,8 +149,10 @@ public class BuildCubeWithEngine {
 
     public void build() throws Exception {
         DeployUtil.prepareTestDataForNormalCubes("test_kylin_cube_with_slr_left_join_empty");
+        KylinConfig.getInstanceFromEnv().setHBaseHFileSizeGB(1);
         testInner();
         testLeft();
+        KylinConfig.getInstanceFromEnv().setHBaseHFileSizeGB(0);
     }
 
     protected void waitForJob(String jobId) {
@@ -345,6 +353,9 @@ public class BuildCubeWithEngine {
         DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
         jobService.addJob(job);
         waitForJob(job.getId());
+        if (segment.getCubeDesc().getEngineType() == IEngineAware.ID_MR_V1) {
+            checkHFilesInHBase(segment);
+        }
         return job.getId();
     }
 
@@ -355,4 +366,34 @@ public class BuildCubeWithEngine {
         return exitCode;
     }
 
+    private void checkHFilesInHBase(CubeSegment segment) throws IOException {
+        Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
+        String tableName = segment.getStorageLocationIdentifier();
+        HTable table = new HTable(conf, tableName);
+        HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
+        Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
+        long totalSize = 0;
+        for (Long size : sizeMap.values()) {
+            totalSize += size;
+        }
+        if (totalSize == 0) {
+            return;
+        }
+        Map<byte[], Pair<Integer, Integer>> countMap = cal.getRegionHFileCountMap();
+        // check if any region contains more than one hfile, which would mean the hfile size config took effect
+        boolean hasMultiHFileRegions = false;
+        for (Pair<Integer, Integer> count : countMap.values()) {
+            // check if hfile count is greater than store count
+            if (count.getSecond() > count.getFirst()) {
+                hasMultiHFileRegions = true;
+                break;
+            }
+        }
+        if (KylinConfig.getInstanceFromEnv().getHBaseHFileSizeGB() == 0 && hasMultiHFileRegions) {
+            throw new IOException("hfile size set to 0, but found a region containing more than one hfile");
+        } else if (KylinConfig.getInstanceFromEnv().getHBaseHFileSizeGB() > 0 && !hasMultiHFileRegions) {
+            throw new IOException("hfile size set greater than 0, but all regions still have only one hfile");
+        }
+    }
+
 }
\ No newline at end of file
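
The assertion works off per-region (stores, storefiles) pairs: with a single column family a region has one store, so a storefile count above the store count means some store holds several hfiles, i.e. the hfile size limit took effect. With hypothetical counts:

    // region A: pair (1, 1) -> one store, one hfile    -> limit not visible here
    // region B: pair (1, 3) -> one store, three hfiles -> hasMultiHFileRegions = true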

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
index 1f0b1a0..a302daf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
@@ -18,17 +18,24 @@
 
 package org.apache.kylin.storage.hbase.steps;
 
+import java.io.IOException;
+
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
 import org.apache.hadoop.hbase.mapreduce.KeyValueSortReducer;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
@@ -51,11 +58,14 @@ public class CubeHFileJob extends AbstractHadoopJob {
         try {
             options.addOption(OPTION_JOB_NAME);
             options.addOption(OPTION_CUBE_NAME);
+            options.addOption(OPTION_PARTITION_FILE_PATH);
             options.addOption(OPTION_INPUT_PATH);
             options.addOption(OPTION_OUTPUT_PATH);
             options.addOption(OPTION_HTABLE_NAME);
             parseOptions(options, args);
 
+            Path partitionFilePath = new Path(getOptionValue(OPTION_PARTITION_FILE_PATH));
+
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
             String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
 
@@ -82,8 +92,9 @@ public class CubeHFileJob extends AbstractHadoopJob {
             String tableName = getOptionValue(OPTION_HTABLE_NAME).toUpperCase();
             HTable htable = new HTable(conf, tableName);
 
-            //Automatic config !
+            // Automatic config !
             HFileOutputFormat.configureIncrementalLoad(job, htable);
+            reconfigurePartitions(conf, partitionFilePath);
 
             // set block replication to 3 for hfiles
             conf.set(DFSConfigKeys.DFS_REPLICATION_KEY, "3");
@@ -101,6 +112,30 @@ public class CubeHFileJob extends AbstractHadoopJob {
         }
     }
 
+    /**
+     * Check if a partition file exists for the hfiles; if so, replace the table splits so the job gets more reducers
+     * @param conf the job configuration
+     * @param path the hfile partition file
+     * @throws IOException
+     */
+    @SuppressWarnings("deprecation")
+    private void reconfigurePartitions(Configuration conf, Path path) throws IOException {
+        FileSystem fs = path.getFileSystem(conf);
+        if (fs.exists(path)) {
+            try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
+                int partitionCount = 0;
+                Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
+                Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
+                while (reader.next(key, value)) {
+                    partitionCount++;
+                }
+                TotalOrderPartitioner.setPartitionFile(job.getConfiguration(), path);
+                // The number of reduce tasks should be one more than the number of partition keys
+                job.setNumReduceTasks(partitionCount + 1);
+            }
+        }
+    }
+
     public static void main(String[] args) throws Exception {
         int exitCode = ToolRunner.run(new CubeHFileJob(), args);
         System.exit(exitCode);
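
reconfigurePartitions only takes effect when the _hfile partition file exists; TotalOrderPartitioner then routes keys by those split points, and N split points define N+1 key ranges, hence setNumReduceTasks(partitionCount + 1). With made-up numbers:

    // 3 split points in part-r-00000_hfile
    // => 4 key ranges => setNumReduceTasks(4)
    // => roughly one hfile per reducer (per column family)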

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
index c3bd7b5..2a21640 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
@@ -12,7 +12,6 @@ import org.apache.kylin.engine.mr.common.MapReduceExecutable;
 import org.apache.kylin.storage.hbase.ii.IIBulkLoadJob;
 import org.apache.kylin.storage.hbase.ii.IICreateHFileJob;
 import org.apache.kylin.storage.hbase.ii.IICreateHTableJob;
-import org.apache.kylin.engine.mr.steps.RangeKeyDistributionJob;
 import org.apache.kylin.job.constant.ExecutableConstants;
 import org.apache.kylin.job.execution.DefaultChainedExecutable;
 import org.apache.kylin.metadata.realization.IRealizationSegment;
@@ -72,7 +71,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
         StringBuilder cmd = new StringBuilder();
         appendExecCmdParameters(cmd, "cubename", seg.getRealization().getName());
         appendExecCmdParameters(cmd, "segmentname", seg.getName());
-        appendExecCmdParameters(cmd, "input", getRowkeyDistributionOutputPath(jobId) + "/part-r-00000");
+        appendExecCmdParameters(cmd, "partitions", getRowkeyDistributionOutputPath(jobId) + "/part-r-00000");
         appendExecCmdParameters(cmd, "statisticsenabled", String.valueOf(withStats));
 
         createHtableStep.setJobParams(cmd.toString());
@@ -90,6 +89,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
 
         appendMapReduceParameters(cmd, seg.getRealization().getDataModelDesc());
         appendExecCmdParameters(cmd, "cubename", seg.getRealization().getName());
+        appendExecCmdParameters(cmd, "partitions", getRowkeyDistributionOutputPath(jobId) + "/part-r-00000_hfile");
         appendExecCmdParameters(cmd, "input", inputPath);
         appendExecCmdParameters(cmd, "output", getHFilePath(jobId));
         appendExecCmdParameters(cmd, "htablename", seg.getStorageLocationIdentifier());
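
The create-hfile step therefore now carries two paths, the cuboid data to convert and the split points for the hfiles; roughly (all values are placeholders, abbreviated):

    -cubename <cube> -partitions <job dir>/rowkey_stats/part-r-00000_hfile
    -input <cuboid path> -output <hfile path> -htablename <htable>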

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
new file mode 100644
index 0000000..2ff7356
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.storage.hbase.steps;
+
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.metadata.model.DataModelDesc;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author xjiang, ysong1
+ * 
+ */
+
+public class RangeKeyDistributionJob extends AbstractHadoopJob {
+    protected static final Logger logger = LoggerFactory.getLogger(RangeKeyDistributionJob.class);
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
+     */
+    @Override
+    public int run(String[] args) throws Exception {
+        Options options = new Options();
+
+        try {
+            options.addOption(OPTION_INPUT_PATH);
+            options.addOption(OPTION_OUTPUT_PATH);
+            options.addOption(OPTION_JOB_NAME);
+            options.addOption(OPTION_CUBE_NAME);
+
+            parseOptions(options, args);
+
+            // start job
+            String jobName = getOptionValue(OPTION_JOB_NAME);
+            job = Job.getInstance(getConf(), jobName);
+
+            setJobClasspath(job);
+
+            addInputDirs(getOptionValue(OPTION_INPUT_PATH), job);
+
+            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
+            FileOutputFormat.setOutputPath(job, output);
+            // job.getConfiguration().set("dfs.block.size", "67108864");
+
+            // Mapper
+            job.setInputFormatClass(SequenceFileInputFormat.class);
+            job.setMapperClass(RangeKeyDistributionMapper.class);
+            job.setMapOutputKeyClass(Text.class);
+            job.setMapOutputValueClass(LongWritable.class);
+
+            // Reducer - only one
+            job.setReducerClass(RangeKeyDistributionReducer.class);
+            job.setOutputFormatClass(SequenceFileOutputFormat.class);
+            job.setOutputKeyClass(Text.class);
+            job.setOutputValueClass(LongWritable.class);
+            job.setNumReduceTasks(1);
+
+            this.deletePath(job.getConfiguration(), output);
+
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
+            CubeInstance cube = cubeMgr.getCube(cubeName);
+            int hfileSizeGB = KylinConfig.getInstanceFromEnv().getHBaseHFileSizeGB();
+            DataModelDesc.RealizationCapacity cubeCapacity = cube.getDescriptor().getModel().getCapacity();
+            int regionSplitSize = KylinConfig.getInstanceFromEnv().getHBaseRegionCut(cubeCapacity.toString());
+            int maxRegionCount = KylinConfig.getInstanceFromEnv().getHBaseRegionCountMax();
+            int minRegionCount = KylinConfig.getInstanceFromEnv().getHBaseRegionCountMin();
+            job.getConfiguration().set(BatchConstants.OUTPUT_PATH, output.toString());
+            job.getConfiguration().set(BatchConstants.HFILE_SIZE_GB, String.valueOf(hfileSizeGB));
+            job.getConfiguration().set(BatchConstants.REGION_SPLIT_SIZE, String.valueOf(regionSplitSize));
+            job.getConfiguration().set(BatchConstants.REGION_NUMBER_MAX, String.valueOf(maxRegionCount));
+            job.getConfiguration().set(BatchConstants.REGION_NUMBER_MIN, String.valueOf(minRegionCount));
+            // The partition file for hfiles is a sequence file consisting of ImmutableBytesWritable and NullWritable
+            TableMapReduceUtil.addDependencyJars(job.getConfiguration(), ImmutableBytesWritable.class, NullWritable.class);
+
+            // Pass the sandbox property on to the mapper, to simulate a large dataset
+            if (System.getProperty("useSandbox") != null && System.getProperty("useSandbox").equals("true")) {
+                job.getConfiguration().setBoolean("useSandbox", true);
+            }
+            
+            return waitForCompletion(job);
+        } catch (Exception e) {
+            printUsage(options);
+            throw e;
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new RangeKeyDistributionJob(), args);
+        System.exit(exitCode);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java
new file mode 100644
index 0000000..6f2d2bc
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.storage.hbase.steps;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.kylin.engine.mr.KylinMapper;
+
+/**
+ * @author ysong1
+ * 
+ */
+public class RangeKeyDistributionMapper extends KylinMapper<Text, Text, Text, LongWritable> {
+
+    private static final long ONE_MEGA_BYTES = 1L * 1024L * 1024L;
+
+    private LongWritable outputValue = new LongWritable(0);
+
+    private long bytesRead = 0;
+
+    private Text lastKey;
+
+    private Long scaleFactorForSandbox = 1L;
+
+    @Override
+    protected void setup(Context context) throws IOException {
+        super.bindCurrentConfiguration(context.getConfiguration());
+        if (context.getConfiguration().getBoolean("useSandbox", false)) {
+            scaleFactorForSandbox = 1024L;
+        }
+    }
+
+    @Override
+    public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
+        lastKey = key;
+
+        int bytesLength = key.getLength() + value.getLength();
+        bytesRead += bytesLength;
+
+        if ((bytesRead * scaleFactorForSandbox) >= ONE_MEGA_BYTES) {
+            outputValue.set(bytesRead * scaleFactorForSandbox);
+            context.write(key, outputValue);
+
+            // reset bytesRead
+            bytesRead = 0;
+        }
+
+    }
+
+    @Override
+    protected void cleanup(Context context) throws IOException, InterruptedException {
+        if (lastKey != null) {
+            outputValue.set(bytesRead);
+            context.write(lastKey, outputValue);
+        }
+    }
+
+}
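
The mapper emits one sample point per ONE_MEGA_BYTES of (scaled) key/value bytes. With a made-up record size of about 200 bytes: without the sandbox flag a point is emitted roughly every 5,240 records (1 MB / 200 B); with the 1024x sandbox scale the threshold drops to 1 KB of real bytes, i.e. a point about every 6 records.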

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
new file mode 100644
index 0000000..acdab62
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.storage.hbase.steps;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.kylin.engine.mr.KylinReducer;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author ysong1
+ * 
+ */
+public class RangeKeyDistributionReducer extends KylinReducer<Text, LongWritable, Text, LongWritable> {
+
+    public static final long ONE_GIGA_BYTES = 1024L * 1024L * 1024L;
+    private static final Logger logger = LoggerFactory.getLogger(RangeKeyDistributionReducer.class);
+
+    private LongWritable outputValue = new LongWritable(0);
+
+    private int minRegionCount = 1;
+    private int maxRegionCount = 500;
+    private int cut = 10;
+    private int hfileSizeGB = 1;
+    private long bytesRead = 0;
+    private List<Text> gbPoints = new ArrayList<Text>();
+    private String output = null;
+
+    @Override
+    protected void setup(Context context) throws IOException {
+        super.bindCurrentConfiguration(context.getConfiguration());
+
+        if (context.getConfiguration().get(BatchConstants.OUTPUT_PATH) != null) {
+            output = context.getConfiguration().get(BatchConstants.OUTPUT_PATH);
+        }
+
+        if (context.getConfiguration().get(BatchConstants.HFILE_SIZE_GB) != null) {
+            hfileSizeGB = Integer.valueOf(context.getConfiguration().get(BatchConstants.HFILE_SIZE_GB));
+        }
+
+        if (context.getConfiguration().get(BatchConstants.REGION_SPLIT_SIZE) != null) {
+            cut = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_SPLIT_SIZE));
+        }
+
+        if (context.getConfiguration().get(BatchConstants.REGION_NUMBER_MIN) != null) {
+            minRegionCount = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_NUMBER_MIN));
+        }
+
+        if (context.getConfiguration().get(BatchConstants.REGION_NUMBER_MAX) != null) {
+            maxRegionCount = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_NUMBER_MAX));
+        }
+
+        logger.info("Chosen cut for htable is " + cut + ", max region count=" + maxRegionCount
+            + ", min region count=" + minRegionCount + ", hfile size=" + hfileSizeGB);
+
+        // add empty key at position 0
+        gbPoints.add(new Text());
+    }
+
+    @Override
+    public void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
+        for (LongWritable v : values) {
+            bytesRead += v.get();
+        }
+
+        if (bytesRead >= ONE_GIGA_BYTES) {
+            gbPoints.add(new Text(key));
+            bytesRead = 0; // reset bytesRead
+        }
+    }
+
+    @Override
+    protected void cleanup(Context context) throws IOException, InterruptedException {
+        int nRegion = Math.round((float) gbPoints.size() / (float) cut);
+        nRegion = Math.max(minRegionCount, nRegion);
+        nRegion = Math.min(maxRegionCount, nRegion);
+
+        int gbPerRegion = gbPoints.size() / nRegion;
+        gbPerRegion = Math.max(1, gbPerRegion);
+
+        if (hfileSizeGB <= 0) {
+            hfileSizeGB = gbPerRegion;
+        }
+        int hfilePerRegion = gbPerRegion / hfileSizeGB;
+        hfilePerRegion = Math.max(1, hfilePerRegion);
+
+        System.out.println(nRegion + " regions");
+        System.out.println(gbPerRegion + " GB per region");
+        System.out.println(hfilePerRegion + " hfiles per region");
+
+        Path hfilePartitionFile = new Path(output + "/part-r-00000_hfile");
+        try (SequenceFile.Writer hfilePartitionWriter = new SequenceFile.Writer(
+                hfilePartitionFile.getFileSystem(context.getConfiguration()),
+                context.getConfiguration(), hfilePartitionFile, ImmutableBytesWritable.class, NullWritable.class)) {
+            int hfileCountInOneRegion = 0;
+            for (int i = hfileSizeGB; i < gbPoints.size(); i += hfileSizeGB) {
+                hfilePartitionWriter.append(new ImmutableBytesWritable(gbPoints.get(i).getBytes()), NullWritable.get());
+                if (++hfileCountInOneRegion >= hfilePerRegion) {
+                    Text key = gbPoints.get(i);
+                    outputValue.set(i);
+                    System.out.println(StringUtils.byteToHexString(key.getBytes()) + "\t" + outputValue.get());
+                    context.write(key, outputValue);
+
+                    hfileCountInOneRegion = 0;
+                }
+            }
+        }
+    }
+}
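
A worked pass through cleanup(), with made-up numbers and the min/max region bounds not binding: suppose 100 GB sample points were collected, cut = 10 and hfileSizeGB = 2. Then:

    nRegion        = round(100 / 10) = 10
    gbPerRegion    = 100 / 10        = 10   (GB per region)
    hfilePerRegion = 10 / 2          = 5
    // a split point goes into part-r-00000_hfile every 2 GB points, and
    // every 5th of those also becomes a region split key, so each ~10 GB
    // region is later cut into roughly five ~2 GB hfiles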

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
index ba0da00..346c3a2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.kylin.common.util.Pair;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -49,6 +50,8 @@ public class HBaseRegionSizeCalculator {
      **/
     private final Map<byte[], Long> sizeMap = new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
 
+    private final Map<byte[], Pair<Integer, Integer>> countMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
+
     static final String ENABLE_REGIONSIZECALCULATOR = "hbase.regionsizecalculator.enable";
 
     /**
@@ -93,6 +96,7 @@ public class HBaseRegionSizeCalculator {
 
                         long regionSizeBytes = regionLoad.getStorefileSizeMB() * megaByte;
                         sizeMap.put(regionId, regionSizeBytes);
+                        countMap.put(regionId, new Pair<>(regionLoad.getStores(), regionLoad.getStorefiles()));
 
                         // logger.info("Region " + regionLoad.getNameAsString()
                         // + " has size " + regionSizeBytes);
@@ -125,4 +129,8 @@ public class HBaseRegionSizeCalculator {
     public Map<byte[], Long> getRegionSizeMap() {
         return Collections.unmodifiableMap(sizeMap);
     }
+
+    public Map<byte[], Pair<Integer, Integer>> getRegionHFileCountMap() {
+        return Collections.unmodifiableMap(countMap);
+    }
 }
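
A minimal usage sketch of the extended calculator (setup and error handling omitted; the surrounding code is hypothetical):

    HTable table = new HTable(conf, tableName);
    HBaseRegionSizeCalculator calc = new HBaseRegionSizeCalculator(table);
    for (Pair<Integer, Integer> p : calc.getRegionHFileCountMap().values()) {
        // p.getFirst() = store count, p.getSecond() = storefile (hfile) count
        boolean multiHFile = p.getSecond() > p.getFirst();
    }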

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJobTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJobTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJobTest.java
index 7f5b24b..70e1ac7 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJobTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJobTest.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.engine.mr.HadoopUtil;
-import org.apache.kylin.engine.mr.steps.RangeKeyDistributionJob;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
index ca716c3..03a3cba 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mrunit.mapreduce.MapDriver;
 import org.apache.hadoop.mrunit.types.Pair;
-import org.apache.kylin.engine.mr.steps.RangeKeyDistributionMapper;
 import org.junit.Before;
 import org.junit.Test;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducerTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducerTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducerTest.java
index cbf0657..c027c40 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducerTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducerTest.java
@@ -24,7 +24,6 @@ import java.io.IOException;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
-import org.apache.kylin.engine.mr.steps.RangeKeyDistributionReducer;
 import org.junit.Before;
 import org.junit.Test;
 


[06/50] [abbrv] kylin git commit: KYLIN-1453 cuboid sharding based on specific column

Posted by li...@apache.org.
KYLIN-1453 cuboid sharding based on specific column


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/294fc707
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/294fc707
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/294fc707

Branch: refs/heads/master
Commit: 294fc70785ab009e6b4d8a12cbeb609d46f89a93
Parents: 3f5074e
Author: honma <ho...@ebay.com>
Authored: Thu Feb 25 19:02:28 2016 +0800
Committer: honma <ho...@ebay.com>
Committed: Fri Feb 26 17:54:37 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/common/util/BytesUtil.java |  18 +-
 .../dict/TupleFilterFunctionTransformer.java    | 170 +++++++++++++++++++
 .../dict/TupleFilterFunctionTranslator.java     | 166 ------------------
 .../kylin/metadata/filter/CaseTupleFilter.java  |   7 +-
 .../metadata/filter/ColumnTupleFilter.java      |  11 +-
 .../metadata/filter/CompareTupleFilter.java     |  10 +-
 .../metadata/filter/ConstantTupleFilter.java    |  16 +-
 .../metadata/filter/DynamicTupleFilter.java     |   9 +-
 .../metadata/filter/ExtractTupleFilter.java     |   7 +-
 .../metadata/filter/FunctionTupleFilter.java    |  16 +-
 .../filter/ITupleFilterTransformer.java         |  23 +++
 .../metadata/filter/ITupleFilterTranslator.java |  26 ---
 .../metadata/filter/LogicalTupleFilter.java     |  10 +-
 .../kylin/metadata/filter/TupleFilter.java      |   5 +-
 .../metadata/filter/TupleFilterSerializer.java  |  25 ++-
 .../common/coprocessor/FilterDecorator.java     |   8 +-
 .../hbase/cube/v2/CubeSegmentScanner.java       |  10 +-
 .../common/coprocessor/FilterBaseTest.java      |  46 ++++-
 .../common/coprocessor/FilterEvaluateTest.java  |   4 +-
 .../common/coprocessor/FilterSerializeTest.java |  26 ++-
 20 files changed, 325 insertions(+), 288 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java
index e01ce4f..4e0701c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java
@@ -57,7 +57,6 @@ public class BytesUtil {
         return integer;
     }
 
-
     public static long readLong(ByteBuffer buffer, int size) {
         long integer = 0;
         for (int i = 0; i < size; i++) {
@@ -133,11 +132,15 @@ public class BytesUtil {
     // from WritableUtils
     // ============================================================================
 
+
     public static void writeVInt(int i, ByteBuffer out) {
+
         writeVLong(i, out);
+
     }
 
     public static void writeVLong(long i, ByteBuffer out) {
+
         if (i >= -112 && i <= 127) {
             out.put((byte) i);
             return;
@@ -203,6 +206,8 @@ public class BytesUtil {
     }
 
     public static void writeUnsigned(int num, int size, ByteBuffer out) {
+
+
         int mask = 0xff << ((size - 1) * 8);
         for (int i = size; i > 0; i--) {
             int v = (num & mask) >> (i - 1) * 8;
@@ -222,6 +227,7 @@ public class BytesUtil {
     }
 
     public static void writeLong(long num, ByteBuffer out) {
+
         for (int i = 0; i < 8; i++) {
             out.put((byte) num);
             num >>>= 8;
@@ -257,6 +263,8 @@ public class BytesUtil {
         }
         int len = str.length();
         BytesUtil.writeVInt(len, out);
+
+
         for (int i = 0; i < len; i++) {
             out.put((byte) str.charAt(i));
         }
@@ -335,7 +343,7 @@ public class BytesUtil {
             writeVInt(-1, out);
             return;
         }
-        writeVInt(array.length, out);
+        writeVInt(length, out);
         out.put(array, offset, length);
     }
 
@@ -348,7 +356,7 @@ public class BytesUtil {
         in.get(array);
         return array;
     }
-
+    
     public static int peekByteArrayLength(ByteBuffer in) {
         int start = in.position();
         int arrayLen = readVInt(in);
@@ -369,6 +377,7 @@ public class BytesUtil {
         writeVInt(array.length, out);
         byte b_true = (byte) 1;
         byte b_false = (byte) 0;
+
         for (int i = 0; i < array.length; i++) {
             if (array[i])
                 out.put(b_true);
@@ -428,7 +437,4 @@ public class BytesUtil {
         return sb.toString();
     }
 
-    public static void main(String[] args) throws Exception {
-    }
-
 }
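
Apart from whitespace churn, the substantive fix in this file is writeVInt(length, out): the length prefix must describe the slice actually written, not the whole backing array. Illustration with hypothetical values:

    byte[] buf = {1, 2, 3, 4, 5};
    // writing the slice (offset 1, length 3) puts bytes {2, 3, 4};
    // prefixing buf.length (5) instead of length (3) would make the
    // matching read consume two bytes too many and corrupt the stream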

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTransformer.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTransformer.java b/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTransformer.java
new file mode 100644
index 0000000..096a28d
--- /dev/null
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTransformer.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.dict;
+
+import java.util.Collection;
+import java.util.ListIterator;
+
+import org.apache.kylin.common.util.Dictionary;
+import org.apache.kylin.metadata.filter.ColumnTupleFilter;
+import org.apache.kylin.metadata.filter.CompareTupleFilter;
+import org.apache.kylin.metadata.filter.ConstantTupleFilter;
+import org.apache.kylin.metadata.filter.FunctionTupleFilter;
+import org.apache.kylin.metadata.filter.ITupleFilterTransformer;
+import org.apache.kylin.metadata.filter.LogicalTupleFilter;
+import org.apache.kylin.metadata.filter.TupleFilter;
+import org.apache.kylin.metadata.filter.TupleFilter.FilterOperatorEnum;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+import com.google.common.primitives.Primitives;
+
+/**
+ * only take effect when the compare filter has function
+ */
+public class TupleFilterFunctionTransformer implements ITupleFilterTransformer {
+    public static final Logger logger = LoggerFactory.getLogger(TupleFilterFunctionTransformer.class);
+
+    private IDictionaryAware dictionaryAware;
+
+    public TupleFilterFunctionTransformer(IDictionaryAware dictionaryAware) {
+        this.dictionaryAware = dictionaryAware;
+    }
+
+    @Override
+    public TupleFilter transform(TupleFilter tupleFilter) {
+        TupleFilter translated = null;
+        if (tupleFilter instanceof CompareTupleFilter) {
+            translated = translateCompareTupleFilter((CompareTupleFilter) tupleFilter);
+            if (translated != null) {
+                logger.info("Translated {" + tupleFilter + "} to IN clause: {" + translated + "}");
+            }
+        } else if (tupleFilter instanceof FunctionTupleFilter) {
+            translated = translateFunctionTupleFilter((FunctionTupleFilter) tupleFilter);
+            if (translated != null) {
+                logger.info("Translated {" + tupleFilter + "} to IN clause: {" + translated + "}");
+            }
+        } else if (tupleFilter instanceof LogicalTupleFilter) {
+            ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) tupleFilter.getChildren().listIterator();
+            while (childIterator.hasNext()) {
+                TupleFilter transformed = transform(childIterator.next());
+                if (transformed != null)
+                    childIterator.set(transformed);
+            }
+        }
+        return translated == null ? tupleFilter : translated;
+    }
+
+    private TupleFilter translateFunctionTupleFilter(FunctionTupleFilter functionTupleFilter) {
+        if (!functionTupleFilter.isValid())
+            return null;
+
+        TblColRef columnRef = functionTupleFilter.getColumn();
+        Dictionary<?> dict = dictionaryAware.getDictionary(columnRef);
+        if (dict == null)
+            return null;
+
+        CompareTupleFilter translated = new CompareTupleFilter(FilterOperatorEnum.IN);
+        translated.addChild(new ColumnTupleFilter(columnRef));
+
+        try {
+            for (int i = dict.getMinId(); i <= dict.getMaxId(); i++) {
+                Object dictVal = dict.getValueFromId(i);
+                if ((Boolean) functionTupleFilter.invokeFunction(dictVal)) {
+                    translated.addChild(new ConstantTupleFilter(dictVal));
+                }
+            }
+        } catch (Exception e) {
+            logger.debug(e.getMessage());
+            return null;
+        }
+        return translated;
+    }
+
+    @SuppressWarnings("unchecked")
+    private TupleFilter translateCompareTupleFilter(CompareTupleFilter compTupleFilter) {
+        if (compTupleFilter.getFunction() == null)
+            return null;
+
+        FunctionTupleFilter functionTupleFilter = compTupleFilter.getFunction();
+        if (!functionTupleFilter.isValid())
+            return null;
+
+        TblColRef columnRef = functionTupleFilter.getColumn();
+        Dictionary<?> dict = dictionaryAware.getDictionary(columnRef);
+        if (dict == null)
+            return null;
+
+        CompareTupleFilter translated = new CompareTupleFilter(FilterOperatorEnum.IN);
+        translated.addChild(new ColumnTupleFilter(columnRef));
+
+        try {
+            Collection<Object> inValues = Lists.newArrayList();
+            for (int i = dict.getMinId(); i <= dict.getMaxId(); i++) {
+                Object dictVal = dict.getValueFromId(i);
+                Object computedVal = functionTupleFilter.invokeFunction(dictVal);
+                Class clazz = Primitives.wrap(computedVal.getClass());
+                Object targetVal = compTupleFilter.getFirstValue();
+                if (Primitives.isWrapperType(clazz))
+                    targetVal = clazz.cast(clazz.getDeclaredMethod("valueOf", String.class).invoke(null, compTupleFilter.getFirstValue()));
+
+                int comp = ((Comparable) computedVal).compareTo(targetVal);
+                boolean compResult = false;
+                switch (compTupleFilter.getOperator()) {
+                case EQ:
+                    compResult = comp == 0;
+                    break;
+                case NEQ:
+                    compResult = comp != 0;
+                    break;
+                case LT:
+                    compResult = comp < 0;
+                    break;
+                case LTE:
+                    compResult = comp <= 0;
+                    break;
+                case GT:
+                    compResult = comp > 0;
+                    break;
+                case GTE:
+                    compResult = comp >= 0;
+                    break;
+                case IN:
+                    compResult = compTupleFilter.getValues().contains(computedVal.toString());
+                    break;
+                case NOTIN:
+                    compResult = !compTupleFilter.getValues().contains(computedVal.toString());
+                    break;
+                default:
+                    break;
+                }
+                if (compResult) {
+                    inValues.add(dictVal);
+                }
+            }
+            translated.addChild(new ConstantTupleFilter(inValues));
+        } catch (Exception e) {
+            logger.debug(e.getMessage());
+            return null;
+        }
+        return translated;
+    }
+}
\ No newline at end of file
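
For reference, the core technique in TupleFilterFunctionTransformer is to enumerate every
value in the column's dictionary, evaluate the function (or the comparison wrapping it)
against each value, and collect the matching values into a single IN clause. A minimal
standalone sketch of that idea, assuming Java 8 (the Predicate stands in for
FunctionTupleFilter.invokeFunction; names here are illustrative, not Kylin API):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Predicate;

    public class InClauseRewriteSketch {
        // Keep the dictionary values matching the function; the result is the
        // value set of the equivalent IN clause, e.g. LOWER(col) = 'abc'
        // keeps "ABC", "Abc", "abc", ...
        static List<String> rewriteToInValues(List<String> dictValues, Predicate<String> function) {
            List<String> inValues = new ArrayList<>();
            for (String v : dictValues) {
                if (function.test(v)) {
                    inValues.add(v);
                }
            }
            return inValues;
        }
    }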

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTranslator.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTranslator.java b/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTranslator.java
deleted file mode 100644
index 1c96dd4..0000000
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/TupleFilterFunctionTranslator.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.dict;
-
-import java.util.ListIterator;
-
-import org.apache.kylin.common.util.Dictionary;
-import org.apache.kylin.metadata.filter.ColumnTupleFilter;
-import org.apache.kylin.metadata.filter.CompareTupleFilter;
-import org.apache.kylin.metadata.filter.ConstantTupleFilter;
-import org.apache.kylin.metadata.filter.FunctionTupleFilter;
-import org.apache.kylin.metadata.filter.ITupleFilterTranslator;
-import org.apache.kylin.metadata.filter.LogicalTupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilter.FilterOperatorEnum;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.primitives.Primitives;
-
-/**
- * only take effect when the compare filter has function
- */
-public class TupleFilterFunctionTranslator implements ITupleFilterTranslator {
-    public static final Logger logger = LoggerFactory.getLogger(TupleFilterFunctionTranslator.class);
-
-    private IDictionaryAware dictionaryAware;
-
-    public TupleFilterFunctionTranslator(IDictionaryAware dictionaryAware) {
-        this.dictionaryAware = dictionaryAware;
-    }
-
-    @Override
-    public TupleFilter translate(TupleFilter tupleFilter) {
-        TupleFilter translated = null;
-        if (tupleFilter instanceof CompareTupleFilter) {
-            translated = translateCompareTupleFilter((CompareTupleFilter) tupleFilter);
-            if (translated != null) {
-                logger.info("Translated {" + tupleFilter + "} to IN clause: {" + translated + "}");
-            }
-        } else if (tupleFilter instanceof FunctionTupleFilter) {
-            translated = translateFunctionTupleFilter((FunctionTupleFilter) tupleFilter);
-            if (translated != null) {
-                logger.info("Translated {" + tupleFilter + "} to IN clause: {" + translated + "}");
-            }
-        } else if (tupleFilter instanceof LogicalTupleFilter) {
-            ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) tupleFilter.getChildren().listIterator();
-            while (childIterator.hasNext()) {
-                TupleFilter tempTranslated = translate(childIterator.next());
-                if (tempTranslated != null)
-                    childIterator.set(tempTranslated);
-            }
-        }
-        return translated == null ? tupleFilter : translated;
-    }
-
-    private TupleFilter translateFunctionTupleFilter(FunctionTupleFilter functionTupleFilter) {
-        if (!functionTupleFilter.isValid())
-            return null;
-
-        TblColRef columnRef = functionTupleFilter.getColumn();
-        Dictionary<?> dict = dictionaryAware.getDictionary(columnRef);
-        if (dict == null)
-            return null;
-
-        CompareTupleFilter translated = new CompareTupleFilter(FilterOperatorEnum.IN);
-        translated.addChild(new ColumnTupleFilter(columnRef));
-
-        try {
-            for (int i = dict.getMinId(); i <= dict.getMaxId(); i++) {
-                Object dictVal = dict.getValueFromId(i);
-                if ((Boolean) functionTupleFilter.invokeFunction(dictVal)) {
-                    translated.addChild(new ConstantTupleFilter(dictVal));
-                }
-            }
-        } catch (Exception e) {
-            logger.debug(e.getMessage());
-            return null;
-        }
-        return translated;
-    }
-
-    @SuppressWarnings("unchecked")
-    private TupleFilter translateCompareTupleFilter(CompareTupleFilter compTupleFilter) {
-        if (compTupleFilter.getFunction() == null)
-            return null;
-
-        FunctionTupleFilter functionTupleFilter = compTupleFilter.getFunction();
-        if (!functionTupleFilter.isValid())
-            return null;
-
-        TblColRef columnRef = functionTupleFilter.getColumn();
-        Dictionary<?> dict = dictionaryAware.getDictionary(columnRef);
-        if (dict == null)
-            return null;
-
-        CompareTupleFilter translated = new CompareTupleFilter(FilterOperatorEnum.IN);
-        translated.addChild(new ColumnTupleFilter(columnRef));
-
-        try {
-            for (int i = dict.getMinId(); i <= dict.getMaxId(); i++) {
-                Object dictVal = dict.getValueFromId(i);
-                Object computedVal = functionTupleFilter.invokeFunction(dictVal);
-                Class clazz = Primitives.wrap(computedVal.getClass());
-                Object targetVal = compTupleFilter.getFirstValue();
-                if (Primitives.isWrapperType(clazz))
-                    targetVal = clazz.cast(clazz.getDeclaredMethod("valueOf", String.class).invoke(null, compTupleFilter.getFirstValue()));
-
-                int comp = ((Comparable) computedVal).compareTo(targetVal);
-                boolean compResult = false;
-                switch (compTupleFilter.getOperator()) {
-                case EQ:
-                    compResult = comp == 0;
-                    break;
-                case NEQ:
-                    compResult = comp != 0;
-                    break;
-                case LT:
-                    compResult = comp < 0;
-                    break;
-                case LTE:
-                    compResult = comp <= 0;
-                    break;
-                case GT:
-                    compResult = comp > 0;
-                    break;
-                case GTE:
-                    compResult = comp >= 0;
-                    break;
-                case IN:
-                    compResult = compTupleFilter.getValues().contains(computedVal.toString());
-                    break;
-                case NOTIN:
-                    compResult = !compTupleFilter.getValues().contains(computedVal.toString());
-                    break;
-                default:
-                    break;
-                }
-                if (compResult) {
-                    translated.addChild(new ConstantTupleFilter(dictVal));
-                }
-            }
-        } catch (Exception e) {
-            logger.debug(e.getMessage());
-            return null;
-        }
-        return translated;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CaseTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CaseTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CaseTupleFilter.java
index 6dbc614..2b00d69 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CaseTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CaseTupleFilter.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.metadata.filter;
 
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -101,12 +102,12 @@ public class CaseTupleFilter extends TupleFilter {
     }
 
     @Override
-    public byte[] serialize(IFilterCodeSystem<?> cs) {
-        return new byte[0];
+    public void serialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
+        //serialize nothing
     }
 
     @Override
-    public void deserialize(byte[] bytes, IFilterCodeSystem<?> cs) {
+    public void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ColumnTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ColumnTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ColumnTupleFilter.java
index 0d2a73d..029233d 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ColumnTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ColumnTupleFilter.java
@@ -84,8 +84,7 @@ public class ColumnTupleFilter extends TupleFilter {
     }
 
     @Override
-    public byte[] serialize(IFilterCodeSystem<?> cs) {
-        ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE);
+    public void serialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
         String table = columnRef.getTable();
         BytesUtil.writeUTFString(table, buffer);
 
@@ -97,17 +96,13 @@ public class ColumnTupleFilter extends TupleFilter {
 
         String dataType = columnRef.getDatatype();
         BytesUtil.writeUTFString(dataType, buffer);
-
-        byte[] result = new byte[buffer.position()];
-        System.arraycopy(buffer.array(), 0, result, 0, buffer.position());
-        return result;
     }
 
     @Override
-    public void deserialize(byte[] bytes, IFilterCodeSystem<?> cs) {
+    public void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
+
         TableDesc table = null;
         ColumnDesc column = new ColumnDesc();
-        ByteBuffer buffer = ByteBuffer.wrap(bytes);
 
         String tableName = BytesUtil.readUTFString(buffer);
         if (tableName != null) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CompareTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CompareTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CompareTupleFilter.java
index 248ab3b..fc0bab7 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CompareTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CompareTupleFilter.java
@@ -214,23 +214,19 @@ public class CompareTupleFilter extends TupleFilter {
 
     @SuppressWarnings({ "unchecked", "rawtypes" })
     @Override
-    public byte[] serialize(IFilterCodeSystem cs) {
-        ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE);
+    public void serialize(IFilterCodeSystem cs, ByteBuffer buffer) {
         int size = this.dynamicVariables.size();
         BytesUtil.writeVInt(size, buffer);
         for (Map.Entry<String, Object> entry : this.dynamicVariables.entrySet()) {
             BytesUtil.writeUTFString(entry.getKey(), buffer);
             cs.serialize(entry.getValue(), buffer);
         }
-        byte[] result = new byte[buffer.position()];
-        System.arraycopy(buffer.array(), 0, result, 0, buffer.position());
-        return result;
     }
 
     @Override
-    public void deserialize(byte[] bytes, IFilterCodeSystem<?> cs) {
+    public void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
+
         this.dynamicVariables.clear();
-        ByteBuffer buffer = ByteBuffer.wrap(bytes);
         int size = BytesUtil.readVInt(buffer);
         for (int i = 0; i < size; i++) {
             String name = BytesUtil.readUTFString(buffer);

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ConstantTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ConstantTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ConstantTupleFilter.java
index 3056a9c..db3eb4f 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ConstantTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ConstantTupleFilter.java
@@ -21,8 +21,10 @@ package org.apache.kylin.metadata.filter;
 import java.nio.ByteBuffer;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashSet;
+import java.util.TreeSet;
 
+import com.google.common.collect.Lists;
+import org.apache.commons.collections.comparators.NullComparator;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.metadata.tuple.IEvaluatableTuple;
 
@@ -40,7 +42,7 @@ public class ConstantTupleFilter extends TupleFilter {
 
     public ConstantTupleFilter() {
         super(Collections.<TupleFilter> emptyList(), FilterOperatorEnum.CONSTANT);
-        this.constantValues = new HashSet<Object>();
+        this.constantValues = Lists.newArrayList();
     }
 
     public ConstantTupleFilter(Object value) {
@@ -89,22 +91,18 @@ public class ConstantTupleFilter extends TupleFilter {
 
     @SuppressWarnings({ "unchecked", "rawtypes" })
     @Override
-    public byte[] serialize(IFilterCodeSystem cs) {
-        ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE);
+    public void serialize(IFilterCodeSystem cs, ByteBuffer buffer) {
         int size = this.constantValues.size();
         BytesUtil.writeVInt(size, buffer);
         for (Object val : this.constantValues) {
             cs.serialize(val, buffer);
         }
-        byte[] result = new byte[buffer.position()];
-        System.arraycopy(buffer.array(), 0, result, 0, buffer.position());
-        return result;
     }
 
     @Override
-    public void deserialize(byte[] bytes, IFilterCodeSystem<?> cs) {
+    public void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
+
         this.constantValues.clear();
-        ByteBuffer buffer = ByteBuffer.wrap(bytes);
         int size = BytesUtil.readVInt(buffer);
         for (int i = 0; i < size; i++) {
             this.constantValues.add(cs.deserialize(buffer));

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/DynamicTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/DynamicTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/DynamicTupleFilter.java
index a482519..d9dc52a 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/DynamicTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/DynamicTupleFilter.java
@@ -69,17 +69,12 @@ public class DynamicTupleFilter extends TupleFilter {
     }
 
     @Override
-    public byte[] serialize(IFilterCodeSystem<?> cs) {
-        ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE);
+    public void serialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
         BytesUtil.writeUTFString(variableName, buffer);
-        byte[] result = new byte[buffer.position()];
-        System.arraycopy(buffer.array(), 0, result, 0, buffer.position());
-        return result;
     }
 
     @Override
-    public void deserialize(byte[] bytes, IFilterCodeSystem<?> cs) {
-        ByteBuffer buffer = ByteBuffer.wrap(bytes);
+    public void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
         this.variableName = BytesUtil.readUTFString(buffer);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ExtractTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ExtractTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ExtractTupleFilter.java
index 6f7dfaf..591e64b 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ExtractTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ExtractTupleFilter.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.metadata.filter;
 
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -113,12 +114,12 @@ public class ExtractTupleFilter extends TupleFilter {
     }
 
     @Override
-    public byte[] serialize(IFilterCodeSystem<?> cs) {
-        return new byte[0];
+    public void serialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
+        //do nothing
     }
 
     @Override
-    public void deserialize(byte[] bytes, IFilterCodeSystem<?> cs) {
+    public void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FunctionTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FunctionTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FunctionTupleFilter.java
index 30bef97..2a08728 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FunctionTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FunctionTupleFilter.java
@@ -123,32 +123,20 @@ public class FunctionTupleFilter extends TupleFilter {
     }
 
     @Override
-    byte[] serialize(IFilterCodeSystem<?> cs) {
-        ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE);
+    void serialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
         BytesUtil.writeUTFString(name, buffer);
         BytesUtil.writeVInt(colPosition, buffer);
         BytesUtil.writeVInt(isValid ? 1 : 0, buffer);
-        BytesUtil.writeByteArray(TupleFilterSerializer.serialize(columnContainerFilter, cs), buffer);
-
-        byte[] result = new byte[buffer.position()];
-        System.arraycopy(buffer.array(), 0, result, 0, buffer.position());
-        return result;
     }
 
     @Override
-    void deserialize(byte[] bytes, IFilterCodeSystem<?> cs) {
-        ByteBuffer buffer = ByteBuffer.wrap(bytes);
+    public void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
 
         this.name = BytesUtil.readUTFString(buffer);
         this.initMethod();
 
         this.colPosition = BytesUtil.readVInt(buffer);
         this.isValid = BytesUtil.readVInt(buffer) == 1;
-
-        byte[] columnFilter = BytesUtil.readByteArray(buffer);
-        if (columnFilter != null) {
-            this.columnContainerFilter = TupleFilterSerializer.deserialize(columnFilter, cs);
-        }
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ITupleFilterTransformer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ITupleFilterTransformer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ITupleFilterTransformer.java
new file mode 100644
index 0000000..d3d5076
--- /dev/null
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ITupleFilterTransformer.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.metadata.filter;
+
+public interface ITupleFilterTransformer {
+    TupleFilter transform(TupleFilter tupleFilter);
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ITupleFilterTranslator.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ITupleFilterTranslator.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ITupleFilterTranslator.java
deleted file mode 100644
index aed284c..0000000
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ITupleFilterTranslator.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.metadata.filter;
-
-/**
- * Created by dongli on 1/7/16.
- */
-public interface ITupleFilterTranslator {
-    TupleFilter translate(TupleFilter tupleFilter);
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/LogicalTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/LogicalTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/LogicalTupleFilter.java
index 0929cf1..61657fb 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/LogicalTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/LogicalTupleFilter.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.metadata.filter;
 
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -46,7 +47,6 @@ public class LogicalTupleFilter extends TupleFilter {
         return cloneTuple;
     }
 
-
     //    private TupleFilter reverseNestedNots(TupleFilter filter, int depth) {
     //        if ((filter instanceof LogicalTupleFilter) && (filter.operator == FilterOperatorEnum.NOT)) {
     //            assert (filter.children.size() == 1);
@@ -60,7 +60,6 @@ public class LogicalTupleFilter extends TupleFilter {
     //        }
     //    }
 
-
     @Override
     public TupleFilter reverse() {
         switch (operator) {
@@ -151,12 +150,13 @@ public class LogicalTupleFilter extends TupleFilter {
     }
 
     @Override
-    public byte[] serialize(IFilterCodeSystem<?> cs) {
-        return new byte[0];
+    public void serialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
+        //do nothing
     }
 
     @Override
-    public void deserialize(byte[] bytes, IFilterCodeSystem<?> cs) {
+    public void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
+
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilter.java
index 944ddd0..1e23499 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilter.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.metadata.filter;
 
+import java.nio.ByteBuffer;
 import java.util.Collection;
 import java.util.LinkedList;
 import java.util.List;
@@ -204,9 +205,9 @@ public abstract class TupleFilter {
 
     public abstract Collection<?> getValues();
 
-    abstract byte[] serialize(IFilterCodeSystem<?> cs);
+    abstract void serialize(IFilterCodeSystem<?> cs, ByteBuffer buffer);
 
-    abstract void deserialize(byte[] bytes, IFilterCodeSystem<?> cs);
+    abstract void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer);
 
     public static boolean isEvaluableRecursively(TupleFilter filter) {
         if (filter == null)

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilterSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilterSerializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilterSerializer.java
index a394a51..39ccb15 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilterSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilterSerializer.java
@@ -18,12 +18,15 @@
 
 package org.apache.kylin.metadata.filter;
 
+import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Stack;
 
 import org.apache.kylin.common.util.BytesUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * http://eli.thegreenplace.net/2011/09/29/an-interesting-tree-serialization-algorithm-from-dwarf
@@ -33,6 +36,8 @@ import org.apache.kylin.common.util.BytesUtil;
  */
 public class TupleFilterSerializer {
 
+    private static final Logger logger = LoggerFactory.getLogger(TupleFilterSerializer.class);
+
     public interface Decorator {
         TupleFilter onSerialize(TupleFilter filter);
     }
@@ -51,8 +56,18 @@ public class TupleFilterSerializer {
     }
 
     public static byte[] serialize(TupleFilter rootFilter, Decorator decorator, IFilterCodeSystem<?> cs) {
-        ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE);
-        internalSerialize(rootFilter, decorator, buffer, cs);
+        ByteBuffer buffer;
+        int bufferSize = BUFFER_SIZE;
+        while (true) {
+            try {
+                buffer = ByteBuffer.allocate(bufferSize);
+                internalSerialize(rootFilter, decorator, buffer, cs);
+                break;
+            } catch (BufferOverflowException e) {
+                logger.info("Buffer size {} cannot hold the filter, resizing to 4 times the current size", bufferSize);
+                bufferSize *= 4;
+            }
+        }
         byte[] result = new byte[buffer.position()];
         System.arraycopy(buffer.array(), 0, result, 0, buffer.position());
         return result;
@@ -86,10 +101,9 @@ public class TupleFilterSerializer {
         if (flag < 0) {
             BytesUtil.writeVInt(-1, buffer);
         } else {
-            byte[] bytes = filter.serialize(cs);
             int opVal = filter.getOperator().getValue();
             BytesUtil.writeVInt(opVal, buffer);
-            BytesUtil.writeByteArray(bytes, buffer);
+            filter.serialize(cs, buffer);
             BytesUtil.writeVInt(flag, buffer);
         }
     }
@@ -107,8 +121,7 @@ public class TupleFilterSerializer {
 
             // deserialize filter
             TupleFilter filter = createTupleFilter(opVal);
-            byte[] filterBytes = BytesUtil.readByteArray(buffer);
-            filter.deserialize(filterBytes, cs);
+            filter.deserialize(cs, buffer);
 
             if (rootFilter == null) {
                 // push root to stack
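
The retry loop added to serialize() above follows a standard grow-on-overflow pattern:
attempt the write into a fixed-size buffer, and on BufferOverflowException restart with a
buffer four times larger. A self-contained sketch of the same pattern, assuming Java 8
(writer is a stand-in for internalSerialize):

    import java.nio.BufferOverflowException;
    import java.nio.ByteBuffer;
    import java.util.function.Consumer;

    public class GrowingBufferSketch {
        static byte[] writeWithRetry(int initialSize, Consumer<ByteBuffer> writer) {
            int size = initialSize;
            while (true) {
                try {
                    ByteBuffer buffer = ByteBuffer.allocate(size);
                    writer.accept(buffer); // may throw BufferOverflowException
                    byte[] result = new byte[buffer.position()];
                    System.arraycopy(buffer.array(), 0, result, 0, buffer.position());
                    return result;
                } catch (BufferOverflowException e) {
                    size *= 4; // grow and retry, mirroring TupleFilterSerializer
                }
            }
        }
    }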

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
index 01d3041..5208ba7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
@@ -8,11 +8,11 @@ import org.apache.kylin.common.util.Dictionary;
 import org.apache.kylin.cube.kv.RowKeyColumnIO;
 import org.apache.kylin.dict.DictCodeSystem;
 import org.apache.kylin.dict.IDictionaryAware;
-import org.apache.kylin.dict.TupleFilterFunctionTranslator;
+import org.apache.kylin.dict.TupleFilterFunctionTransformer;
 import org.apache.kylin.metadata.filter.ColumnTupleFilter;
 import org.apache.kylin.metadata.filter.CompareTupleFilter;
 import org.apache.kylin.metadata.filter.ConstantTupleFilter;
-import org.apache.kylin.metadata.filter.ITupleFilterTranslator;
+import org.apache.kylin.metadata.filter.ITupleFilterTransformer;
 import org.apache.kylin.metadata.filter.TupleFilter;
 import org.apache.kylin.metadata.filter.TupleFilterSerializer;
 import org.apache.kylin.metadata.model.TblColRef;
@@ -131,8 +131,8 @@ public class FilterDecorator implements TupleFilterSerializer.Decorator {
         if (filter == null)
             return null;
 
-        ITupleFilterTranslator translator = new TupleFilterFunctionTranslator(columnIO.getIDictionaryAware());
-        filter = translator.translate(filter);
+        ITupleFilterTransformer transformer = new TupleFilterFunctionTransformer(columnIO.getIDictionaryAware());
+        filter = transformer.transform(filter);
 
         // un-evaluatable filter is replaced with TRUE
         if (!filter.isEvaluable()) {
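
The comment above captures the coprocessor contract: a filter the region server cannot
evaluate must not drop any rows, so it degenerates to a constant TRUE and the real
filtering happens later in the query server. A sketch of that rule, assuming
ConstantTupleFilter.TRUE is the constant-true filter instance:

    // anything the coprocessor cannot evaluate must not filter out rows
    static TupleFilter evaluableOrTrue(TupleFilter filter) {
        return (filter == null || filter.isEvaluable()) ? filter : ConstantTupleFilter.TRUE;
    }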

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
index e96c602..3f00566 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeSegmentScanner.java
@@ -23,7 +23,7 @@ import org.apache.kylin.cube.gridtable.CubeGridTable;
 import org.apache.kylin.cube.gridtable.CuboidToGridTableMapping;
 import org.apache.kylin.cube.gridtable.NotEnoughGTInfoException;
 import org.apache.kylin.cube.model.CubeDesc;
-import org.apache.kylin.dict.TupleFilterFunctionTranslator;
+import org.apache.kylin.dict.TupleFilterFunctionTransformer;
 import org.apache.kylin.gridtable.EmptyGTScanner;
 import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.gridtable.GTRecord;
@@ -33,7 +33,7 @@ import org.apache.kylin.gridtable.GTScanRequest;
 import org.apache.kylin.gridtable.GTUtil;
 import org.apache.kylin.gridtable.IGTScanner;
 import org.apache.kylin.metadata.datatype.DataType;
-import org.apache.kylin.metadata.filter.ITupleFilterTranslator;
+import org.apache.kylin.metadata.filter.ITupleFilterTransformer;
 import org.apache.kylin.metadata.filter.TupleFilter;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.TblColRef;
@@ -63,9 +63,9 @@ public class CubeSegmentScanner implements IGTScanner {
 
         CuboidToGridTableMapping mapping = cuboid.getCuboidToGridTableMapping();
 
-        // translate FunctionTupleFilter to IN clause
-        ITupleFilterTranslator translator = new TupleFilterFunctionTranslator(this.cubeSeg);
-        filter = translator.translate(filter);
+        // transform FunctionTupleFilter to equivalent IN clause
+        ITupleFilterTransformer transformer = new TupleFilterFunctionTransformer(this.cubeSeg);
+        filter = transformer.transform(filter);
 
         //replace the constant values in filter to dictionary codes 
         TupleFilter gtFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, mapping.getCuboidDimensionsInGTOrder(), groups);
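
As the comment notes, after the transform the remaining constants are rewritten into
dictionary codes so the storage engine compares encoded row-key bytes directly. Per
constant, the substitution is roughly the following (a sketch, assuming the dictionary's
getIdFromValue lookup; this is not the GTUtil internals):

    int code = dict.getIdFromValue("2016-03-01"); // constant -> dictionary id used in the GT filter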

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
index 92e9699..e7ed1a8 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
@@ -18,8 +18,12 @@
 
 package org.apache.kylin.storage.hbase.common.coprocessor;
 
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Calendar;
 import java.util.Collection;
+import java.util.Date;
 import java.util.List;
 import java.util.Random;
 
@@ -39,6 +43,8 @@ import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.tuple.Tuple;
 import org.apache.kylin.metadata.tuple.TupleInfo;
 
+import com.google.common.collect.Lists;
+
 /**
  * @author xjiang
  * 
@@ -64,7 +70,7 @@ public class FilterBaseTest {
         return groups;
     }
 
-    protected CompareTupleFilter buildCompareFilter(List<TblColRef> groups, int index) {
+    protected CompareTupleFilter buildEQCompareFilter(List<TblColRef> groups, int index) {
         TblColRef column = groups.get(index);
         CompareTupleFilter compareFilter = new CompareTupleFilter(FilterOperatorEnum.EQ);
         ColumnTupleFilter columnFilter = new ColumnTupleFilter(column);
@@ -79,9 +85,31 @@ public class FilterBaseTest {
         return compareFilter;
     }
 
+    protected CompareTupleFilter buildINCompareFilter(TblColRef dateColumn) throws ParseException {
+        CompareTupleFilter compareFilter = new CompareTupleFilter(FilterOperatorEnum.IN);
+        ColumnTupleFilter columnFilter = new ColumnTupleFilter(dateColumn);
+        compareFilter.addChild(columnFilter);
+
+        List<String> inValues = Lists.newArrayList();
+        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
+        Date startDate = simpleDateFormat.parse("1970-01-01");
+        Date endDate = simpleDateFormat.parse("2100-01-01");
+        Calendar start = Calendar.getInstance();
+        start.setTime(startDate);
+        Calendar end = Calendar.getInstance();
+        end.setTime(endDate);
+        for (Date date = start.getTime(); start.before(end); start.add(Calendar.DATE, 1), date = start.getTime()) {
+            inValues.add(simpleDateFormat.format(date));
+        }
+
+        ConstantTupleFilter constantFilter = new ConstantTupleFilter(inValues);
+        compareFilter.addChild(constantFilter);
+        return compareFilter;
+    }
+
     protected TupleFilter buildAndFilter(List<TblColRef> columns) {
-        CompareTupleFilter compareFilter1 = buildCompareFilter(columns, 0);
-        CompareTupleFilter compareFilter2 = buildCompareFilter(columns, 1);
+        CompareTupleFilter compareFilter1 = buildEQCompareFilter(columns, 0);
+        CompareTupleFilter compareFilter2 = buildEQCompareFilter(columns, 1);
         LogicalTupleFilter andFilter = new LogicalTupleFilter(FilterOperatorEnum.AND);
         andFilter.addChild(compareFilter1);
         andFilter.addChild(compareFilter2);
@@ -89,8 +117,8 @@ public class FilterBaseTest {
     }
 
     protected TupleFilter buildOrFilter(List<TblColRef> columns) {
-        CompareTupleFilter compareFilter1 = buildCompareFilter(columns, 0);
-        CompareTupleFilter compareFilter2 = buildCompareFilter(columns, 1);
+        CompareTupleFilter compareFilter1 = buildEQCompareFilter(columns, 0);
+        CompareTupleFilter compareFilter2 = buildEQCompareFilter(columns, 1);
         LogicalTupleFilter logicFilter = new LogicalTupleFilter(FilterOperatorEnum.OR);
         logicFilter.addChild(compareFilter1);
         logicFilter.addChild(compareFilter2);
@@ -105,12 +133,12 @@ public class FilterBaseTest {
         TupleFilter then0 = new ConstantTupleFilter("0");
         caseFilter.addChild(then0);
 
-        TupleFilter when1 = buildCompareFilter(groups, 0);
+        TupleFilter when1 = buildEQCompareFilter(groups, 0);
         caseFilter.addChild(when1);
         TupleFilter then1 = new ConstantTupleFilter("1");
         caseFilter.addChild(then1);
 
-        TupleFilter when2 = buildCompareFilter(groups, 1);
+        TupleFilter when2 = buildEQCompareFilter(groups, 1);
         caseFilter.addChild(when2);
         TupleFilter then2 = new ConstantTupleFilter("2");
         caseFilter.addChild(then2);
@@ -153,9 +181,9 @@ public class FilterBaseTest {
         }
 
         String str1 = f1.toString();
-        System.out.println("f1=" + str1);
+        //System.out.println("f1=" + str1);
         String str2 = f2.toString();
-        System.out.println("f2=" + str2);
+        //System.out.println("f2=" + str2);
         if (!str1.equals(str2)) {
             throw new IllegalStateException("f1=" + str1 + ", f2=" + str2);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterEvaluateTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterEvaluateTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterEvaluateTest.java
index bde8dd2..aac09b7 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterEvaluateTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterEvaluateTest.java
@@ -38,7 +38,7 @@ public class FilterEvaluateTest extends FilterBaseTest {
     @Test
     public void testEvaluate00() {
         List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildCompareFilter(groups, 0);
+        TupleFilter filter = buildEQCompareFilter(groups, 0);
 
         byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
         TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
@@ -54,7 +54,7 @@ public class FilterEvaluateTest extends FilterBaseTest {
     @Test
     public void testEvaluate01() {
         List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildCompareFilter(groups, 1);
+        TupleFilter filter = buildEQCompareFilter(groups, 1);
 
         byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
         TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);

http://git-wip-us.apache.org/repos/asf/kylin/blob/294fc707/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterSerializeTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterSerializeTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterSerializeTest.java
index 8bf8ecb..0a21598 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterSerializeTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterSerializeTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.storage.hbase.common.coprocessor;
 
+import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -40,7 +41,7 @@ public class FilterSerializeTest extends FilterBaseTest {
     @Test
     public void testSerialize01() {
         List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildCompareFilter(groups, 0);
+        TupleFilter filter = buildEQCompareFilter(groups, 0);
 
         byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
         TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
@@ -51,7 +52,7 @@ public class FilterSerializeTest extends FilterBaseTest {
     @Test
     public void testSerialize02() {
         List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildCompareFilter(groups, 1);
+        TupleFilter filter = buildEQCompareFilter(groups, 1);
 
         byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
         TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
@@ -88,7 +89,7 @@ public class FilterSerializeTest extends FilterBaseTest {
         TblColRef colRef = new TblColRef(column);
         List<TblColRef> groups = new ArrayList<TblColRef>();
         groups.add(colRef);
-        TupleFilter filter = buildCompareFilter(groups, 0);
+        TupleFilter filter = buildEQCompareFilter(groups, 0);
 
         byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
         TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
@@ -103,7 +104,7 @@ public class FilterSerializeTest extends FilterBaseTest {
         TblColRef colRef = new TblColRef(column);
         List<TblColRef> groups = new ArrayList<TblColRef>();
         groups.add(colRef);
-        TupleFilter filter = buildCompareFilter(groups, 0);
+        TupleFilter filter = buildEQCompareFilter(groups, 0);
 
         byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
         TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
@@ -122,7 +123,7 @@ public class FilterSerializeTest extends FilterBaseTest {
         TblColRef colRef = new TblColRef(column);
         List<TblColRef> groups = new ArrayList<TblColRef>();
         groups.add(colRef);
-        TupleFilter filter = buildCompareFilter(groups, 0);
+        TupleFilter filter = buildEQCompareFilter(groups, 0);
 
         byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
         TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
@@ -140,7 +141,7 @@ public class FilterSerializeTest extends FilterBaseTest {
         TblColRef colRef = new TblColRef(column);
         List<TblColRef> groups = new ArrayList<TblColRef>();
         groups.add(colRef);
-        TupleFilter filter = buildCompareFilter(groups, 0);
+        TupleFilter filter = buildEQCompareFilter(groups, 0);
 
         byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
         TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
@@ -203,6 +204,19 @@ public class FilterSerializeTest extends FilterBaseTest {
     }
 
     @Test
+    public void testSerialize14() throws ParseException {
+        List<TblColRef> groups = buildGroups();
+        TupleFilter filter = buildINCompareFilter(groups.get(0));
+
+        long start = System.currentTimeMillis();
+        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
+        System.out.println("Size of serialized filter " + bytes.length + ", serialize time: " + (System.currentTimeMillis() - start));
+        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
+
+        compareFilter(filter, newFilter);
+    }
+
+    @Test
     public void testDynamic() {
         final CompareTupleFilter compareDynamicFilter = buildCompareDynamicFilter(buildGroups());
 


[21/50] [abbrv] kylin git commit: change ‘float’ to ‘double’ when syncing a hive table into kylin

Posted by li...@apache.org.
change ‘float’ to ‘double’ when syncing a hive table into kylin


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3fb67ca7
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3fb67ca7
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3fb67ca7

Branch: refs/heads/master
Commit: 3fb67ca78a6059aa0f350607299cc3551042b1b5
Parents: bc7d4f5
Author: shaofengshi <sh...@apache.org>
Authored: Wed Mar 2 17:12:10 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Mar 2 17:12:10 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/source/hive/HiveSourceTableLoader.java   | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3fb67ca7/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
index f2f2d2a..2aef4e6 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
@@ -123,7 +123,12 @@ public class HiveSourceTableLoader {
                 FieldSchema field = fields.get(i);
                 ColumnDesc cdesc = new ColumnDesc();
                 cdesc.setName(field.getName().toUpperCase());
-                cdesc.setDatatype(field.getType());
+                // use "double" in kylin for "float"
+                if ("float".equalsIgnoreCase(field.getType())) {
+                    cdesc.setDatatype("double");
+                } else {
+                    cdesc.setDatatype(field.getType());
+                }
                 cdesc.setId(String.valueOf(i + 1));
                 columns.add(cdesc);
             }
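
The mapping rule the patch introduces can be summarized in a single helper (hypothetical
name, a sketch of the same logic):

    static String toKylinDatatype(String hiveType) {
        // Kylin loads Hive FLOAT columns as double; all other types pass through
        return "float".equalsIgnoreCase(hiveType) ? "double" : hiveType;
    }

For example, toKylinDatatype("FLOAT") returns "double", while toKylinDatatype("bigint")
returns "bigint" unchanged.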


[10/50] [abbrv] kylin git commit: fix hardcoded path in FactDistinctColumnsReducerTest.java

Posted by li...@apache.org.
fix hardcoded path in FactDistinctColumnsReducerTest.java


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/66294d3e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/66294d3e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/66294d3e

Branch: refs/heads/master
Commit: 66294d3e73cc618bdb9e19f7f1848f85e263d680
Parents: 294fc70
Author: shaofengshi <sh...@apache.org>
Authored: Sat Feb 27 22:49:41 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Feb 27 23:18:28 2016 +0800

----------------------------------------------------------------------
 .../kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java    | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/66294d3e/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java
index f46683e..928db74 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java
@@ -1,5 +1,6 @@
 package org.apache.kylin.engine.mr.steps;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.Map;
 import java.util.UUID;
@@ -22,7 +23,8 @@ public class FactDistinctColumnsReducerTest {
     public void testWriteCuboidStatistics() throws IOException {
 
         final Configuration conf = HadoopUtil.getCurrentConfiguration();
-        final Path outputPath = new Path("file:///tmp/kylin/cuboidstatistics/" + UUID.randomUUID().toString());
+        File tmp = File.createTempFile("cuboidstatistics", "");
+        final Path outputPath = new Path(tmp.getParent().toString() + File.separator + UUID.randomUUID().toString());
         if (!FileSystem.getLocal(conf).exists(outputPath)) {
             //            FileSystem.getLocal(conf).create(outputPath);
         }


[44/50] [abbrv] kylin git commit: KYLIN-1343 Upgrade to calcite 1.6 (with Edward Zhang)

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidJobTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidJobTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidJobTest.java
index 7e530db..97b1ef2 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidJobTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidJobTest.java
@@ -44,7 +44,6 @@ public class MergeCuboidJobTest extends LocalFileMetadataTestCase {
         conf.set("fs.default.name", "file:///");
         conf.set("mapreduce.framework.name", "local");
         conf.set("mapreduce.application.framework.path", "");
-        
 
         // for local runner out-of-memory issue
         conf.set("mapreduce.task.io.sort.mb", "10");

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/jdbc/src/main/java/org/apache/kylin/jdbc/KylinConnection.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinConnection.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinConnection.java
index 86c8d1d..6852998 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinConnection.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinConnection.java
@@ -118,7 +118,7 @@ public class KylinConnection extends AvaticaConnection {
         ArrayList<ColumnMetaData> columns = new ArrayList<ColumnMetaData>();
         Map<String, Object> internalParams = Collections.<String, Object> emptyMap();
 
-        return new Meta.Signature(columns, sql, params, internalParams, CursorFactory.ARRAY);
+        return new Meta.Signature(columns, sql, params, internalParams, CursorFactory.ARRAY, Meta.StatementType.SELECT);
     }
 
     private KylinJdbcFactory factory() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/jdbc/src/main/java/org/apache/kylin/jdbc/KylinJdbcFactory.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinJdbcFactory.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinJdbcFactory.java
index f1a4939..6aae983 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinJdbcFactory.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinJdbcFactory.java
@@ -33,6 +33,7 @@ import org.apache.calcite.avatica.AvaticaStatement;
 import org.apache.calcite.avatica.Meta.Frame;
 import org.apache.calcite.avatica.Meta.Signature;
 import org.apache.calcite.avatica.Meta.StatementHandle;
+import org.apache.calcite.avatica.QueryState;
 import org.apache.calcite.avatica.UnregisteredDriver;
 
 /**
@@ -93,9 +94,9 @@ public class KylinJdbcFactory implements AvaticaFactory {
     }
 
     @Override
-    public AvaticaResultSet newResultSet(AvaticaStatement statement, Signature signature, TimeZone timeZone, Frame firstFrame) throws SQLException {
+    public AvaticaResultSet newResultSet(AvaticaStatement statement, QueryState state, Signature signature, TimeZone timeZone, Frame firstFrame) throws SQLException {
         AvaticaResultSetMetaData resultSetMetaData = new AvaticaResultSetMetaData(statement, null, signature);
-        return new KylinResultSet(statement, signature, resultSetMetaData, timeZone, firstFrame);
+        return new KylinResultSet(statement, state, signature, resultSetMetaData, timeZone, firstFrame);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/jdbc/src/main/java/org/apache/kylin/jdbc/KylinMeta.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinMeta.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinMeta.java
index cae39ad..05f7983 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinMeta.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinMeta.java
@@ -30,6 +30,10 @@ import java.util.regex.Pattern;
 import org.apache.calcite.avatica.AvaticaUtils;
 import org.apache.calcite.avatica.ColumnMetaData;
 import org.apache.calcite.avatica.MetaImpl;
+import org.apache.calcite.avatica.MissingResultsException;
+import org.apache.calcite.avatica.NoSuchStatementException;
+import org.apache.calcite.avatica.QueryState;
+import org.apache.calcite.avatica.remote.TypedValue;
 
 import com.google.common.collect.ImmutableList;
 
@@ -55,6 +59,13 @@ public class KylinMeta extends MetaImpl {
         result.signature = connection().mockPreparedSignature(sql);
         return result;
     }
+    
+    // real execution happens in KylinResultSet.execute()
+    @Override
+    public ExecuteResult execute(StatementHandle sh, List<TypedValue> parameterValues, long maxRowCount) throws NoSuchStatementException {
+        final MetaResultSet metaResultSet = MetaResultSet.create(sh.connectionId, sh.id, false, sh.signature, null);
+        return new ExecuteResult(ImmutableList.of(metaResultSet));
+    }
 
     // mimic from CalciteMetaImpl, real execution happens via callback in KylinResultSet.execute()
     @Override
@@ -91,24 +102,24 @@ public class KylinMeta extends MetaImpl {
     }
 
     @Override
-    public MetaResultSet getTableTypes() {
+    public MetaResultSet getTableTypes(ConnectionHandle ch) {
         return createResultSet(metaTableTypes, MetaTableType.class, "TABLE_TYPE");
     }
 
     @Override
-    public MetaResultSet getCatalogs() {
+    public MetaResultSet getCatalogs(ConnectionHandle ch) {
         List<KMetaCatalog> catalogs = getMetaProject().catalogs;
         return createResultSet(catalogs, KMetaCatalog.class, "TABLE_CAT");
     }
 
     @Override
-    public MetaResultSet getSchemas(String catalog, Pat schemaPattern) {
+    public MetaResultSet getSchemas(ConnectionHandle ch, String catalog, Pat schemaPattern) {
         List<KMetaSchema> schemas = getMetaProject().getSchemas(catalog, schemaPattern);
         return createResultSet(schemas, KMetaSchema.class, "TABLE_SCHEM", "TABLE_CATALOG");
     }
 
     @Override
-    public MetaResultSet getTables(String catalog, Pat schemaPattern, Pat tableNamePattern, List<String> typeList) {
+    public MetaResultSet getTables(ConnectionHandle ch, String catalog, Pat schemaPattern, Pat tableNamePattern, List<String> typeList) {
         List<KMetaTable> tables = getMetaProject().getTables(catalog, schemaPattern, tableNamePattern, typeList);
         return createResultSet(tables, KMetaTable.class, //
                 "TABLE_CAT", //
@@ -124,7 +135,7 @@ public class KylinMeta extends MetaImpl {
     }
 
     @Override
-    public MetaResultSet getColumns(String catalog, Pat schemaPattern, Pat tableNamePattern, Pat columnNamePattern) {
+    public MetaResultSet getColumns(ConnectionHandle ch, String catalog, Pat schemaPattern, Pat tableNamePattern, Pat columnNamePattern) {
         List<KMetaColumn> columns = getMetaProject().getColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern);
         return createResultSet(columns, KMetaColumn.class, //
                 "TABLE_CAT", //
@@ -172,7 +183,7 @@ public class KylinMeta extends MetaImpl {
         }
 
         CursorFactory cursorFactory = CursorFactory.record(clazz, fields, fieldNames);
-        Signature signature = new Signature(columns, "", null, Collections.<String, Object> emptyMap(), cursorFactory);
+        Signature signature = new Signature(columns, "", null, Collections.<String, Object> emptyMap(), cursorFactory, StatementType.SELECT);
         StatementHandle sh = this.createStatement(connection().handle);
         Frame frame = new Frame(0, true, iterable);
 
@@ -355,4 +366,27 @@ public class KylinMeta extends MetaImpl {
         }
     }
 
+    @Override
+    public Frame fetch(StatementHandle h, long offset, int fetchMaxRowCount) throws NoSuchStatementException, MissingResultsException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public boolean syncResults(StatementHandle sh, QueryState state, long offset) throws NoSuchStatementException {
+        // TODO Auto-generated method stub
+        return false;
+    }
+
+    @Override
+    public void commit(ConnectionHandle ch) {
+        // TODO Auto-generated method stub
+
+    }
+
+    @Override
+    public void rollback(ConnectionHandle ch) {
+        // TODO Auto-generated method stub
+
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/jdbc/src/main/java/org/apache/kylin/jdbc/KylinPreparedStatement.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinPreparedStatement.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinPreparedStatement.java
index b14865b..04567cb 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinPreparedStatement.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinPreparedStatement.java
@@ -18,6 +18,8 @@ public class KylinPreparedStatement extends AvaticaPreparedStatement {
 
     protected KylinPreparedStatement(AvaticaConnection connection, StatementHandle h, Signature signature, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
         super(connection, h, signature, resultSetType, resultSetConcurrency, resultSetHoldability);
+        if (this.handle.signature == null)
+            this.handle.signature = signature;
     }
 
     protected List<Object> getParameterValues2() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
index b6ac261..2b186bf 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
@@ -30,12 +30,13 @@ import org.apache.calcite.avatica.AvaticaStatement;
 import org.apache.calcite.avatica.Meta.Frame;
 import org.apache.calcite.avatica.Meta.Signature;
 import org.apache.calcite.avatica.MetaImpl;
+import org.apache.calcite.avatica.QueryState;
 import org.apache.kylin.jdbc.IRemoteClient.QueryResult;
 
 public class KylinResultSet extends AvaticaResultSet {
 
-    public KylinResultSet(AvaticaStatement statement, Signature signature, ResultSetMetaData resultSetMetaData, TimeZone timeZone, Frame firstFrame) {
-        super(statement, signature, resultSetMetaData, timeZone, firstFrame);
+    public KylinResultSet(AvaticaStatement statement, QueryState state, Signature signature, ResultSetMetaData resultSetMetaData, TimeZone timeZone, Frame firstFrame) {
+        super(statement, state, signature, resultSetMetaData, timeZone, firstFrame);
     }
 
     @Override
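
Taken together, the JDBC changes above keep Kylin's deferred-execution contract on Avatica 1.6: KylinMeta.execute() hands back a frameless MetaResultSet, KylinPreparedStatement backfills the statement handle's signature, and the real query only fires when the KylinResultSet is populated. A hedged usage sketch from the application side (the driver class and URL format follow Kylin's documented JDBC conventions; the project name, credentials and query are illustrative assumptions):

    import java.sql.Connection;
    import java.sql.Driver;
    import java.sql.ResultSet;
    import java.sql.Statement;
    import java.util.Properties;

    public class KylinJdbcExample {
        public static void main(String[] args) throws Exception {
            // documented Kylin JDBC driver class and jdbc:kylin://host:port/project URL
            Driver driver = (Driver) Class.forName("org.apache.kylin.jdbc.Driver").newInstance();
            Properties props = new Properties();
            props.put("user", "ADMIN");       // assumption: demo credentials
            props.put("password", "KYLIN");
            try (Connection conn = driver.connect("jdbc:kylin://localhost:7070/default", props);
                 Statement stmt = conn.createStatement();
                 // KylinMeta.execute() returns a placeholder result set here; the
                 // query is actually sent when the result set gets materialized
                 ResultSet rs = stmt.executeQuery("select count(*) from test_kylin_fact")) {
                while (rs.next()) {
                    System.out.println(rs.getLong(1));
                }
            }
        }
    }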

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/kylin-it/src/test/resources/logging.properties
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/logging.properties b/kylin-it/src/test/resources/logging.properties
index a925478..5d2afe2 100644
--- a/kylin-it/src/test/resources/logging.properties
+++ b/kylin-it/src/test/resources/logging.properties
@@ -1,5 +1,5 @@
 handlers=java.util.logging.ConsoleHandler
 .level=INFO
-#org.eigenbase.relopt.RelOptPlanner.level=FINEST
+#org.apache.calcite.plan.RelOptPlanner.level=FINE
 java.util.logging.ConsoleHandler.level=ALL
 java.util.logging.ConsoleHandler.formatter=org.apache.kylin.common.util.MyLogFormatter
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/kylin-it/src/test/resources/query/sql/query92.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query92.sql b/kylin-it/src/test/resources/query/sql/query92.sql
new file mode 100644
index 0000000..e551a45
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql/query92.sql
@@ -0,0 +1,30 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+select meta_categ_name, count(1) as cnt, sum(price) as GMV 
+
+ from test_kylin_fact 
+ left JOIN edw.test_cal_dt as test_cal_dt
+ ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt
+ left JOIN test_category_groupings
+ ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id
+ left JOIN edw.test_sites as test_sites
+ ON test_kylin_fact.lstg_site_id = test_sites.site_id
+
+ where meta_categ_name not in ('', 'a')
+ group by meta_categ_name 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/kylin-it/src/test/resources/query/sql/query92.sql.disabled
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query92.sql.disabled b/kylin-it/src/test/resources/query/sql/query92.sql.disabled
deleted file mode 100644
index e551a45..0000000
--- a/kylin-it/src/test/resources/query/sql/query92.sql.disabled
+++ /dev/null
@@ -1,30 +0,0 @@
---
--- Licensed to the Apache Software Foundation (ASF) under one
--- or more contributor license agreements.  See the NOTICE file
--- distributed with this work for additional information
--- regarding copyright ownership.  The ASF licenses this file
--- to you under the Apache License, Version 2.0 (the
--- "License"); you may not use this file except in compliance
--- with the License.  You may obtain a copy of the License at
---
---     http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
---
-
-select meta_categ_name, count(1) as cnt, sum(price) as GMV 
-
- from test_kylin_fact 
- left JOIN edw.test_cal_dt as test_cal_dt
- ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt
- left JOIN test_category_groupings
- ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id
- left JOIN edw.test_sites as test_sites
- ON test_kylin_fact.lstg_site_id = test_sites.site_id
-
- where meta_categ_name not in ('', 'a')
- group by meta_categ_name 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/kylin-it/src/test/resources/query/sql/query93.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query93.sql b/kylin-it/src/test/resources/query/sql/query93.sql
new file mode 100644
index 0000000..cc6dca5
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql/query93.sql
@@ -0,0 +1,30 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+select meta_categ_name, count(1) as cnt, sum(price) as GMV 
+
+ from test_kylin_fact 
+ left JOIN edw.test_cal_dt as test_cal_dt
+ ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt
+ left JOIN test_category_groupings
+ ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id
+ left JOIN edw.test_sites as test_sites
+ ON test_kylin_fact.lstg_site_id = test_sites.site_id
+
+ where meta_categ_name is not null
+ group by meta_categ_name 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/kylin-it/src/test/resources/query/sql/query93.sql.disabled
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query93.sql.disabled b/kylin-it/src/test/resources/query/sql/query93.sql.disabled
deleted file mode 100644
index cc6dca5..0000000
--- a/kylin-it/src/test/resources/query/sql/query93.sql.disabled
+++ /dev/null
@@ -1,30 +0,0 @@
---
--- Licensed to the Apache Software Foundation (ASF) under one
--- or more contributor license agreements.  See the NOTICE file
--- distributed with this work for additional information
--- regarding copyright ownership.  The ASF licenses this file
--- to you under the Apache License, Version 2.0 (the
--- "License"); you may not use this file except in compliance
--- with the License.  You may obtain a copy of the License at
---
---     http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
---
-
-select meta_categ_name, count(1) as cnt, sum(price) as GMV 
-
- from test_kylin_fact 
- left JOIN edw.test_cal_dt as test_cal_dt
- ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt
- left JOIN test_category_groupings
- ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id
- left JOIN edw.test_sites as test_sites
- ON test_kylin_fact.lstg_site_id = test_sites.site_id
-
- where meta_categ_name is not null
- group by meta_categ_name 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/kylin-it/src/test/resources/query/sql/query94.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query94.sql b/kylin-it/src/test/resources/query/sql/query94.sql
new file mode 100644
index 0000000..c7899fd
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql/query94.sql
@@ -0,0 +1,30 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+select meta_categ_name, count(1) as cnt, sum(price) as GMV 
+
+ from test_kylin_fact 
+ left JOIN edw.test_cal_dt as test_cal_dt
+ ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt
+ left JOIN test_category_groupings
+ ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id
+ left JOIN edw.test_sites as test_sites
+ ON test_kylin_fact.lstg_site_id = test_sites.site_id
+
+ where meta_categ_name not in ('Unknown')
+ group by meta_categ_name 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/kylin-it/src/test/resources/query/sql/query94.sql.disabled
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query94.sql.disabled b/kylin-it/src/test/resources/query/sql/query94.sql.disabled
deleted file mode 100644
index c7899fd..0000000
--- a/kylin-it/src/test/resources/query/sql/query94.sql.disabled
+++ /dev/null
@@ -1,30 +0,0 @@
---
--- Licensed to the Apache Software Foundation (ASF) under one
--- or more contributor license agreements.  See the NOTICE file
--- distributed with this work for additional information
--- regarding copyright ownership.  The ASF licenses this file
--- to you under the Apache License, Version 2.0 (the
--- "License"); you may not use this file except in compliance
--- with the License.  You may obtain a copy of the License at
---
---     http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
---
-
-select meta_categ_name, count(1) as cnt, sum(price) as GMV 
-
- from test_kylin_fact 
- left JOIN edw.test_cal_dt as test_cal_dt
- ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt
- left JOIN test_category_groupings
- ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id
- left JOIN edw.test_sites as test_sites
- ON test_kylin_fact.lstg_site_id = test_sites.site_id
-
- where meta_categ_name not in ('Unknown')
- group by meta_categ_name 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/kylin-it/src/test/resources/query/sql/query95.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query95.sql b/kylin-it/src/test/resources/query/sql/query95.sql
new file mode 100644
index 0000000..578b93f
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql/query95.sql
@@ -0,0 +1,30 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+select meta_categ_name, count(1) as cnt, sum(price) as GMV 
+
+ from test_kylin_fact 
+ left JOIN edw.test_cal_dt as test_cal_dt
+ ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt
+ left JOIN test_category_groupings
+ ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id
+ left JOIN edw.test_sites as test_sites
+ ON test_kylin_fact.lstg_site_id = test_sites.site_id
+
+ where meta_categ_name not in ('Unknown', 'ToyHobbies', '', 'a', 'BookMagazines')
+ group by meta_categ_name 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/kylin-it/src/test/resources/query/sql/query95.sql.disabled
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query95.sql.disabled b/kylin-it/src/test/resources/query/sql/query95.sql.disabled
deleted file mode 100644
index 578b93f..0000000
--- a/kylin-it/src/test/resources/query/sql/query95.sql.disabled
+++ /dev/null
@@ -1,30 +0,0 @@
---
--- Licensed to the Apache Software Foundation (ASF) under one
--- or more contributor license agreements.  See the NOTICE file
--- distributed with this work for additional information
--- regarding copyright ownership.  The ASF licenses this file
--- to you under the Apache License, Version 2.0 (the
--- "License"); you may not use this file except in compliance
--- with the License.  You may obtain a copy of the License at
---
---     http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
---
-
-select meta_categ_name, count(1) as cnt, sum(price) as GMV 
-
- from test_kylin_fact 
- left JOIN edw.test_cal_dt as test_cal_dt
- ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt
- left JOIN test_category_groupings
- ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id
- left JOIN edw.test_sites as test_sites
- ON test_kylin_fact.lstg_site_id = test_sites.site_id
-
- where meta_categ_name not in ('Unknown', 'ToyHobbies', '', 'a', 'BookMagazines')
- group by meta_categ_name 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 2e42841..501bf77 100644
--- a/pom.xml
+++ b/pom.xml
@@ -99,7 +99,7 @@
         <spring.boot.version>1.2.7.RELEASE</spring.boot.version>
 
         <!-- Calcite Version -->
-        <calcite.version>1.4.0-incubating</calcite.version>
+        <calcite.version>1.6.0</calcite.version>
 
         <!-- Curator.version Version -->
         <curator.version>2.6.0</curator.version>
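
The Calcite bump from 1.4.0-incubating to 1.6.0 changes two RelNode hooks that every OLAP*Rel below overrides: computeSelfCost() gains a RelMetadataQuery argument, and getRows() is superseded by estimateRowCount(RelMetadataQuery). A minimal sketch of the migration pattern the following diffs apply repeatedly (the .05 cost discount is Kylin's own convention for steering the planner toward OLAP rels; shown as a commented before/after fragment rather than a full class):

    // Calcite 1.4 (old):
    //   public RelOptCost computeSelfCost(RelOptPlanner planner) {
    //       return super.computeSelfCost(planner).multiplyBy(.05);
    //   }
    //   public double getRows() { return super.getRows() * 0.1; }

    // Calcite 1.6 (new), as used by the OLAP*Rel classes below:
    @Override
    public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
        return super.computeSelfCost(planner, mq).multiplyBy(.05);
    }

    @Override
    public double estimateRowCount(RelMetadataQuery mq) {
        return super.estimateRowCount(mq) * 0.1;
    }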

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/query/src/main/java/org/apache/kylin/query/optrule/OLAPJoinRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/OLAPJoinRule.java b/query/src/main/java/org/apache/kylin/query/optrule/OLAPJoinRule.java
index 10047e6..c31d1d0 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/OLAPJoinRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/OLAPJoinRule.java
@@ -63,7 +63,7 @@ public class OLAPJoinRule extends ConverterRule {
         try {
             newRel = new OLAPJoinRel(cluster, traitSet, left, right, //
                     info.getEquiCondition(left, right, cluster.getRexBuilder()), //
-                    info.leftKeys, info.rightKeys, join.getJoinType(), join.getVariablesStopped());
+                    info.leftKeys, info.rightKeys, join.getVariablesSet(), join.getJoinType());
         } catch (InvalidRelException e) {
             // Semantic error not possible. Must be a bug. Convert to internal error.
             throw new AssertionError(e);

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/query/src/main/java/org/apache/kylin/query/relnode/OLAPAggregateRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPAggregateRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPAggregateRel.java
index eed5636..9414757 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPAggregateRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPAggregateRel.java
@@ -37,6 +37,7 @@ import org.apache.calcite.rel.InvalidRelException;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.core.Aggregate;
 import org.apache.calcite.rel.core.AggregateCall;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rel.type.RelDataTypeField;
@@ -60,6 +61,7 @@ import org.apache.kylin.metadata.model.MeasureDesc;
 import org.apache.kylin.metadata.model.ParameterDesc;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.query.schema.OLAPTable;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -122,8 +124,8 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
     }
 
     @Override
-    public RelOptCost computeSelfCost(RelOptPlanner planner) {
-        return super.computeSelfCost(planner).multiplyBy(.05);
+    public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
+        return super.computeSelfCost(planner, mq).multiplyBy(.05);
     }
 
     @Override
@@ -280,6 +282,7 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
         fillbackOptimizedColumn();
         
         ColumnRowType inputColumnRowType = ((OLAPRel) getInput()).getColumnRowType();
+        RelDataTypeFactory typeFactory = getCluster().getTypeFactory();
         for (int i = 0; i < this.aggregations.size(); i++) {
             FunctionDesc aggFunc = this.aggregations.get(i);
             
@@ -290,7 +293,8 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
             
             if (aggFunc.needRewrite()) {
                 String rewriteFieldName = aggFunc.getRewriteFieldName();
-                this.context.rewriteFields.put(rewriteFieldName, null);
+                RelDataType rewriteFieldType = OLAPTable.createSqlType(typeFactory, aggFunc.getRewriteFieldType(), true);
+                this.context.rewriteFields.put(rewriteFieldName, rewriteFieldType);
 
                 TblColRef column = buildRewriteColumn(aggFunc);
                 this.context.metricsColumns.add(column);

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
index a847890..5ea138f 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
@@ -36,6 +36,7 @@ import org.apache.calcite.plan.RelTrait;
 import org.apache.calcite.plan.RelTraitSet;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.core.Filter;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rex.RexBuilder;
 import org.apache.calcite.rex.RexCall;
@@ -252,8 +253,8 @@ public class OLAPFilterRel extends Filter implements OLAPRel {
     }
 
     @Override
-    public RelOptCost computeSelfCost(RelOptPlanner planner) {
-        return super.computeSelfCost(planner).multiplyBy(.05);
+    public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
+        return super.computeSelfCost(planner, mq).multiplyBy(.05);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/query/src/main/java/org/apache/kylin/query/relnode/OLAPJoinRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPJoinRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPJoinRel.java
index 09a0b17..efe404b 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPJoinRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPJoinRel.java
@@ -43,8 +43,10 @@ import org.apache.calcite.plan.RelTrait;
 import org.apache.calcite.plan.RelTraitSet;
 import org.apache.calcite.rel.InvalidRelException;
 import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.core.CorrelationId;
 import org.apache.calcite.rel.core.JoinInfo;
 import org.apache.calcite.rel.core.JoinRelType;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory.FieldInfoBuilder;
 import org.apache.calcite.rel.type.RelDataTypeField;
@@ -73,8 +75,8 @@ public class OLAPJoinRel extends EnumerableJoin implements OLAPRel {
 
     public OLAPJoinRel(RelOptCluster cluster, RelTraitSet traits, RelNode left, RelNode right, //
             RexNode condition, ImmutableIntList leftKeys, ImmutableIntList rightKeys, //
-            JoinRelType joinType, Set<String> variablesStopped) throws InvalidRelException {
-        super(cluster, traits, left, right, condition, leftKeys, rightKeys, joinType, variablesStopped);
+            Set<CorrelationId> variablesSet, JoinRelType joinType) throws InvalidRelException {
+        super(cluster, traits, left, right, condition, leftKeys, rightKeys, variablesSet, joinType);
         Preconditions.checkArgument(getConvention() == OLAPRel.CONVENTION);
         this.rowType = getRowType();
         this.isTopJoin = false;
@@ -87,7 +89,7 @@ public class OLAPJoinRel extends EnumerableJoin implements OLAPRel {
         final JoinInfo joinInfo = JoinInfo.of(left, right, condition);
         assert joinInfo.isEqui();
         try {
-            return new OLAPJoinRel(getCluster(), traitSet, left, right, condition, joinInfo.leftKeys, joinInfo.rightKeys, joinType, variablesStopped);
+            return new OLAPJoinRel(getCluster(), traitSet, left, right, condition, joinInfo.leftKeys, joinInfo.rightKeys, variablesSet, joinType);
         } catch (InvalidRelException e) {
             // Semantic error not possible. Must be a bug. Convert to internal error.
             throw new AssertionError(e);
@@ -95,13 +97,13 @@ public class OLAPJoinRel extends EnumerableJoin implements OLAPRel {
     }
 
     @Override
-    public RelOptCost computeSelfCost(RelOptPlanner planner) {
-        return super.computeSelfCost(planner).multiplyBy(.05);
+    public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
+        return super.computeSelfCost(planner, mq).multiplyBy(.05);
     }
 
     @Override
-    public double getRows() {
-        return super.getRows() * 0.1;
+    public double estimateRowCount(RelMetadataQuery mq) {
+        return super.estimateRowCount(mq) * 0.1;
     }
 
     @Override
@@ -225,8 +227,8 @@ public class OLAPJoinRel extends EnumerableJoin implements OLAPRel {
                     RexNode.class, //
                     ImmutableIntList.class, //
                     ImmutableIntList.class, //
-                    JoinRelType.class, //
-                    Set.class);
+                    Set.class, //
+                    JoinRelType.class);
             constr.setAccessible(true);
         } catch (Exception e) {
             throw new RuntimeException(e);
@@ -238,7 +240,7 @@ public class OLAPJoinRel extends EnumerableJoin implements OLAPRel {
         if (this.hasSubQuery) {
             try {
                 return constr.newInstance(getCluster(), getCluster().traitSetOf(EnumerableConvention.INSTANCE), //
-                        inputs.get(0), inputs.get(1), condition, leftKeys, rightKeys, joinType, variablesStopped);
+                        inputs.get(0), inputs.get(1), condition, leftKeys, rightKeys, variablesSet, joinType);
             } catch (Exception e) {
                 throw new IllegalStateException("Can't create EnumerableJoin!", e);
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
index 82aa9de..988d1fa 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
@@ -31,6 +31,7 @@ import org.apache.calcite.plan.RelTraitSet;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.RelWriter;
 import org.apache.calcite.rel.SingleRel;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.rex.RexLiteral;
 import org.apache.calcite.rex.RexNode;
 
@@ -59,8 +60,8 @@ public class OLAPLimitRel extends SingleRel implements OLAPRel {
     }
 
     @Override
-    public RelOptCost computeSelfCost(RelOptPlanner planner) {
-        return super.computeSelfCost(planner).multiplyBy(.05);
+    public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
+        return super.computeSelfCost(planner, mq).multiplyBy(.05);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
index b0436e2..8e454c9 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
@@ -35,6 +35,7 @@ import org.apache.calcite.plan.RelTrait;
 import org.apache.calcite.plan.RelTraitSet;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.core.Project;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory.FieldInfoBuilder;
 import org.apache.calcite.rel.type.RelDataTypeField;
@@ -86,8 +87,8 @@ public class OLAPProjectRel extends Project implements OLAPRel {
     }
 
     @Override
-    public RelOptCost computeSelfCost(RelOptPlanner planner) {
-        return super.computeSelfCost(planner).multiplyBy(.05);
+    public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
+        return super.computeSelfCost(planner, mq).multiplyBy(.05);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/query/src/main/java/org/apache/kylin/query/relnode/OLAPSortRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPSortRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPSortRel.java
index c3e0595..64a6c3c 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPSortRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPSortRel.java
@@ -32,6 +32,7 @@ import org.apache.calcite.rel.RelCollation;
 import org.apache.calcite.rel.RelFieldCollation;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.core.Sort;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.rex.RexNode;
 import org.apache.kylin.metadata.model.MeasureDesc;
 import org.apache.kylin.metadata.model.TblColRef;
@@ -58,8 +59,8 @@ public class OLAPSortRel extends Sort implements OLAPRel {
     }
 
     @Override
-    public RelOptCost computeSelfCost(RelOptPlanner planner) {
-        return super.computeSelfCost(planner).multiplyBy(.05);
+    public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
+        return super.computeSelfCost(planner, mq).multiplyBy(.05);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/query/src/main/java/org/apache/kylin/query/relnode/OLAPTableScan.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPTableScan.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPTableScan.java
index 675fd99..bcef413 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPTableScan.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPTableScan.java
@@ -40,14 +40,17 @@ import org.apache.calcite.plan.volcano.AbstractConverter.ExpandConversionRule;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.RelWriter;
 import org.apache.calcite.rel.core.TableScan;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.rel.rules.AggregateExpandDistinctAggregatesRule;
 import org.apache.calcite.rel.rules.AggregateJoinTransposeRule;
 import org.apache.calcite.rel.rules.AggregateProjectMergeRule;
 import org.apache.calcite.rel.rules.FilterJoinRule;
 import org.apache.calcite.rel.rules.FilterProjectTransposeRule;
 import org.apache.calcite.rel.rules.JoinCommuteRule;
+import org.apache.calcite.rel.rules.JoinPushExpressionsRule;
 import org.apache.calcite.rel.rules.JoinPushThroughJoinRule;
 import org.apache.calcite.rel.rules.ReduceExpressionsRule;
+import org.apache.calcite.rel.rules.SortJoinTransposeRule;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rel.type.RelDataTypeField;
@@ -147,6 +150,8 @@ public class OLAPTableScan extends TableScan implements OLAPRel, EnumerableRel {
         planner.removeRule(AggregateJoinTransposeRule.INSTANCE);
         planner.removeRule(AggregateProjectMergeRule.INSTANCE);
         planner.removeRule(FilterProjectTransposeRule.INSTANCE);
+        planner.removeRule(SortJoinTransposeRule.INSTANCE);
+        planner.removeRule(JoinPushExpressionsRule.INSTANCE);
         // distinct count will be split into a separated query that is joined with the left query
         planner.removeRule(AggregateExpandDistinctAggregatesRule.INSTANCE);
 
@@ -165,8 +170,8 @@ public class OLAPTableScan extends TableScan implements OLAPRel, EnumerableRel {
     }
 
     @Override
-    public RelOptCost computeSelfCost(RelOptPlanner planner) {
-        return super.computeSelfCost(planner).multiplyBy(.05);
+    public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
+        return super.computeSelfCost(planner, mq).multiplyBy(.05);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/query/src/main/java/org/apache/kylin/query/relnode/OLAPToEnumerableConverter.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPToEnumerableConverter.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPToEnumerableConverter.java
index 7053694..de7e7e2 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPToEnumerableConverter.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPToEnumerableConverter.java
@@ -35,6 +35,7 @@ import org.apache.calcite.plan.RelOptUtil;
 import org.apache.calcite.plan.RelTraitSet;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.convert.ConverterImpl;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.sql.SqlExplainLevel;
 import org.apache.kylin.metadata.realization.IRealization;
@@ -56,8 +57,9 @@ public class OLAPToEnumerableConverter extends ConverterImpl implements Enumerab
     }
 
     @Override
-    public RelOptCost computeSelfCost(RelOptPlanner planner) {
-        return super.computeSelfCost(planner).multiplyBy(.05);
+    public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
+        // huge cost to ensure OLAPToEnumerableConverter only appears once in rel tree
+        return planner.getCostFactory().makeCost(1E100, 0, 0);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/f73abf6c/query/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/schema/OLAPTable.java b/query/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
index a8789ea..12ed6e5 100644
--- a/query/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
+++ b/query/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
@@ -43,6 +43,7 @@ import org.apache.calcite.schema.impl.AbstractTableQueryable;
 import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.calcite.sql.type.SqlTypeUtil;
 import org.apache.calcite.util.ImmutableBitSet;
+import org.apache.kylin.metadata.datatype.DataType;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.MeasureDesc;
@@ -128,21 +129,21 @@ public class OLAPTable extends AbstractQueryableTable implements TranslatableTab
     private RelDataType deriveRowType(RelDataTypeFactory typeFactory) {
         RelDataTypeFactory.FieldInfoBuilder fieldInfo = typeFactory.builder();
         for (ColumnDesc column : exposedColumns) {
-            RelDataType sqlType = createSqlType(typeFactory, column);
+            RelDataType sqlType = createSqlType(typeFactory, column.getType(), column.isNullable());
             sqlType = SqlTypeUtil.addCharsetAndCollation(sqlType, typeFactory);
             fieldInfo.add(column.getName(), sqlType);
         }
         return typeFactory.createStructType(fieldInfo);
     }
 
-    private RelDataType createSqlType(RelDataTypeFactory typeFactory, ColumnDesc column) {
-        SqlTypeName sqlTypeName = SQLTYPE_MAPPING.get(column.getTypeName());
+    public static RelDataType createSqlType(RelDataTypeFactory typeFactory, DataType dataType, boolean isNullable) {
+        SqlTypeName sqlTypeName = SQLTYPE_MAPPING.get(dataType.getName());
         if (sqlTypeName == null)
-            throw new IllegalArgumentException("Unrecognized column type " + column.getTypeName() + " from " + column);
-
-        int precision = column.getTypePrecision();
-        int scale = column.getTypeScale();
+            throw new IllegalArgumentException("Unrecognized data type " + dataType);
 
+        int precision = dataType.getPrecision();
+        int scale = dataType.getScale();
+        
         RelDataType result;
         if (precision >= 0 && scale >= 0)
             result = typeFactory.createSqlType(sqlTypeName, precision, scale);
@@ -152,7 +153,7 @@ public class OLAPTable extends AbstractQueryableTable implements TranslatableTab
             result = typeFactory.createSqlType(sqlTypeName);
 
         // due to left join and uncertain data quality, dimension value can be null
-        if (column.isNullable()) {
+        if (isNullable) {
             result = typeFactory.createTypeWithNullability(result, true);
         } else {
             result = typeFactory.createTypeWithNullability(result, false);


[26/50] [abbrv] kylin git commit: KYLIN-1383 remove deploy.env from the front end; permission control now depends only on acl.defaultRole and acl.adminRole in kylin.properties

Posted by li...@apache.org.
KYLIN-1383 remove deploy.env from the front end; permission control now depends only on acl.defaultRole and acl.adminRole in kylin.properties
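
For reference, the two properties the message points at are plain entries in kylin.properties. An illustrative configuration (the role values shown are assumptions based on Kylin's shipped defaults, not part of this commit):

    # roles granted to every authenticated user
    acl.defaultRole=ROLE_ANALYST,ROLE_MODELER
    # role that gets admin rights across all projects
    acl.adminRole=ROLE_ADMIN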


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/2d4922dc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/2d4922dc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/2d4922dc

Branch: refs/heads/master
Commit: 2d4922dc6a55aa9376d424f1fce5454f7151e3de
Parents: c4d94f7
Author: Jason <ji...@163.com>
Authored: Thu Mar 3 14:24:38 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Thu Mar 3 14:25:32 2016 +0800

----------------------------------------------------------------------
 webapp/app/js/services/kylinProperties.js  | 1 +
 webapp/app/partials/jobs/jobs.html         | 2 +-
 webapp/app/partials/models/models.html     | 2 +-
 webapp/app/partials/projects/projects.html | 2 +-
 webapp/app/partials/query/query.html       | 2 +-
 5 files changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/2d4922dc/webapp/app/js/services/kylinProperties.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/services/kylinProperties.js b/webapp/app/js/services/kylinProperties.js
index 546db2b..68e8766 100644
--- a/webapp/app/js/services/kylinProperties.js
+++ b/webapp/app/js/services/kylinProperties.js
@@ -55,6 +55,7 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
     return false;
   }
 
+  //deprecated
   this.getDeployEnv = function () {
     this.deployEnv = this.getProperty("deploy.env");
     if (!this.deployEnv) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/2d4922dc/webapp/app/partials/jobs/jobs.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/jobs/jobs.html b/webapp/app/partials/jobs/jobs.html
index daf4578..cc5840b 100644
--- a/webapp/app/partials/jobs/jobs.html
+++ b/webapp/app/partials/jobs/jobs.html
@@ -22,7 +22,7 @@
         <form ng-if="userService.isAuthorized()">
             <div class="form-group" ng-if="userService.hasRole('ROLE_MODELER')" >
                 <a class="btn btn-xs btn-info" href="projects" tooltip="Manage Project"><i class="fa fa-gears"></i></a>
-              <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_ADMIN')||kylinConfig.getDeployEnv()!=='PROD'" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
+              <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_MODELER')" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
                     <i class="fa fa-plus"></i>
                 </a>
             </div>

http://git-wip-us.apache.org/repos/asf/kylin/blob/2d4922dc/webapp/app/partials/models/models.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/models/models.html b/webapp/app/partials/models/models.html
index 88cc6f3..c8e6ed1 100644
--- a/webapp/app/partials/models/models.html
+++ b/webapp/app/partials/models/models.html
@@ -21,7 +21,7 @@
     <form class="navbar-form navbar-left" style="margin-top: 0px !important;" ng-if="userService.isAuthorized()">
         <div class="form-group" ng-if="userService.hasRole('ROLE_MODELER')" >
             <a class="btn btn-xs btn-info" href="projects" tooltip="Manage Project"><i class="fa fa-gears"></i></a>
-          <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_ADMIN')||kylinConfig.getDeployEnv()!=='PROD'" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
+          <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_MODELER')" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
                 <i class="fa fa-plus"></i>
             </a>
         </div>

http://git-wip-us.apache.org/repos/asf/kylin/blob/2d4922dc/webapp/app/partials/projects/projects.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/projects/projects.html b/webapp/app/partials/projects/projects.html
index 26a2037..96e4a91 100644
--- a/webapp/app/partials/projects/projects.html
+++ b/webapp/app/partials/projects/projects.html
@@ -17,7 +17,7 @@
 -->
 
 <div class="page-header">
-    <button class="btn btn-primary btn-sm" ng-if="userService.hasRole('ROLE_ADMIN')||userService.hasRole('ROLE_MODELER')&&kylinConfig.getDeployEnv()!=='PROD'" ng-click="toCreateProj()"><i class="fa fa-plus"></i> Project</button>
+    <button class="btn btn-primary btn-sm" ng-if="userService.hasRole('ROLE_MODELER')" ng-click="toCreateProj()"><i class="fa fa-plus"></i> Project</button>
 </div>
 
 <div ng-if="!loading && projects.length == 0">

http://git-wip-us.apache.org/repos/asf/kylin/blob/2d4922dc/webapp/app/partials/query/query.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/query/query.html b/webapp/app/partials/query/query.html
index 0af08d6..69fe649 100644
--- a/webapp/app/partials/query/query.html
+++ b/webapp/app/partials/query/query.html
@@ -21,7 +21,7 @@
     <form class="navbar-form navbar-left" style="margin-top: 0px !important;" ng-if="userService.isAuthorized()">
         <div class="form-group" ng-if="userService.hasRole('ROLE_MODELER')">
             <a class="btn btn-xs btn-info" href="projects" tooltip="Manage Project"><i class="fa fa-gears"></i></a>
-          <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_ADMIN')||kylinConfig.getDeployEnv()!=='PROD'" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
+          <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_MODELER')" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
                 <i class="fa fa-plus"></i>
             </a>
         </div>


[27/50] [abbrv] kylin git commit: KYLIN-1466 Update kylin.sh and bring dependencies to runnable classes

Posted by li...@apache.org.
KYLIN-1466 Update kylin.sh and bring dependencies to runnable classes
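
With this change, any class launched through kylin.sh's classpath branch gets the resolved Hive/HBase dependency lists as JVM system properties, matching what the web server already receives. A hypothetical invocation (the class name appears elsewhere in this thread; its flags are illustrative assumptions):

    # hypothetical: run a tool class with kylin.hive.dependency and
    # kylin.hbase.dependency set on its JVM
    $KYLIN_HOME/bin/kylin.sh org.apache.kylin.job.CubeMetaExtractor -cube sample_cube -destDir /tmp/meta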


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/daeaf084
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/daeaf084
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/daeaf084

Branch: refs/heads/master
Commit: daeaf08444ebd86a9de4e8addafa7541c25523be
Parents: 2d4922d
Author: lidongsjtu <li...@apache.org>
Authored: Thu Mar 3 23:26:32 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Thu Mar 3 23:26:55 2016 +0800

----------------------------------------------------------------------
 build/bin/kylin.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/daeaf084/build/bin/kylin.sh
----------------------------------------------------------------------
diff --git a/build/bin/kylin.sh b/build/bin/kylin.sh
index a21928e..f4d9fac 100644
--- a/build/bin/kylin.sh
+++ b/build/bin/kylin.sh
@@ -206,7 +206,7 @@ then
 
     export HBASE_CLASSPATH=${KYLIN_HOME}/lib/*:$hive_dependency:${HBASE_CLASSPATH}
 
-    exec hbase -Dlog4j.configuration=kylin-log4j.properties "$@"
+    exec hbase ${KYLIN_EXTRA_START_OPTS} -Dkylin.hive.dependency=${hive_dependency} -Dkylin.hbase.dependency=${hbase_dependency} -Dlog4j.configuration=kylin-log4j.properties "$@"
 
 else
     echo "usage: kylin.sh start or kylin.sh stop"


[30/50] [abbrv] kylin git commit: KYLIN-1471 - LIMIT after having clause should not be pushed down to storage context

Posted by li...@apache.org.
KYLIN-1471 - LIMIT after having clause should not be pushed down to storage context
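
The underlying reasoning: storage only evaluates the WHERE clause, so a HAVING filter runs after aggregation on the query engine side. If a LIMIT sitting above that HAVING were pushed into the storage context, storage could stop after the limit count of pre-HAVING groups and the final answer would come up short. A minimal illustration, reusing the test schema from this repo (not the exact test query added below):

    select lstg_format_name, count(*) as cnt
     from test_kylin_fact
     group by lstg_format_name
     having count(*) > 100
     limit 10
    -- pushing "limit 10" down would scan only 10 pre-HAVING groups,
    -- potentially returning fewer rows than the query should produce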


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/2f44970d
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/2f44970d
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/2f44970d

Branch: refs/heads/master
Commit: 2f44970d28318a21c16aaa8f28e844c5d88f5e3d
Parents: b9a3418
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri Mar 4 16:00:11 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Fri Mar 4 16:01:04 2016 +0800

----------------------------------------------------------------------
 .../resources/query/sql_tableau/query29.sql     | 29 ++++++++++++++++++++
 .../apache/kylin/query/relnode/OLAPContext.java |  1 +
 .../kylin/query/relnode/OLAPFilterRel.java      |  2 ++
 .../kylin/query/relnode/OLAPLimitRel.java       | 21 ++++++++------
 4 files changed, 44 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/2f44970d/kylin-it/src/test/resources/query/sql_tableau/query29.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_tableau/query29.sql b/kylin-it/src/test/resources/query/sql_tableau/query29.sql
new file mode 100644
index 0000000..0858087
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_tableau/query29.sql
@@ -0,0 +1,29 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+SELECT * 
+ FROM ( 
+ select test_kylin_fact.lstg_format_name, test_cal_dt.week_beg_dt,sum(test_kylin_fact.price) as GMV 
+ , count(*) as TRANS_CNT 
+ from test_kylin_fact 
+ inner JOIN edw.test_cal_dt as test_cal_dt 
+ ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt 
+ where test_cal_dt.week_beg_dt between DATE '2013-05-01' and DATE '2013-08-01' 
+ group by test_kylin_fact.lstg_format_name, test_cal_dt.week_beg_dt 
+ ) "TableauSQL" 
+ LIMIT 1 

http://git-wip-us.apache.org/repos/asf/kylin/blob/2f44970d/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
index 6de1790..431328f 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
@@ -103,6 +103,7 @@ public class OLAPContext {
     public OLAPTableScan firstTableScan = null; // to be fact table scan except "select * from lookupTable"
     public TupleInfo returnTupleInfo = null;
     public boolean afterAggregate = false;
+    public boolean afterSkippedFilter = false;
     public boolean afterJoin = false;
     public boolean hasJoin = false;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/2f44970d/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
index 7b8bfdb..a847890 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
@@ -271,6 +271,8 @@ public class OLAPFilterRel extends Filter implements OLAPRel {
         // only translate where clause and don't translate having clause
         if (!context.afterAggregate) {
             translateFilter(context);
+        } else {
+            context.afterSkippedFilter = true;//having clause is skipped
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/2f44970d/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
index 572a5c7..82aa9de 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
@@ -73,16 +73,19 @@ public class OLAPLimitRel extends SingleRel implements OLAPRel {
         implementor.visitChild(getInput(), this);
 
         this.columnRowType = buildColumnRowType();
-
         this.context = implementor.getContext();
-        Number limitValue = (Number) (((RexLiteral) localFetch).getValue());
-        int limit = limitValue.intValue();
-        this.context.storageContext.setLimit(limit);
-        this.context.limit = limit;
-        if(localOffset != null) {
-            Number offsetValue = (Number) (((RexLiteral) localOffset).getValue());
-            int offset = offsetValue.intValue();
-            this.context.storageContext.setOffset(offset);
+
+        if (!context.afterSkippedFilter) {
+            Number limitValue = (Number) (((RexLiteral) localFetch).getValue());
+            int limit = limitValue.intValue();
+            this.context.storageContext.setLimit(limit);
+            this.context.limit = limit;
+
+            if (localOffset != null) {
+                Number offsetValue = (Number) (((RexLiteral) localOffset).getValue());
+                int offset = offsetValue.intValue();
+                this.context.storageContext.setOffset(offset);
+            }
         }
     }
 


[43/50] [abbrv] kylin git commit: initial commit for KYLIN-1431

Posted by li...@apache.org.
initial commit for KYLIN-1431


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/9021f17d
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/9021f17d
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/9021f17d

Branch: refs/heads/master
Commit: 9021f17d85be01bf34b48a7a31be82f53ceb9c8f
Parents: 0ec3ed0
Author: shaofengshi <sh...@apache.org>
Authored: Wed Mar 2 11:16:46 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Mar 8 15:07:04 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/job/CubeMetaExtractor.java |   2 +-
 build/bin/streaming_build.sh                    |   4 +-
 build/bin/streaming_fillgap.sh                  |   5 +-
 .../kylin/metadata/model/ISourceAware.java      |   1 +
 .../kylin/engine/streaming/BootstrapConfig.java |  20 +-
 .../kylin/engine/streaming/IStreamingInput.java |   3 +-
 .../streaming/OneOffStreamingBuilder.java       |  17 +-
 .../kylin/engine/streaming/StreamingConfig.java |  33 +--
 .../engine/streaming/StreamingManager.java      |  12 +
 .../engine/streaming/cli/StreamingCLI.java      |  21 +-
 .../engine/streaming/util/StreamingUtils.java   |  18 +-
 .../kafka/default.streaming_table.json          |  21 ++
 .../localmeta/kafka/kafka_test.json             |  20 --
 .../kafka/test_streaming_table_cube.json        |  22 --
 .../kafka/test_streaming_table_ii.json          |  22 --
 .../streaming/default.streaming_table.json      |   6 +
 .../localmeta/streaming/kafka_test.json         |  20 --
 .../streaming/test_streaming_table_cube.json    |   8 -
 .../streaming/test_streaming_table_ii.json      |   8 -
 .../kylin/provision/BuildCubeWithStream.java    |  16 +-
 .../kylin/rest/controller/CubeController.java   | 234 -------------------
 .../rest/controller/StreamingController.java    |   4 +-
 .../kylin/rest/service/StreamingService.java    |  18 +-
 .../kylin/source/kafka/KafkaStreamingInput.java |  78 ++++---
 .../kylin/source/kafka/StreamingParser.java     |   6 +-
 25 files changed, 163 insertions(+), 456 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java b/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
index 527ef0a..ef27ade 100644
--- a/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
+++ b/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
@@ -227,7 +227,7 @@ public class CubeMetaExtractor extends AbstractApplication {
 
     private void dealWithStreaming(CubeInstance cube) {
         for (StreamingConfig streamingConfig : streamingManager.listAllStreaming()) {
-            if (streamingConfig.getCubeName() != null && streamingConfig.getCubeName().equalsIgnoreCase(cube.getName())) {
+            if (streamingConfig.getName() != null && streamingConfig.getName().equalsIgnoreCase(cube.getFactTable())) {
                 requiredResources.add(StreamingConfig.concatResourcePath(streamingConfig.getName()));
                 requiredResources.add(KafkaConfig.concatResourcePath(streamingConfig.getName()));
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/build/bin/streaming_build.sh
----------------------------------------------------------------------
diff --git a/build/bin/streaming_build.sh b/build/bin/streaming_build.sh
index a96ecc1..ed19036 100644
--- a/build/bin/streaming_build.sh
+++ b/build/bin/streaming_build.sh
@@ -20,7 +20,7 @@
 source /etc/profile
 source ~/.bash_profile
 
-STREAMING=$1
+CUBE=$1
 INTERVAL=$2
 DELAY=$3
 CURRENT_TIME_IN_SECOND=`date +%s`
@@ -30,4 +30,4 @@ END=$(($CURRENT_TIME - CURRENT_TIME%INTERVAL - DELAY + INTERVAL))
 
 ID="$START"_"$END"
 echo "building for ${ID}" >> ${KYLIN_HOME}/logs/build_trace.log
-sh ${KYLIN_HOME}/bin/kylin.sh streaming start ${STREAMING} ${ID} -start ${START} -end ${END} -streaming ${STREAMING}
\ No newline at end of file
+sh ${KYLIN_HOME}/bin/kylin.sh streaming start ${CUBE} ${ID} -start ${START} -end ${END} -cube ${CUBE}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/build/bin/streaming_fillgap.sh
----------------------------------------------------------------------
diff --git a/build/bin/streaming_fillgap.sh b/build/bin/streaming_fillgap.sh
index 74d9037..c67809a 100644
--- a/build/bin/streaming_fillgap.sh
+++ b/build/bin/streaming_fillgap.sh
@@ -20,8 +20,7 @@
 source /etc/profile
 source ~/.bash_profile
 
-streaming=$1
-margin=$2
+cube=$1
 
 cd ${KYLIN_HOME}
-sh ${KYLIN_HOME}/bin/kylin.sh streaming start ${streaming} fillgap -streaming ${streaming} -fillGap true -margin ${margin}
\ No newline at end of file
+sh ${KYLIN_HOME}/bin/kylin.sh streaming start ${cube} fillgap -cube ${cube} -fillGap true
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
index 3d89f40..8cfda15 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
@@ -21,6 +21,7 @@ package org.apache.kylin.metadata.model;
 public interface ISourceAware {
 
     public static final int ID_HIVE = 0;
+    public static final int ID_STREAMING = 1;
     public static final int ID_SPARKSQL = 5;
 
     int getSourceType();

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
index a3e2db5..a4c4618 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
@@ -4,9 +4,7 @@ package org.apache.kylin.engine.streaming;
  */
 public class BootstrapConfig {
 
-    private String streaming;
-    private int partitionId = -1;
-
+    private String cubeName;
     private long start = 0L;
     private long end = 0L;
 
@@ -28,20 +26,12 @@ public class BootstrapConfig {
         this.end = end;
     }
 
-    public String getStreaming() {
-        return streaming;
-    }
-
-    public void setStreaming(String streaming) {
-        this.streaming = streaming;
-    }
-
-    public int getPartitionId() {
-        return partitionId;
+    public String getCubeName() {
+        return cubeName;
     }
 
-    public void setPartitionId(int partitionId) {
-        this.partitionId = partitionId;
+    public void setCubeName(String cubeName) {
+        this.cubeName = cubeName;
     }
 
     public boolean isFillGap() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/IStreamingInput.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/IStreamingInput.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/IStreamingInput.java
index 1cf3d98..4b4cf02 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/IStreamingInput.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/IStreamingInput.java
@@ -34,11 +34,12 @@
 package org.apache.kylin.engine.streaming;
 
 import org.apache.kylin.common.util.StreamingBatch;
+import org.apache.kylin.metadata.realization.RealizationType;
 
 /**
  */
 public interface IStreamingInput {
 
-    StreamingBatch getBatchWithTimeWindow(String streamingConfig, int id, long startTime, long endTime);
+    StreamingBatch getBatchWithTimeWindow(RealizationType realizationType, String realizationName, int id, long startTime, long endTime);
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/OneOffStreamingBuilder.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/OneOffStreamingBuilder.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/OneOffStreamingBuilder.java
index 3fbade2..6bad000 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/OneOffStreamingBuilder.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/OneOffStreamingBuilder.java
@@ -43,6 +43,7 @@ import org.apache.kylin.metadata.model.IBuildable;
 import org.apache.kylin.metadata.model.TblColRef;
 
 import com.google.common.base.Preconditions;
+import org.apache.kylin.metadata.realization.RealizationType;
 
 /**
  */
@@ -53,23 +54,25 @@ public class OneOffStreamingBuilder {
     private final StreamingBatchBuilder streamingBatchBuilder;
     private final long startTime;
     private final long endTime;
-    private final String streamingConfig;
+    private final RealizationType realizationType;
+    private final String realizationName;
 
-    public OneOffStreamingBuilder(String streamingConfig, long startTime, long endTime) {
+    public OneOffStreamingBuilder(RealizationType realizationType, String realizationName, long startTime, long endTime) {
         Preconditions.checkArgument(startTime < endTime);
         this.startTime = startTime;
         this.endTime = endTime;
-        this.streamingConfig = Preconditions.checkNotNull(streamingConfig);
-        this.streamingInput = Preconditions.checkNotNull(StreamingUtils.getStreamingInput(streamingConfig));
-        this.streamingOutput = Preconditions.checkNotNull(StreamingUtils.getStreamingOutput(streamingConfig));
-        this.streamingBatchBuilder = Preconditions.checkNotNull(StreamingUtils.getMicroBatchBuilder(streamingConfig));
+        this.realizationType = Preconditions.checkNotNull(realizationType);
+        this.realizationName = Preconditions.checkNotNull(realizationName);
+        this.streamingInput = Preconditions.checkNotNull(StreamingUtils.getStreamingInput());
+        this.streamingOutput = Preconditions.checkNotNull(StreamingUtils.getStreamingOutput());
+        this.streamingBatchBuilder = Preconditions.checkNotNull(StreamingUtils.getMicroBatchBuilder(realizationType, realizationName));
     }
 
     public Runnable build() {
         return new Runnable() {
             @Override
             public void run() {
-                StreamingBatch streamingBatch = streamingInput.getBatchWithTimeWindow(streamingConfig, -1, startTime, endTime);
+                StreamingBatch streamingBatch = streamingInput.getBatchWithTimeWindow(realizationType, realizationName, -1, startTime, endTime);
                 final IBuildable buildable = streamingBatchBuilder.createBuildable(streamingBatch);
                 final Map<Long, HyperLogLogPlusCounter> samplingResult = streamingBatchBuilder.sampling(streamingBatch);
                 final Map<TblColRef, Dictionary<String>> dictionaryMap = streamingBatchBuilder.buildDictionary(streamingBatch, buildable);

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
index f0a7ab1..c8d1911 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
@@ -55,29 +55,24 @@ public class StreamingConfig extends RootPersistentEntity {
 
     public static Serializer<StreamingConfig> SERIALIZER = new JsonSerializer<StreamingConfig>(StreamingConfig.class);
 
+    public static final String STREAMING_TYPE_KAFKA = "kafka";
+
     @JsonProperty("name")
     private String name;
 
-    @JsonProperty("iiName")
-    private String iiName;
-
-    @JsonProperty("cubeName")
-    private String cubeName;
+    @JsonProperty("type")
+    private String type = STREAMING_TYPE_KAFKA;
 
-    public String getCubeName() {
-        return cubeName;
+    public String getType() {
+        return type;
     }
 
-    public void setCubeName(String cubeName) {
-        this.cubeName = cubeName;
+    public void setType(String type) {
+        this.type = type;
     }
 
-    public String getIiName() {
-        return iiName;
-    }
-
-    public void setIiName(String iiName) {
-        this.iiName = iiName;
+    public String getResourcePath() {
+        return concatResourcePath(name);
     }
 
     public String getName() {
@@ -88,12 +83,8 @@ public class StreamingConfig extends RootPersistentEntity {
         this.name = name;
     }
 
-    public String getResourcePath() {
-        return concatResourcePath(name);
-    }
-
-    public static String concatResourcePath(String streamingName) {
-        return ResourceStore.STREAMING_RESOURCE_ROOT + "/" + streamingName + ".json";
+    public static String concatResourcePath(String name) {
+        return ResourceStore.STREAMING_RESOURCE_ROOT + "/" + name + ".json";
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
index e0b086d..f652762 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
@@ -104,6 +104,18 @@ public class StreamingManager {
         }
     }
 
+    private static String formatStreamingConfigPath(String name) {
+        return ResourceStore.STREAMING_RESOURCE_ROOT + "/" + name + ".json";
+    }
+
+    private static String formatStreamingOutputPath(String streaming, int partition) {
+        return ResourceStore.STREAMING_OUTPUT_RESOURCE_ROOT + "/" + streaming + "_" + partition + ".json";
+    }
+
+    private static String formatStreamingOutputPath(String streaming, List<Integer> partitions) {
+        return ResourceStore.STREAMING_OUTPUT_RESOURCE_ROOT + "/" + streaming + "_" + StringUtils.join(partitions, "_") + ".json";
+    }
+
     public StreamingConfig getStreamingConfig(String name) {
         return streamingMap.get(name);
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
index a73a6ac..0bab396 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
@@ -44,6 +44,7 @@ import org.apache.kylin.engine.streaming.OneOffStreamingBuilder;
 import org.apache.kylin.engine.streaming.StreamingConfig;
 import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.engine.streaming.monitor.StreamingMonitor;
+import org.apache.kylin.metadata.realization.RealizationType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -55,7 +56,7 @@ public class StreamingCLI {
 
     public static void main(String[] args) {
         try {
-            Preconditions.checkArgument(args[0].equals("streaming"));
+            Preconditions.checkArgument(args[0].equals("cube"));
             Preconditions.checkArgument(args[1].equals("start"));
 
             int i = 2;
@@ -69,11 +70,8 @@ public class StreamingCLI {
                 case "-end":
                     bootstrapConfig.setEnd(Long.parseLong(args[++i]));
                     break;
-                case "-streaming":
-                    bootstrapConfig.setStreaming(args[++i]);
-                    break;
-                case "-partition":
-                    bootstrapConfig.setPartitionId(Integer.parseInt(args[++i]));
+                case "-cube":
+                    bootstrapConfig.setCubeName(args[++i]);
                     break;
                 case "-fillGap":
                     bootstrapConfig.setFillGap(Boolean.parseBoolean(args[++i]));
@@ -84,14 +82,13 @@ public class StreamingCLI {
                 i++;
             }
             if (bootstrapConfig.isFillGap()) {
-                final StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(bootstrapConfig.getStreaming());
-                final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName());
+                final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(bootstrapConfig.getCubeName());
                 logger.info("all gaps:" + StringUtils.join(gaps, ","));
                 for (Pair<Long, Long> gap : gaps) {
-                    startOneOffCubeStreaming(bootstrapConfig.getStreaming(), gap.getFirst(), gap.getSecond());
+                    startOneOffCubeStreaming(bootstrapConfig.getCubeName(), gap.getFirst(), gap.getSecond());
                 }
             } else {
-                startOneOffCubeStreaming(bootstrapConfig.getStreaming(), bootstrapConfig.getStart(), bootstrapConfig.getEnd());
+                startOneOffCubeStreaming(bootstrapConfig.getCubeName(), bootstrapConfig.getStart(), bootstrapConfig.getEnd());
                 logger.info("streaming process finished, exit with 0");
                 System.exit(0);
             }
@@ -102,8 +99,8 @@ public class StreamingCLI {
         }
     }
     
-    private static void startOneOffCubeStreaming(String streaming, long start, long end) {
-        final Runnable runnable = new OneOffStreamingBuilder(streaming, start, end).build();
+    private static void startOneOffCubeStreaming(String cubeName, long start, long end) {
+        final Runnable runnable = new OneOffStreamingBuilder(RealizationType.CUBE, cubeName, start, end).build();
         runnable.run();
     }
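
For orientation: with the -streaming and -partition options gone, the cube is the unit of a one-off build. Assuming a cube named test_streaming_table_cube and a 16-hour window (both values hypothetical), the command assembled by streaming_build.sh above boils down inside StreamingCLI to:

    // equivalent of: kylin.sh streaming start test_streaming_table_cube <id>
    //     -start 1420070400000 -end 1420128000000 -cube test_streaming_table_cube
    new OneOffStreamingBuilder(RealizationType.CUBE, "test_streaming_table_cube",
            1420070400000L, 1420128000000L).build().run();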
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/util/StreamingUtils.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/util/StreamingUtils.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/util/StreamingUtils.java
index 0ae7143..66a0af2 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/util/StreamingUtils.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/util/StreamingUtils.java
@@ -43,29 +43,27 @@ import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.engine.streaming.cube.StreamingCubeBuilder;
 
 import com.google.common.base.Preconditions;
+import org.apache.kylin.metadata.realization.RealizationType;
 
 /**
  * TODO: like MRUtil, use Factory pattern to allow config
  */
 public class StreamingUtils {
 
-    public static IStreamingInput getStreamingInput(String streaming) {
+    public static IStreamingInput getStreamingInput() {
         return (IStreamingInput) ClassUtil.newInstance("org.apache.kylin.source.kafka.KafkaStreamingInput");
     }
 
-    public static IStreamingOutput getStreamingOutput(String streaming) {
+    public static IStreamingOutput getStreamingOutput() {
         return (IStreamingOutput) ClassUtil.newInstance("org.apache.kylin.storage.hbase.steps.HBaseStreamingOutput");
     }
 
-    public static StreamingBatchBuilder getMicroBatchBuilder(String streaming) {
-        final StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(streaming);
-        Preconditions.checkNotNull(streamingConfig);
-        if (streamingConfig.getCubeName() != null) {
-            return new StreamingCubeBuilder(streamingConfig.getCubeName());
-        } else if (streamingConfig.getIiName() != null) {
-            throw new UnsupportedOperationException("not implemented yet");
+    public static StreamingBatchBuilder getMicroBatchBuilder(RealizationType realizationType, String realizationName) {
+        Preconditions.checkNotNull(realizationName);
+        if (realizationType == RealizationType.CUBE) {
+            return new StreamingCubeBuilder(realizationName);
         } else {
-            throw new UnsupportedOperationException("StreamingConfig is not valid");
+            throw new UnsupportedOperationException("not implemented yet");
         }
     }
 }
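
The TODO at the top of this class points toward a MRUtil-style factory; a minimal sketch of that direction, using a hypothetical property key that is not part of this commit, might read:

    // hypothetical: resolve the implementation from configuration instead of the
    // hard-coded literal; "kylin.streaming.input.class" is an assumed key
    public static IStreamingInput getStreamingInput() {
        String clz = System.getProperty("kylin.streaming.input.class",
                "org.apache.kylin.source.kafka.KafkaStreamingInput");
        return (IStreamingInput) ClassUtil.newInstance(clz);
    }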

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/examples/test_case_data/localmeta/kafka/default.streaming_table.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kafka/default.streaming_table.json b/examples/test_case_data/localmeta/kafka/default.streaming_table.json
new file mode 100644
index 0000000..c99b8e5
--- /dev/null
+++ b/examples/test_case_data/localmeta/kafka/default.streaming_table.json
@@ -0,0 +1,21 @@
+{
+  "version":"2.1",
+  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
+  "name": "default.streaming_table",
+  "topic": "test_streaming_table_topic_xyz",
+  "timeout": 60000,
+  "bufferSize": 65536,
+  "parserName": "org.apache.kylin.source.kafka.TimedJsonStreamParser",
+  "last_modified": 0,
+  "clusters": [
+    {
+      "brokers": [
+        {
+          "id": 0,
+          "host": "sandbox",
+          "port": 6667
+        }
+      ]
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/examples/test_case_data/localmeta/kafka/kafka_test.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kafka/kafka_test.json b/examples/test_case_data/localmeta/kafka/kafka_test.json
deleted file mode 100644
index a20f71e..0000000
--- a/examples/test_case_data/localmeta/kafka/kafka_test.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
-  "version":"2.1",
-  "uuid": "8b2b9dfe-900c-4d39-bf89-8472ec6c3c0d",
-  "name": "kafka_test",
-  "topic": "kafka_stream_test",
-  "timeout": 60000,
-  "bufferSize": 65536,
-  "last_modified": 0,
-  "clusters": [
-    {
-      "brokers": [
-        {
-          "id": 0,
-          "host": "sandbox.hortonworks.com",
-          "port": 6667
-        }
-      ]
-    }
-  ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/examples/test_case_data/localmeta/kafka/test_streaming_table_cube.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kafka/test_streaming_table_cube.json b/examples/test_case_data/localmeta/kafka/test_streaming_table_cube.json
deleted file mode 100644
index 554fa62..0000000
--- a/examples/test_case_data/localmeta/kafka/test_streaming_table_cube.json
+++ /dev/null
@@ -1,22 +0,0 @@
-{
-  "version":"2.1",
-  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
-  "name": "test_streaming_table_cube",
-  "topic": "test_streaming_table_topic_xyz",
-  "timeout": 60000,
-  "bufferSize": 65536,
-  "parserName": "org.apache.kylin.source.kafka.TimedJsonStreamParser",
-  "partition": 1,
-  "last_modified": 0,
-  "clusters": [
-    {
-      "brokers": [
-        {
-          "id": 0,
-          "host": "sandbox",
-          "port": 6667
-        }
-      ]
-    }
-  ]
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/examples/test_case_data/localmeta/kafka/test_streaming_table_ii.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kafka/test_streaming_table_ii.json b/examples/test_case_data/localmeta/kafka/test_streaming_table_ii.json
deleted file mode 100644
index b6f18c7..0000000
--- a/examples/test_case_data/localmeta/kafka/test_streaming_table_ii.json
+++ /dev/null
@@ -1,22 +0,0 @@
-{
-  "version":"2.1",
-  "uuid": "8b2b9dfe-900c-4d39-bf89-8472ec909322",
-  "name": "test_streaming_table_ii",
-  "topic": "test_streaming_table_topic_xyz",
-  "timeout": 60000,
-  "bufferSize": 65536,
-  "parserName": "org.apache.kylin.source.kafka.JsonStreamParser",
-  "partition": 1,
-  "last_modified": 0,
-  "clusters": [
-    {
-      "brokers": [
-        {
-          "id": 0,
-          "host": "sandbox",
-          "port": 6667
-        }
-      ]
-    }
-  ]
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/examples/test_case_data/localmeta/streaming/default.streaming_table.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/streaming/default.streaming_table.json b/examples/test_case_data/localmeta/streaming/default.streaming_table.json
new file mode 100644
index 0000000..6eb4a88
--- /dev/null
+++ b/examples/test_case_data/localmeta/streaming/default.streaming_table.json
@@ -0,0 +1,6 @@
+{
+  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
+  "name": "default.streaming_table",
+  "type": "kafka",
+  "last_modified": 0
+}
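
After this commit a streaming source is described by two sibling records keyed by the same fact table name: streaming/<table>.json carries the StreamingConfig (name plus type), while kafka/<table>.json keeps the Kafka connection details (topic, clusters, parser). A hedged lookup sketch, reusing the fixture name above:

    // the two records share the table name as their key:
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    StreamingConfig sc = StreamingManager.getInstance(config)
            .getStreamingConfig("default.streaming_table");  // type = "kafka"
    KafkaConfig kc = KafkaConfigManager.getInstance(config)
            .getKafkaConfig(sc.getName());                   // topic, brokers, parser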

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/examples/test_case_data/localmeta/streaming/kafka_test.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/streaming/kafka_test.json b/examples/test_case_data/localmeta/streaming/kafka_test.json
deleted file mode 100644
index a20f71e..0000000
--- a/examples/test_case_data/localmeta/streaming/kafka_test.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
-  "version":"2.1",
-  "uuid": "8b2b9dfe-900c-4d39-bf89-8472ec6c3c0d",
-  "name": "kafka_test",
-  "topic": "kafka_stream_test",
-  "timeout": 60000,
-  "bufferSize": 65536,
-  "last_modified": 0,
-  "clusters": [
-    {
-      "brokers": [
-        {
-          "id": 0,
-          "host": "sandbox.hortonworks.com",
-          "port": 6667
-        }
-      ]
-    }
-  ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/examples/test_case_data/localmeta/streaming/test_streaming_table_cube.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/streaming/test_streaming_table_cube.json b/examples/test_case_data/localmeta/streaming/test_streaming_table_cube.json
deleted file mode 100644
index ecf0511..0000000
--- a/examples/test_case_data/localmeta/streaming/test_streaming_table_cube.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "version":"2.1",
-  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
-  "name": "test_streaming_table_cube",
-  "cubeName": "test_streaming_table_cube",
-  "partition": 1,
-  "last_modified": 0
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/examples/test_case_data/localmeta/streaming/test_streaming_table_ii.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/streaming/test_streaming_table_ii.json b/examples/test_case_data/localmeta/streaming/test_streaming_table_ii.json
deleted file mode 100644
index 022ab70..0000000
--- a/examples/test_case_data/localmeta/streaming/test_streaming_table_ii.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "version":"2.1",
-  "uuid": "8b2b9dfe-900c-4d39-bf89-8472ec909322",
-  "name": "test_streaming_table_ii",
-  "iiName": "test_streaming_table_ii",
-  "partition": 1,
-  "last_modified": 0
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index 27226e7..eeff999 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -27,10 +27,13 @@ import org.apache.kylin.common.util.AbstractKylinTestCase;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.streaming.OneOffStreamingBuilder;
 import org.apache.kylin.engine.streaming.StreamingConfig;
 import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.job.DeployUtil;
+import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.source.kafka.KafkaConfigManager;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.slf4j.Logger;
@@ -42,7 +45,7 @@ import org.slf4j.LoggerFactory;
 public class BuildCubeWithStream {
 
     private static final Logger logger = LoggerFactory.getLogger(BuildCubeWithStream.class);
-    private static final String streamingName = "test_streaming_table_cube";
+    private static final String cubeName = "test_streaming_table_cube";
     private static final long startTime = DateFormat.stringToMillis("2015-01-01 00:00:00");
     private static final long endTime = DateFormat.stringToMillis("2015-01-03 00:00:00");
     private static final long batchInterval = 16 * 60 * 60 * 1000;//16 hours
@@ -75,15 +78,16 @@ public class BuildCubeWithStream {
         DeployUtil.overrideJobJarLocations();
 
         kylinConfig = KylinConfig.getInstanceFromEnv();
-
-        final StreamingConfig config = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(streamingName);
+        final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
+        final String factTable = cubeInstance.getFactTable();
+        final StreamingConfig config = StreamingManager.getInstance(kylinConfig).getStreamingConfig(factTable);
 
         //Use a random topic for kafka data stream
-        KafkaConfig streamingConfig = KafkaConfigManager.getInstance(kylinConfig).getKafkaConfig(streamingName);
+        KafkaConfig streamingConfig = KafkaConfigManager.getInstance(kylinConfig).getKafkaConfig(config.getName());
         streamingConfig.setTopic(UUID.randomUUID().toString());
         KafkaConfigManager.getInstance(kylinConfig).saveKafkaConfig(streamingConfig);
 
-        DeployUtil.prepareTestDataForStreamingCube(startTime, endTime, config.getCubeName(), streamingConfig);
+        DeployUtil.prepareTestDataForStreamingCube(startTime, endTime, cubeName, streamingConfig);
     }
 
     public static void afterClass() throws Exception {
@@ -94,7 +98,7 @@ public class BuildCubeWithStream {
         logger.info("start time:" + startTime + " end time:" + endTime + " batch interval:" + batchInterval + " batch count:" + ((endTime - startTime) / batchInterval));
         for (long start = startTime; start < endTime; start += batchInterval) {
             logger.info(String.format("build batch:{%d, %d}", start, start + batchInterval));
-            new OneOffStreamingBuilder(streamingName, start, start + batchInterval).build().run();
+            new OneOffStreamingBuilder(RealizationType.CUBE, cubeName, start, start + batchInterval).build().run();
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 9afa750..e60f330 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -284,92 +284,6 @@ public class CubeController extends BasicController {
             throw new InternalErrorException("Failed to clone cube ", e);
         }
 
-        boolean isStreamingCube = false, cloneStreamingConfigSuccess = false, cloneKafkaConfigSuccess = false;
-
-
-        List<StreamingConfig> streamingConfigs = null;
-        try {
-            streamingConfigs = streamingService.listAllStreamingConfigs(cubeName);
-            if (streamingConfigs.size() != 0) {
-                isStreamingCube = true;
-            }
-
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-
-        StreamingConfig newStreamingConfig = null;
-        KafkaConfig newKafkaConfig = null;
-
-        try {
-
-            if (isStreamingCube) {
-
-                isStreamingCube = true;
-                newStreamingConfig = streamingConfigs.get(0).clone();
-                newStreamingConfig.setName(newCubeName + "_STREAMING");
-                newStreamingConfig.updateRandomUuid();
-                newStreamingConfig.setLastModified(0);
-                newStreamingConfig.setCubeName(newCubeName);
-                try {
-                    streamingService.createStreamingConfig(newStreamingConfig);
-                    cloneStreamingConfigSuccess = true;
-                } catch (IOException e) {
-                    throw new InternalErrorException("Failed to clone streaming config. ", e);
-                }
-
-                //StreamingConfig name and KafkaConfig name is the same for same cube
-                String kafkaConfigName = streamingConfigs.get(0).getName();
-                KafkaConfig kafkaConfig = null;
-                try {
-                    kafkaConfig = kafkaConfigService.getKafkaConfig(kafkaConfigName);
-                    if (kafkaConfig != null) {
-                        newKafkaConfig = kafkaConfig.clone();
-                        newKafkaConfig.setName(newStreamingConfig.getName());
-                        newKafkaConfig.setLastModified(0);
-                        newKafkaConfig.updateRandomUuid();
-                    }
-                } catch (IOException e) {
-                    throw new InternalErrorException("Failed to get kafka config info. ", e);
-                }
-
-                try {
-                    kafkaConfigService.createKafkaConfig(newKafkaConfig);
-                    cloneKafkaConfigSuccess = true;
-                } catch (IOException e) {
-                    throw new InternalErrorException("Failed to clone streaming config. ", e);
-                }
-            }
-        } finally {
-
-            //rollback if failed
-            if (isStreamingCube) {
-                if (cloneStreamingConfigSuccess == false || cloneKafkaConfigSuccess == false) {
-                    try {
-                        cubeService.deleteCube(newCube);
-                    } catch (Exception ex) {
-                        throw new InternalErrorException("Failed, and failed to rollback on delete cube. " + " Caused by: " + ex.getMessage(), ex);
-                    }
-                    if (cloneStreamingConfigSuccess == true) {
-                        try {
-                            streamingService.dropStreamingConfig(newStreamingConfig);
-                        } catch (IOException e) {
-                            throw new InternalErrorException("Failed to clone cube, and StreamingConfig created and failed to delete: " + e.getLocalizedMessage());
-                        }
-                    }
-                    if (cloneKafkaConfigSuccess == true) {
-                        try {
-                            kafkaConfigService.dropKafkaConfig(newKafkaConfig);
-                        } catch (IOException e) {
-                            throw new InternalErrorException("Failed to clone cube, and KafkaConfig created and failed to delete: " + e.getLocalizedMessage());
-                        }
-                    }
-
-                }
-
-            }
-        }
-
         return newCube;
 
     }
@@ -400,27 +314,6 @@ public class CubeController extends BasicController {
             throw new NotFoundException("Cube with name " + cubeName + " not found..");
         }
 
-        //drop related StreamingConfig KafkaConfig if exist
-        try {
-            List<StreamingConfig> configs = streamingService.listAllStreamingConfigs(cubeName);
-            for (StreamingConfig config : configs) {
-                try {
-                    streamingService.dropStreamingConfig(config);
-                } catch (IOException e) {
-                    logger.error(e.getLocalizedMessage(), e);
-                    throw new InternalErrorException("Failed to delete StreamingConfig. " + " Caused by: " + e.getMessage(), e);
-                }
-                try {
-                    KafkaConfig kfkConfig = kafkaConfigService.getKafkaConfig(config.getName());
-                    kafkaConfigService.dropKafkaConfig(kfkConfig);
-                } catch (IOException e) {
-                    throw new InternalErrorException("Failed to delete KafkaConfig. " + " Caused by: " + e.getMessage(), e);
-                }
-            }
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-
         //drop Cube
         try {
             cubeService.deleteCube(cube);
@@ -587,133 +480,6 @@ public class CubeController extends BasicController {
             return cubeRequest;
         }
 
-        boolean updateStreamingConfigSuccess = false, updateKafkaConfigSuccess = false;
-
-        boolean isStreamingCube = cubeRequest.getStreamingCube() != null && cubeRequest.getStreamingCube().equals("true");
-
-        //oldConfig is for recover use
-        StreamingConfig streamingConfig = null, oldStreamingConfig = null;
-        KafkaConfig kafkaConfig = null, oldKafkaConfig = null;
-        if (isStreamingCube) {
-            streamingConfig = deserializeStreamingDesc(cubeRequest);
-            kafkaConfig = deserializeKafkaDesc(cubeRequest);
-            try {
-                oldKafkaConfig = kafkaConfigService.getKafkaConfig(kafkaConfig.getName());
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-            oldStreamingConfig = streamingService.getStreamingManager().getStreamingConfig(streamingConfig.getName());
-        }
-        try {
-            //streaming Cube
-            if (isStreamingCube) {
-                if (streamingConfig == null) {
-                    cubeRequest.setMessage("No StreamingConfig info to update.");
-                    return cubeRequest;
-                }
-                if (kafkaConfig == null) {
-                    cubeRequest.setMessage("No KafkaConfig info to update.");
-                    return cubeRequest;
-                }
-
-                if (oldStreamingConfig == null) {
-                    streamingConfig.setUuid(UUID.randomUUID().toString());
-                    try {
-                        streamingService.createStreamingConfig(streamingConfig);
-                        updateStreamingConfigSuccess = true;
-                    } catch (IOException e) {
-                        logger.error("Failed to add StreamingConfig:" + e.getLocalizedMessage(), e);
-                        throw new InternalErrorException("Failed to add StreamingConfig: " + e.getLocalizedMessage());
-                    }
-                } else {
-                    try {
-                        streamingConfig = streamingService.updateStreamingConfig(streamingConfig);
-                        updateStreamingConfigSuccess = true;
-
-                    } catch (IOException e) {
-                        logger.error("Failed to update StreamingConfig:" + e.getLocalizedMessage(), e);
-                        throw new InternalErrorException("Failed to update StreamingConfig: " + e.getLocalizedMessage());
-                    }
-                }
-                if (oldKafkaConfig == null) {
-                    kafkaConfig.setUuid(UUID.randomUUID().toString());
-                    try {
-                        kafkaConfigService.createKafkaConfig(kafkaConfig);
-                        updateKafkaConfigSuccess = true;
-                    } catch (IOException e) {
-                        logger.error("Failed to add KafkaConfig:" + e.getLocalizedMessage(), e);
-                        throw new InternalErrorException("Failed to add KafkaConfig: " + e.getLocalizedMessage());
-                    }
-
-                } else {
-                    try {
-                        kafkaConfig = kafkaConfigService.updateKafkaConfig(kafkaConfig);
-                        updateKafkaConfigSuccess = true;
-                    } catch (IOException e) {
-                        logger.error("Failed to update KafkaConfig:" + e.getLocalizedMessage(), e);
-                        throw new InternalErrorException("Failed to update KafkaConfig: " + e.getLocalizedMessage());
-                    }
-                }
-
-            }
-        } finally {
-            if (isStreamingCube) {
-                //recover cube desc
-                if (updateStreamingConfigSuccess == false || updateKafkaConfigSuccess == false) {
-                    oldCubeDesc.setLastModified(desc.getLastModified());
-                    CubeInstance cube = cubeService.getCubeManager().getCube(cubeRequest.getCubeName());
-                    try {
-                        desc = cubeService.updateCubeAndDesc(cube, oldCubeDesc, projectName);
-                    } catch (Exception e) {
-                        logger.error("Failed to recover CubeDesc:" + e.getLocalizedMessage(), e);
-                        throw new InternalErrorException("Failed to recover CubeDesc: " + e.getLocalizedMessage());
-                    }
-
-                    if (updateStreamingConfigSuccess == true) {
-
-                        if (oldStreamingConfig != null) {
-
-                            oldStreamingConfig.setLastModified(streamingConfig.getLastModified());
-                            try {
-                                streamingService.updateStreamingConfig(oldStreamingConfig);
-                            } catch (IOException e) {
-                                logger.error("Failed to recover StreamingConfig:" + e.getLocalizedMessage(), e);
-                                throw new InternalErrorException("Failed to recover StreamingConfig: " + e.getLocalizedMessage());
-                            }
-                        } else {
-                            try {
-                                streamingService.dropStreamingConfig(streamingConfig);
-                            } catch (IOException e) {
-                                logger.error("Failed to remove added StreamingConfig:" + e.getLocalizedMessage(), e);
-                                throw new InternalErrorException("Failed to remove added StreamingConfig: " + e.getLocalizedMessage());
-                            }
-                        }
-                    }
-
-                    if (updateKafkaConfigSuccess == true) {
-                        if (oldKafkaConfig != null) {
-                            oldKafkaConfig.setLastModified(kafkaConfig.getLastModified());
-                            try {
-                                kafkaConfigService.updateKafkaConfig(oldKafkaConfig);
-                            } catch (IOException e) {
-                                logger.error("Failed to recover KafkaConfig:" + e.getLocalizedMessage(), e);
-                                throw new InternalErrorException("Failed to recover KafkaConfig: " + e.getLocalizedMessage());
-                            }
-                        } else {
-                            try {
-                                kafkaConfigService.dropKafkaConfig(kafkaConfig);
-                            } catch (IOException e) {
-                                logger.error("Failed to remove added KafkaConfig:" + e.getLocalizedMessage(), e);
-                                throw new InternalErrorException("Failed to remove added KafkaConfig: " + e.getLocalizedMessage());
-                            }
-                        }
-                    }
-
-                }
-            }
-
-        }
-
         String descData = JsonUtil.writeValueAsIndentString(desc);
         cubeRequest.setCubeDescData(descData);
         cubeRequest.setSuccessful(true);

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
index e22bd30..ecd7571 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
@@ -60,9 +60,9 @@ public class StreamingController extends BasicController {
 
     @RequestMapping(value = "/getConfig", method = { RequestMethod.GET })
     @ResponseBody
-    public List<StreamingConfig> getStreamings(@RequestParam(value = "cubeName", required = false) String cubeName, @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
+    public List<StreamingConfig> getStreamings(@RequestParam(value = "table", required = false) String table, @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
         try {
-            return streamingService.getStreamingConfigs(cubeName, limit, offset);
+            return streamingService.getStreamingConfigs(table, limit, offset);
         } catch (IOException e) {
             logger.error("Failed to deal with the request:" + e.getLocalizedMessage(), e);
             throw new InternalErrorException("Failed to deal with the request: " + e.getLocalizedMessage());

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
index e40426b..a0473e9 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.rest.service;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.kylin.common.util.StringUtil;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.engine.streaming.StreamingConfig;
 import org.apache.kylin.rest.constant.Constant;
@@ -37,26 +39,22 @@ public class StreamingService extends BasicService {
     private AccessService accessService;
 
     @PostFilter(Constant.ACCESS_POST_FILTER_READ)
-    public List<StreamingConfig> listAllStreamingConfigs(final String cubeName) throws IOException {
+    public List<StreamingConfig> listAllStreamingConfigs(final String table) throws IOException {
         List<StreamingConfig> streamingConfigs = new ArrayList();
-        CubeInstance cubeInstance = (null != cubeName) ? getCubeManager().getCube(cubeName) : null;
-        if (null == cubeInstance) {
+        if (StringUtils.isEmpty(table)) {
             streamingConfigs = getStreamingManager().listAllStreaming();
         } else {
-            for(StreamingConfig config : getStreamingManager().listAllStreaming()){
-                if(cubeInstance.getName().equals(config.getCubeName())){
-                    streamingConfigs.add(config);
-                }
-            }
+            StreamingConfig config = getStreamingManager().getConfig(table);
+            streamingConfigs.add(config);
         }
 
         return streamingConfigs;
     }
 
-    public List<StreamingConfig> getStreamingConfigs(final String cubeName, final Integer limit, final Integer offset) throws IOException {
+    public List<StreamingConfig> getStreamingConfigs(final String table, final Integer limit, final Integer offset) throws IOException {
 
         List<StreamingConfig> streamingConfigs;
-        streamingConfigs = listAllStreamingConfigs(cubeName);
+        streamingConfigs = listAllStreamingConfigs(table);
 
         if (limit == null || offset == null) {
             return streamingConfigs;

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
index 2e262b3..c05119f 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
@@ -47,9 +47,14 @@ import kafka.message.MessageAndOffset;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.streaming.IStreamingInput;
 import org.apache.kylin.common.util.StreamingBatch;
 import org.apache.kylin.common.util.StreamingMessage;
+import org.apache.kylin.engine.streaming.StreamingConfig;
+import org.apache.kylin.engine.streaming.StreamingManager;
+import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.source.kafka.config.KafkaClusterConfig;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.apache.kylin.source.kafka.util.KafkaRequester;
@@ -65,39 +70,54 @@ public class KafkaStreamingInput implements IStreamingInput {
     private static final Logger logger = LoggerFactory.getLogger(KafkaStreamingInput.class);
 
     @Override
-    public StreamingBatch getBatchWithTimeWindow(String streaming, int id, long startTime, long endTime) {
-        try {
+    public StreamingBatch getBatchWithTimeWindow(RealizationType realizationType, String realizationName, int id, long startTime, long endTime) {
+        if (realizationType != RealizationType.CUBE) {
+            throw new IllegalArgumentException("Unsupported realization in KafkaStreamingInput: " + realizationType);
+        }
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(realizationName);
+        final String streaming = cube.getFactTable();
+        final StreamingManager streamingManager = StreamingManager.getInstance(kylinConfig);
+        final StreamingConfig streamingConfig = streamingManager.getConfig(streaming);
+        if (streamingConfig == null) {
+            throw new IllegalArgumentException("Table " + streaming + " is not a streaming table.");
+        }
+        if (StreamingConfig.STREAMING_TYPE_KAFKA.equals(streamingConfig.getType())) {
             logger.info(String.format("prepare to get streaming batch, name:%s, id:%d, startTime:%d, endTime:%d", streaming, id, startTime, endTime));
-            final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-            final KafkaConfigManager kafkaConfigManager = KafkaConfigManager.getInstance(kylinConfig);
-            final KafkaConfig kafkaConfig = kafkaConfigManager.getKafkaConfig(streaming);
-            final StreamingParser streamingParser = StreamingParser.getStreamingParser(kafkaConfig);
-            final ExecutorService executorService = Executors.newCachedThreadPool();
-            final List<Future<List<StreamingMessage>>> futures = Lists.newArrayList();
-            for (final KafkaClusterConfig kafkaClusterConfig : kafkaConfig.getKafkaClusterConfigs()) {
-                final int partitionCount = KafkaRequester.getKafkaTopicMeta(kafkaClusterConfig).getPartitionIds().size();
-                for (int i = 0; i < partitionCount; ++i) {
-                    final StreamingMessageProducer producer = new StreamingMessageProducer(kafkaClusterConfig, i, Pair.newPair(startTime, endTime), kafkaConfig.getMargin(), streamingParser);
-                    final Future<List<StreamingMessage>> future = executorService.submit(producer);
-                    futures.add(future);
+
+            try {
+                final KafkaConfigManager kafkaConfigManager = KafkaConfigManager.getInstance(kylinConfig);
+                final KafkaConfig kafkaConfig = kafkaConfigManager.getKafkaConfig(streaming);
+                final StreamingParser streamingParser = StreamingParser.getStreamingParser(kafkaConfig, realizationType, realizationName);
+                final ExecutorService executorService = Executors.newCachedThreadPool();
+                final List<Future<List<StreamingMessage>>> futures = Lists.newArrayList();
+                for (final KafkaClusterConfig kafkaClusterConfig : kafkaConfig.getKafkaClusterConfigs()) {
+                    final int partitionCount = KafkaRequester.getKafkaTopicMeta(kafkaClusterConfig).getPartitionIds().size();
+                    for (int i = 0; i < partitionCount; ++i) {
+                        final StreamingMessageProducer producer = new StreamingMessageProducer(kafkaClusterConfig, i, Pair.newPair(startTime, endTime), kafkaConfig.getMargin(), streamingParser);
+                        final Future<List<StreamingMessage>> future = executorService.submit(producer);
+                        futures.add(future);
+                    }
                 }
-            }
-            List<StreamingMessage> messages = Lists.newLinkedList();
-            for (Future<List<StreamingMessage>> future : futures) {
-                try {
-                    messages.addAll(future.get());
-                } catch (InterruptedException e) {
-                    logger.warn("this thread should not be interrupted, just ignore", e);
-                    continue;
-                } catch (ExecutionException e) {
-                    throw new RuntimeException("error when get StreamingMessages",e.getCause());
+                List<StreamingMessage> messages = Lists.newLinkedList();
+                for (Future<List<StreamingMessage>> future : futures) {
+                    try {
+                        messages.addAll(future.get());
+                    } catch (InterruptedException e) {
+                        logger.warn("this thread should not be interrupted, just ignore", e);
+                        continue;
+                    } catch (ExecutionException e) {
+                        throw new RuntimeException("error when get StreamingMessages", e.getCause());
+                    }
                 }
+                final Pair<Long, Long> timeRange = Pair.newPair(startTime, endTime);
+                logger.info("finish to get streaming batch, total message count:" + messages.size());
+                return new StreamingBatch(messages, timeRange);
+            } catch (ReflectiveOperationException e) {
+                throw new RuntimeException("failed to create instance of StreamingParser", e);
             }
-            final Pair<Long, Long> timeRange = Pair.newPair(startTime, endTime);
-            logger.info("finish to get streaming batch, total message count:" + messages.size());
-            return new StreamingBatch(messages, timeRange);
-        } catch (ReflectiveOperationException e) {
-            throw new RuntimeException("failed to create instance of StreamingParser", e);
+        } else {
+            throw new IllegalArgumentException("kafka is the only supported streaming type.");
         }
     }
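
Net effect of the restructuring: the Kafka input is resolved through the cube rather than by a streaming-config name, and only the "kafka" type is accepted. In outline (all names taken from the hunks above):

    CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(realizationName);
    String table = cube.getFactTable();                      // the streaming table
    StreamingConfig sc = StreamingManager.getInstance(kylinConfig).getConfig(table);
    // sc.getType() must be StreamingConfig.STREAMING_TYPE_KAFKA ("kafka");
    // the parser, too, is now derived from the realization, not from the config:
    StreamingParser parser = StreamingParser.getStreamingParser(kafkaConfig,
            realizationType, realizationName);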
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/9021f17d/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
index 3455f1d..7b326e2 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
@@ -50,6 +50,7 @@ import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.common.util.StreamingMessage;
 import org.apache.kylin.metadata.model.IntermediateColumnDesc;
 import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 
 import com.google.common.base.Function;
@@ -68,9 +69,8 @@ public abstract class StreamingParser {
 
     abstract public boolean filter(StreamingMessage streamingMessage);
 
-    public static StreamingParser getStreamingParser(KafkaConfig kafkaConfig) throws ReflectiveOperationException {
-        final String cubeName = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(kafkaConfig.getName()).getCubeName();
-        final CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
+    public static StreamingParser getStreamingParser(KafkaConfig kafkaConfig, RealizationType realizationType, String realizationName) throws ReflectiveOperationException {
+        final CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(realizationName);
         List<TblColRef> columns = Lists.transform(new CubeJoinedFlatTableDesc(cubeInstance.getDescriptor(), null).getColumnList(), new Function<IntermediateColumnDesc, TblColRef>() {
             @Nullable
             @Override

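The fan-out in the first hunk above stands on its own as a pattern: one task per Kafka partition submitted to an ExecutorService, then all futures joined. Below is a minimal, self-contained sketch of it; fetchPartition() is a hypothetical stand-in for StreamingMessageProducer, and where the patch logs and continues on interruption, this version restores the thread's interrupt flag instead:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    public class PartitionFanOut {

        // Hypothetical stand-in for StreamingMessageProducer: fetch one partition's slice.
        static List<String> fetchPartition(int partition) {
            return Collections.singletonList("message-from-partition-" + partition);
        }

        public static List<String> fetchAll(int partitionCount) {
            ExecutorService pool = Executors.newCachedThreadPool();
            List<Future<List<String>>> futures = new ArrayList<>();
            for (int i = 0; i < partitionCount; i++) {
                final int partition = i;
                // One task per partition, mirroring the loop over getPartitionIds() above.
                futures.add(pool.submit(() -> fetchPartition(partition)));
            }
            List<String> messages = new ArrayList<>();
            for (Future<List<String>> future : futures) {
                try {
                    messages.addAll(future.get());
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore the flag rather than swallow it
                } catch (ExecutionException e) {
                    throw new RuntimeException("error while fetching a partition", e.getCause());
                }
            }
            pool.shutdown();
            return messages;
        }
    }

newCachedThreadPool() suits this burst-style fan-out because idle threads are reclaimed after the fetch; a bounded pool would be the safer choice when partition counts can grow large.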

[24/50] [abbrv] kylin git commit: minor, UI remove deprecated code

Posted by li...@apache.org.
minor, UI remove deprecated code


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/61f3278e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/61f3278e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/61f3278e

Branch: refs/heads/master
Commit: 61f3278e5dd98cbfaca8438b9cabd2d4b577ffc6
Parents: a541068
Author: Jason <ji...@163.com>
Authored: Wed Mar 2 17:55:46 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Wed Mar 2 17:55:46 2016 +0800

----------------------------------------------------------------------
 webapp/app/partials/cubeDesigner/refresh_settings.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/61f3278e/webapp/app/partials/cubeDesigner/refresh_settings.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/cubeDesigner/refresh_settings.html b/webapp/app/partials/cubeDesigner/refresh_settings.html
index 15dd4af..1ad294e 100755
--- a/webapp/app/partials/cubeDesigner/refresh_settings.html
+++ b/webapp/app/partials/cubeDesigner/refresh_settings.html
@@ -131,7 +131,7 @@
                      Please input start date when partition date column is defined in model.
                    </small>
                    <!--view mode will convert using filter-->
-                    <span ng-if="state.mode=='view' && metaModel.model.partition_desc.partition_date_column!=null && metaModel.model.partition_desc.partition_date_column">{{(cubeMetaFrame.partition_date_start)|reverseToGMT0 }}</span>
+                    <span ng-if="state.mode=='view' && metaModel.model.partition_desc.partition_date_column">{{(cubeMetaFrame.partition_date_start)|reverseToGMT0 }}</span>
                   </div>
                 </div>
               </div>


[39/50] [abbrv] kylin git commit: KYLIN-1387 should support empty segment

Posted by li...@apache.org.
KYLIN-1387 should support empty segment


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/9352e5a9
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/9352e5a9
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/9352e5a9

Branch: refs/heads/master
Commit: 9352e5a9e87e8b7bae41303ac1556ca7b7dc0023
Parents: 929c7a4
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 16 09:42:34 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Mar 8 12:19:54 2016 +0800

----------------------------------------------------------------------
 .../hbase/steps/SequenceFileCuboidWriter.java   | 48 +++++++++++---------
 1 file changed, 26 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/9352e5a9/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
index 4d76522..8c2d5e4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
@@ -25,35 +25,39 @@ public class SequenceFileCuboidWriter extends KVGTRecordWriter {
 
     public SequenceFileCuboidWriter(CubeDesc cubeDesc, CubeSegment segment) {
         super(cubeDesc, segment);
+        try {
+            initiate();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
     }
 
-
-    @Override
-    protected void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException {
+    protected void initiate() throws IOException {
         if (writer == null) {
-            synchronized (SequenceFileCuboidWriter.class) {
-                if (writer == null) {
-                    JobBuilderSupport jobBuilderSupport = new JobBuilderSupport(cubeSegment, "SYSTEM");
-                    String cuboidRoot = jobBuilderSupport.getCuboidRootPath(cubeSegment);
-                    Path cuboidPath = new Path(cuboidRoot);
-                    FileSystem fs = HadoopUtil.getFileSystem(cuboidRoot);
-                    try {
-                        if (fs.exists(cuboidPath)) {
-                            fs.delete(cuboidPath, true);
-                        }
-
-                        fs.mkdirs(cuboidPath);
-                    } finally {
-                        IOUtils.closeQuietly(fs);
-                    }
-
-                    Path cuboidFile = new Path(cuboidPath, "data.seq");
-                    logger.debug("Cuboid is written to " + cuboidFile);
-                    writer = SequenceFile.createWriter(HadoopUtil.getCurrentConfiguration(), SequenceFile.Writer.file(cuboidFile), SequenceFile.Writer.keyClass(Text.class), SequenceFile.Writer.valueClass(Text.class));
+            JobBuilderSupport jobBuilderSupport = new JobBuilderSupport(cubeSegment, "SYSTEM");
+            String cuboidRoot = jobBuilderSupport.getCuboidRootPath(cubeSegment);
+            Path cuboidPath = new Path(cuboidRoot);
+            FileSystem fs = HadoopUtil.getFileSystem(cuboidRoot);
+            try {
+                if (fs.exists(cuboidPath)) {
+                    fs.delete(cuboidPath, true);
                 }
+
+                fs.mkdirs(cuboidPath);
+            } finally {
+                IOUtils.closeQuietly(fs);
             }
+
+            Path cuboidFile = new Path(cuboidPath, "data.seq");
+            logger.debug("Cuboid is written to " + cuboidFile);
+            writer = SequenceFile.createWriter(HadoopUtil.getCurrentConfiguration(), SequenceFile.Writer.file(cuboidFile), SequenceFile.Writer.keyClass(Text.class), SequenceFile.Writer.valueClass(Text.class));
         }
 
+    }
+
+    @Override
+    protected void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException {
+       
         Text outputValue = new Text();
         Text outputKey = new Text();
         outputKey.set(key.array(), key.offset(), key.length());

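The essence of this fix: writer creation moves out of the first writeAsKeyValue() call, where it sat behind a synchronized double-check, and into the constructor, so a segment that yields no records still ends up with a valid, empty output file. A minimal sketch of the same before/after idea in plain java.io rather than the Hadoop SequenceFile API; EagerFileWriter is hypothetical and only the "data.seq" name is borrowed from the patch:

    import java.io.File;
    import java.io.IOException;

    public class EagerFileWriter {

        private final File output;

        public EagerFileWriter(File dir) {
            this.output = new File(dir, "data.seq");
            try {
                initiate(); // eager: runs even if write() is never called
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        private void initiate() throws IOException {
            File parent = output.getParentFile();
            if (parent != null && !parent.exists() && !parent.mkdirs()) {
                throw new IOException("could not create directory " + parent);
            }
            if (!output.exists() && !output.createNewFile()) {
                throw new IOException("could not create " + output);
            }
        }

        public void write(byte[] key, byte[] value) {
            // Appending is elided; the point is that initiate() already ran,
            // so an "empty segment" (zero calls here) still leaves data.seq behind.
        }
    }

With the initialization done once in the constructor, the synchronized double-checked block from the old version also becomes unnecessary.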

[41/50] [abbrv] kylin git commit: KYLIN-1417 Change to use TreeMap to allow null as value

Posted by li...@apache.org.
KYLIN-1417 Change to use TreeMap to allow null as value


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/d1a574b8
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/d1a574b8
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/d1a574b8

Branch: refs/heads/master
Commit: d1a574b80ef35ebdae69718998a2dfcaeafc3cbc
Parents: 50aab0b
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 16 10:29:47 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Mar 8 12:21:40 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/source/kafka/TimedJsonStreamParser.java  | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/d1a574b8/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
index 0907623..e3075d5 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
@@ -35,11 +35,7 @@
 package org.apache.kylin.source.kafka;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import kafka.message.MessageAndOffset;
 
@@ -102,7 +98,9 @@ public final class TimedJsonStreamParser extends StreamingParser {
     @Override
     public StreamingMessage parse(MessageAndOffset messageAndOffset) {
         try {
-            Map<String, String> root = mapper.readValue(new ByteBufferBackedInputStream(messageAndOffset.message().payload()), mapType);
+            Map<String, String> message = mapper.readValue(new ByteBufferBackedInputStream(messageAndOffset.message().payload()), mapType);
+            Map<String, String> root = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
+            root.putAll(message);
             String tsStr = root.get(tsColName);
             //Preconditions.checkArgument(!StringUtils.isEmpty(tsStr), "Timestamp field " + tsColName + //
             //" cannot be null, the message offset is " + messageAndOffset.getOffset() + " content is " + new String(messageAndOffset.getRawData()));

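The visible effect of the change: the map parsed by Jackson is copied into a TreeMap ordered by String.CASE_INSENSITIVE_ORDER, so a timestamp column configured as "ts" still resolves against a field serialized as "TS", and null values stay legal, per the commit message. A small self-contained illustration, without Kafka or Jackson (field names and values are made up):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class CaseInsensitiveLookup {
        public static void main(String[] args) {
            // Pretend this came from the JSON mapper, with arbitrary key casing.
            Map<String, String> message = new HashMap<>();
            message.put("TS", "1457424000000");
            message.put("Country", null); // TreeMap tolerates null values

            // Same move as the patch: copy into a case-insensitive TreeMap.
            Map<String, String> root = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
            root.putAll(message);

            System.out.println(root.get("ts"));      // 1457424000000, despite the casing
            System.out.println(root.get("country")); // null
        }
    }

One caveat: keys that differ only in case collapse into a single entry in such a map, so the last one copied wins.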

[12/50] [abbrv] kylin git commit: correct hierarchyMasks building in AggregationGroup

Posted by li...@apache.org.
correct hierarchyMasks building in AggregationGroup


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0f48f10b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0f48f10b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0f48f10b

Branch: refs/heads/master
Commit: 0f48f10bc1347c89c01cd28227bec15714601a63
Parents: ab4d890
Author: sunyerui <su...@gmail.com>
Authored: Sun Feb 28 21:36:17 2016 +0800
Committer: sunyerui <su...@gmail.com>
Committed: Sun Feb 28 21:36:17 2016 +0800

----------------------------------------------------------------------
 .../main/java/org/apache/kylin/cube/model/AggregationGroup.java   | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0f48f10b/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java b/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
index 905f8dc..35f85b0 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
@@ -129,8 +129,6 @@ public class AggregationGroup {
     }
 
     private void buildHierarchyMasks(Map<String, TblColRef> colNameAbbr, RowKeyDesc rowKeyDesc) {
-
-        HierarchyMask mask = new HierarchyMask();
         this.hierarchyMasks = new ArrayList<HierarchyMask>();
 
         if (this.selectRule.hierarchy_dims == null || this.selectRule.hierarchy_dims.length == 0) {
@@ -138,6 +136,7 @@ public class AggregationGroup {
         }
 
         for (String[] hierarchy_dims : this.selectRule.hierarchy_dims) {
+            HierarchyMask mask = new HierarchyMask();
             if (hierarchy_dims == null || hierarchy_dims.length == 0) {
                 continue;
             }

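The bug is a classic shared-accumulator mistake: a single HierarchyMask allocated before the loop meant every hierarchy appended into, and then registered, the same object. A generic sketch of the pattern with hypothetical names (Mask stands in for HierarchyMask):

    import java.util.ArrayList;
    import java.util.List;

    public class PerIterationAllocation {

        static class Mask {
            final List<String> dims = new ArrayList<>();
        }

        // Buggy shape: one Mask accumulates the dims of all hierarchies,
        // and the list ends up holding the same object N times.
        static List<Mask> buildShared(String[][] hierarchies) {
            Mask mask = new Mask(); // allocated once: wrong
            List<Mask> masks = new ArrayList<>();
            for (String[] hierarchy : hierarchies) {
                for (String dim : hierarchy) {
                    mask.dims.add(dim);
                }
                masks.add(mask);
            }
            return masks;
        }

        // Fixed shape, as in the patch: a fresh Mask per hierarchy.
        static List<Mask> buildPerHierarchy(String[][] hierarchies) {
            List<Mask> masks = new ArrayList<>();
            for (String[] hierarchy : hierarchies) {
                Mask mask = new Mask(); // allocated per iteration
                for (String dim : hierarchy) {
                    mask.dims.add(dim);
                }
                masks.add(mask);
            }
            return masks;
        }
    }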

[04/50] [abbrv] kylin git commit: KYLIN-1248 fix streaming table auto generate convert issue

Posted by li...@apache.org.
KYLIN-1248 fix streaming table auto generate convert issue


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6ee409cc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6ee409cc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6ee409cc

Branch: refs/heads/master
Commit: 6ee409ccc66c470e12b0e437264fa5db0ea5e5dd
Parents: 0b48e39
Author: janzhongi <ji...@ebay.com>
Authored: Fri Feb 26 14:39:31 2016 +0800
Committer: janzhongi <ji...@ebay.com>
Committed: Fri Feb 26 14:39:31 2016 +0800

----------------------------------------------------------------------
 webapp/app/js/controllers/sourceMeta.js    | 26 +++++++++++++------------
 webapp/app/partials/tables/table_load.html |  4 ++--
 2 files changed, 16 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/6ee409cc/webapp/app/js/controllers/sourceMeta.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/sourceMeta.js b/webapp/app/js/controllers/sourceMeta.js
index 8d9421f..abdeeb8 100755
--- a/webapp/app/js/controllers/sourceMeta.js
+++ b/webapp/app/js/controllers/sourceMeta.js
@@ -255,22 +255,24 @@ KylinApp
             });
           }
 
+        var firstCommit = false;
         if($scope.columnList.length==0){
-          $scope.columnList = columnList;
+          firstCommit = true;
         }
 
-        angular.forEach(columnList,function(item){
-          var included = false;
-          for(var i=0;i<$scope.columnList.length;i++){
-            if($scope.columnList[i].name==item.name){
-              included = true;
-              break;
+        if(!firstCommit){
+          angular.forEach(columnList,function(item){
+            for(var i=0;i<$scope.columnList.length;i++){
+              if($scope.columnList[i].name==item.name){
+                item.checked = $scope.columnList[i].checked;
+                item.type = $scope.columnList[i].type;
+                item.fromSource = $scope.columnList[i].fromSource;
+                break;
+              }
             }
-          }
-          if(!included){
-            $scope.columnList.push(item);
-          }
-        })
+          })
+        }
+        $scope.columnList = columnList;
 
       }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/6ee409cc/webapp/app/partials/tables/table_load.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/tables/table_load.html b/webapp/app/partials/tables/table_load.html
index 468208b..29a2e9a 100644
--- a/webapp/app/partials/tables/table_load.html
+++ b/webapp/app/partials/tables/table_load.html
@@ -85,7 +85,7 @@
         </form>
         <table class="table table-hover table-bordered">
           <tr>
-            <th>Check As Column</th>
+            <th></th>
             <th>Column</th>
             <th>Column Type</th>
             <th>Comment</th>
@@ -103,7 +103,7 @@
               <select chosen ng-model="column.type"
                       ng-options="type as type for type in tableConfig.dataTypes"
                       data-placeholder="select a column type"
-                      style="width: 200px !important;"
+                      style="width: 120px !important;"
                       class="chosen-select">
               </select>
             </td>

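The controller change inverts the merge: the freshly generated column list becomes canonical, and the user-edited fields (checked, type, fromSource) are copied onto it from the previous list by column name, so columns that disappeared from the source no longer linger. Here is the same idea as a standalone sketch, written in Java for consistency with the other sketches in this thread; Column and merge() are hypothetical:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ColumnListMerge {

        static class Column {
            String name;
            String type;
            boolean checked;

            Column(String name, String type, boolean checked) {
                this.name = name;
                this.type = type;
                this.checked = checked;
            }
        }

        // The fresh list wins; user edits survive by name; removed columns drop out.
        static List<Column> merge(List<Column> fresh, List<Column> existing) {
            Map<String, Column> byName = new HashMap<>();
            for (Column c : existing) {
                byName.put(c.name, c);
            }
            List<Column> result = new ArrayList<>();
            for (Column c : fresh) {
                Column old = byName.get(c.name);
                if (old != null) {
                    c.checked = old.checked; // preserve the user's selections
                    c.type = old.type;       // and any type override
                }
                result.add(c);
            }
            return result;
        }
    }

The firstCommit flag in the patch covers the initial load, where there is nothing to preserve and the fresh list can simply be assigned.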

[18/50] [abbrv] kylin git commit: minor, fix CI

Posted by li...@apache.org.
minor, fix CI


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/098a8532
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/098a8532
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/098a8532

Branch: refs/heads/master
Commit: 098a8532f65562644b9b54b465ce23fbed904269
Parents: ca59795
Author: Hongbin Ma <ma...@apache.org>
Authored: Tue Mar 1 18:41:33 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Mar 1 18:41:33 2016 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/query/ITKylinQueryTest.java | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/098a8532/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index fa71db2..fd88452 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -25,12 +25,14 @@ import java.sql.DriverManager;
 import java.util.List;
 import java.util.Properties;
 
+import net.sf.ehcache.CacheManager;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.query.enumerator.OLAPQuery;
 import org.apache.kylin.query.relnode.OLAPContext;
 import org.apache.kylin.query.schema.OLAPSchemaFactory;
+import org.apache.kylin.storage.cache.AbstractCacheFledgedQuery;
 import org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.ObserverEnabler;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.dbunit.database.DatabaseConnection;
@@ -42,6 +44,8 @@ import org.junit.Test;
 
 @Ignore("KylinQueryTest is contained by ITCombinationTest")
 public class ITKylinQueryTest extends KylinTestBase {
+    private static CacheManager cacheManager;
+
 
     @BeforeClass
     public static void setUp() throws Exception {
@@ -74,6 +78,9 @@ public class ITKylinQueryTest extends KylinTestBase {
         // Load H2 Tables (inner join)
         H2Database h2DB = new H2Database(h2Connection, config);
         h2DB.loadAllTables();
+
+        cacheManager = CacheManager.newInstance("../server/src/main/resources/ehcache-test.xml");
+        AbstractCacheFledgedQuery.setCacheManager(cacheManager);
     }
 
     protected static void clean() {
@@ -84,6 +91,11 @@ public class ITKylinQueryTest extends KylinTestBase {
 
         ObserverEnabler.forceCoprocessorUnset();
         HBaseMetadataTestCase.staticCleanupTestMetadata();
+
+        if (cacheManager != null) {
+            cacheManager.shutdown();
+        }
+        AbstractCacheFledgedQuery.setCacheManager(null);
     }
 
     @Ignore("this is only for debug")
@@ -237,7 +249,7 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     @Test
     public void testTopNQuery() throws Exception {
-            if ("left".equalsIgnoreCase(joinType)) {
+        if ("left".equalsIgnoreCase(joinType)) {
             this.execAndCompQuery("src/test/resources/query/sql_topn", null, true);
         }
     }
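
The CI fix above is lifecycle wiring: an Ehcache CacheManager is created once in setUp(), handed to the query-cache layer, then shut down and unset in clean() so suites do not leak manager threads between runs. A JUnit 4 sketch of that lifecycle; CacheHolder is a hypothetical stand-in for AbstractCacheFledgedQuery, and the config path is illustrative:

    import net.sf.ehcache.CacheManager;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;
    import org.junit.Test;

    public class CacheLifecycleTest {

        private static CacheManager cacheManager;

        @BeforeClass
        public static void setUp() {
            // Same calls as the patch, against an illustrative config path.
            cacheManager = CacheManager.newInstance("src/test/resources/ehcache-test.xml");
            CacheHolder.setCacheManager(cacheManager);
        }

        @AfterClass
        public static void tearDown() {
            if (cacheManager != null) {
                cacheManager.shutdown(); // releases cache threads and disk stores
            }
            CacheHolder.setCacheManager(null); // do not leak a dead manager
        }

        @Test
        public void smoke() {
            // a real suite would exercise the cache-backed query path here
        }

        // Hypothetical holder mirroring AbstractCacheFledgedQuery.setCacheManager().
        static class CacheHolder {
            private static CacheManager manager;

            static void setCacheManager(CacheManager m) {
                manager = m;
            }
        }
    }

The null guard in tearDown matters because setUp can fail before the manager is created, and JUnit still runs @AfterClass.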